Showtime !
authorAdrien Di Mascio <Adrien.DiMascio@logilab.fr>
Wed, 05 Nov 2008 15:52:50 +0100
changeset 0 b97547f5f1fa
child 1 88d637274072
child 6 29ab115b9fcb
Showtime !
COPYING
MANIFEST
MANIFEST.in
README
__init__.py
__pkginfo__.py
_exceptions.py
bin/cubicweb-ctl
cleanappl.sh
common/__init__.py
common/appobject.py
common/entity.py
common/html4zope.py
common/i18n.py
common/mail.py
common/migration.py
common/mixins.py
common/mttransforms.py
common/registerers.py
common/rest.py
common/schema.py
common/selectors.py
common/tal.py
common/test/data/bootstrap_packages
common/test/data/entities.py
common/test/data/migration/0.0.3_Any.py
common/test/data/migration/0.0.4_Any.py
common/test/data/migration/0.1.0_Any.py
common/test/data/migration/0.1.0_common.py
common/test/data/migration/0.1.0_repository.py
common/test/data/migration/0.1.0_web.py
common/test/data/migration/0.1.2_Any.py
common/test/data/migration/depends.map
common/test/data/schema/Affaire.sql
common/test/data/schema/Note.py
common/test/data/schema/Note.sql
common/test/data/schema/Personne.sql
common/test/data/schema/Societe.sql
common/test/data/schema/relations.rel
common/test/data/server_migration/2.10.2_Any.sql
common/test/data/server_migration/2.5.0_Any.sql
common/test/data/server_migration/2.6.0_Any.sql
common/test/data/server_migration/bootstrapmigration_repository.py
common/test/unittest_entity.py
common/test/unittest_mail.py
common/test/unittest_migration.py
common/test/unittest_rest.py
common/test/unittest_uilib.py
common/test/unittest_utils.py
common/uilib.py
common/utils.py
common/view.py
cwconfig.py
cwctl.py
cwvreg.py
dbapi.py
debian.etch/control
debian/changelog
debian/compat
debian/control
debian/copyright
debian/cubicweb-client.dirs
debian/cubicweb-common.dirs
debian/cubicweb-common.postinst
debian/cubicweb-core.dirs
debian/cubicweb-ctl.bash_completion
debian/cubicweb-ctl.cubicweb.init
debian/cubicweb-ctl.dirs
debian/cubicweb-ctl.logrotate
debian/cubicweb-ctl.manpages
debian/cubicweb-ctl.postinst
debian/cubicweb-ctl.postrm
debian/cubicweb-ctl.prerm
debian/cubicweb-dev.dirs
debian/cubicweb-doc
debian/cubicweb-documentation.dirs
debian/cubicweb-documentation.install
debian/cubicweb-documentation.postinst
debian/cubicweb-documentation.prerm
debian/cubicweb-server.dirs
debian/cubicweb-server.postinst
debian/cubicweb-server.prerm
debian/cubicweb-twisted.dirs
debian/cubicweb-twisted.postinst
debian/cubicweb-twisted.prerm
debian/cubicweb-web.dirs
debian/cubicweb-web.postinst
debian/pycompat
debian/rules
devtools/__init__.py
devtools/_apptest.py
devtools/apptest.py
devtools/cwtwill.py
devtools/devctl.py
devtools/fake.py
devtools/fill.py
devtools/fix_po_encoding
devtools/htmlparser.py
devtools/livetest.py
devtools/migrtest.py
devtools/pkginfo.py
devtools/repotest.py
devtools/stresstester.py
devtools/test/data/bootstrap_packages
devtools/test/data/dbfill.conf
devtools/test/data/firstnames.txt
devtools/test/data/schema/Bug.sql
devtools/test/data/schema/Project.sql
devtools/test/data/schema/Story.sql
devtools/test/data/schema/Version.sql
devtools/test/data/schema/custom.py
devtools/test/data/schema/relations.rel
devtools/test/data/views/__init__.py
devtools/test/data/views/bug.py
devtools/test/runtests.py
devtools/test/unittest_dbfill.py
devtools/test/unittest_fill.py
devtools/test/unittest_testlib.py
devtools/testlib.py
doc/.static/logilab.png
doc/.static/sphinx-default.css
doc/.templates/layout.html
doc/Makefile
doc/argouml.log
doc/conf.py
doc/cubicweb-uml.txt
doc/cubicweb.png
doc/cubicweb.zargo
doc/cubicweb.zargo~0.14.1
doc/devmanual_fr/advanced_notes.txt
doc/devmanual_fr/archi_globale.dia
doc/devmanual_fr/archi_globale.png
doc/devmanual_fr/chap_autres_composants_ui.txt
doc/devmanual_fr/chap_bases_framework_cubicweb.txt
doc/devmanual_fr/chap_configuration_instance.txt
doc/devmanual_fr/chap_creation_instance.txt
doc/devmanual_fr/chap_definition_schema.txt
doc/devmanual_fr/chap_definition_workflows.txt
doc/devmanual_fr/chap_fondements_cubicweb.txt
doc/devmanual_fr/chap_i18n.txt
doc/devmanual_fr/chap_manipulation_donnees.txt
doc/devmanual_fr/chap_migration.txt
doc/devmanual_fr/chap_mise_en_place_environnement.txt
doc/devmanual_fr/chap_rql.txt
doc/devmanual_fr/chap_serveur_crochets.txt
doc/devmanual_fr/chap_serveur_notification.txt
doc/devmanual_fr/chap_tests.txt
doc/devmanual_fr/chap_ui_gestion_formulaire.txt
doc/devmanual_fr/chap_ui_js_json.txt
doc/devmanual_fr/chap_visualisation_donnees.txt
doc/devmanual_fr/gae.txt
doc/devmanual_fr/index.txt
doc/devmanual_fr/main_template_layout.dia
doc/devmanual_fr/main_template_layout.png
doc/devmanual_fr/makefile
doc/devmanual_fr/sect_cubicweb-ctl.txt
doc/devmanual_fr/sect_definition_entites.txt
doc/devmanual_fr/sect_definition_schema.txt
doc/devmanual_fr/sect_installation.txt
doc/devmanual_fr/sect_mercurial.txt
doc/devmanual_fr/sect_stdlib_schemas.txt
doc/devmanual_fr/sect_stdlib_vues.txt
doc/faq.fr.txt
doc/howto.fr.txt
doc/html-build/.doctrees/devmanual_fr/advanced_notes.doctree
doc/html-build/.doctrees/devmanual_fr/chap_autres_composants_ui.doctree
doc/html-build/.doctrees/devmanual_fr/chap_bases_framework_cubicweb.doctree
doc/html-build/.doctrees/devmanual_fr/chap_configuration_instance.doctree
doc/html-build/.doctrees/devmanual_fr/chap_definition_schema.doctree
doc/html-build/.doctrees/devmanual_fr/chap_definition_workflows.doctree
doc/html-build/.doctrees/devmanual_fr/chap_fondements_cubicweb.doctree
doc/html-build/.doctrees/devmanual_fr/chap_i18n.doctree
doc/html-build/.doctrees/devmanual_fr/chap_manipulation_donnees.doctree
doc/html-build/.doctrees/devmanual_fr/chap_migration.doctree
doc/html-build/.doctrees/devmanual_fr/chap_mise_en_place_environnement.doctree
doc/html-build/.doctrees/devmanual_fr/chap_rql.doctree
doc/html-build/.doctrees/devmanual_fr/chap_serveur_crochets.doctree
doc/html-build/.doctrees/devmanual_fr/chap_serveur_notification.doctree
doc/html-build/.doctrees/devmanual_fr/chap_tests.doctree
doc/html-build/.doctrees/devmanual_fr/chap_ui_gestion_formulaire.doctree
doc/html-build/.doctrees/devmanual_fr/chap_ui_js_json.doctree
doc/html-build/.doctrees/devmanual_fr/chap_visualisation_donnees.doctree
doc/html-build/.doctrees/devmanual_fr/index.doctree
doc/html-build/.doctrees/devmanual_fr/sect_cubicweb-ctl.doctree
doc/html-build/.doctrees/devmanual_fr/sect_definition_entites.doctree
doc/html-build/.doctrees/devmanual_fr/sect_definition_schema.doctree
doc/html-build/.doctrees/devmanual_fr/sect_installation.doctree
doc/html-build/.doctrees/devmanual_fr/sect_mercurial.doctree
doc/html-build/.doctrees/devmanual_fr/sect_stdlib_schemas.doctree
doc/html-build/.doctrees/devmanual_fr/sect_stdlib_vues.doctree
doc/html-build/.doctrees/environment.pickle
doc/html-build/.doctrees/index.doctree
doc/html-build/.doctrees/plan_formation_python_cubicweb.doctree
doc/html-build/.doctrees/querier.doctree
doc/html-build/.doctrees/securite.doctree
doc/html-build/.doctrees/source/index.doctree
doc/html-build/source/index.html
doc/index-content.txt
doc/index.txt
doc/makefile
doc/plan_formation_python_cubicweb.txt
doc/querier.txt
doc/securite.txt
doc/tutmanual_fr/images/lax-book.00-login.en.png
doc/tutmanual_fr/images/lax-book.01-start.en.png
doc/tutmanual_fr/images/lax-book.02-cookie-values.en.png
doc/tutmanual_fr/images/lax-book.02-create-blog.en.png
doc/tutmanual_fr/images/lax-book.03-list-one-blog.en.png
doc/tutmanual_fr/images/lax-book.03-site-config-panel.en.png
doc/tutmanual_fr/images/lax-book.03-state-submitted.en.png
doc/tutmanual_fr/images/lax-book.03-transitions-view.en.png
doc/tutmanual_fr/images/lax-book.04-detail-one-blog.en.png
doc/tutmanual_fr/images/lax-book.05-list-two-blog.en.png
doc/tutmanual_fr/images/lax-book.06-add-relation-entryof.en.png
doc/tutmanual_fr/images/lax-book.06-header-no-login.en.png
doc/tutmanual_fr/images/lax-book.06-main-template-layout.en.png
doc/tutmanual_fr/images/lax-book.06-main-template-logo.en.png
doc/tutmanual_fr/images/lax-book.06-simple-main-template.en.png
doc/tutmanual_fr/images/lax-book.07-detail-one-blogentry.en.png
doc/tutmanual_fr/images/lax-book.08-schema.en.png
doc/tutmanual_fr/images/lax-book.09-new-view-blogentry.en.png
doc/tutmanual_fr/images/lax-book.10-blog-with-two-entries.en.png
doc/tutmanual_fr/tut-create-app.en.txt
doc/tutmanual_fr/tut-create-app.fr.txt
embedded/README
embedded/mx/DateTime/ARPA.py
embedded/mx/DateTime/DateTime.py
embedded/mx/DateTime/ISO.py
embedded/mx/DateTime/Parser.py
embedded/mx/DateTime/Timezone.py
embedded/mx/DateTime/__init__.py
embedded/mx/DateTime/mxDateTime_python.py
embedded/mx/__init__.py
entities/__init__.py
entities/authobjs.py
entities/lib.py
entities/schemaobjs.py
entities/test/data/bootstrap_packages
entities/test/data/schema.py
entities/test/unittest_base.py
entities/wfobjs.py
etwist/__init__.py
etwist/request.py
etwist/server.py
etwist/twconfig.py
etwist/twctl.py
gettext.py
goa/__init__.py
goa/appobjects/__init__.py
goa/appobjects/components.py
goa/appobjects/dbmgmt.py
goa/appobjects/gauthservice.py
goa/appobjects/sessions.py
goa/bin/laxctl
goa/db.py
goa/dbinit.py
goa/dbmyams.py
goa/doc/FAQ.en.txt
goa/doc/README_LAX.fr.txt
goa/doc/devmanual_fr/advanced_notes.txt
goa/doc/devmanual_fr/archi_globale.dia
goa/doc/devmanual_fr/archi_globale.png
goa/doc/devmanual_fr/chap_autres_composants_ui.txt
goa/doc/devmanual_fr/chap_bases_framework_erudi.txt
goa/doc/devmanual_fr/chap_configuration_instance.txt
goa/doc/devmanual_fr/chap_definition_schema.txt
goa/doc/devmanual_fr/chap_definition_workflows.txt
goa/doc/devmanual_fr/chap_fondements_erudi.txt
goa/doc/devmanual_fr/chap_i18n.txt
goa/doc/devmanual_fr/chap_manipulation_donnees.txt
goa/doc/devmanual_fr/chap_migration.txt
goa/doc/devmanual_fr/chap_mise_en_place_environnement.txt
goa/doc/devmanual_fr/chap_rql.txt
goa/doc/devmanual_fr/chap_serveur_crochets.txt
goa/doc/devmanual_fr/chap_serveur_notification.txt
goa/doc/devmanual_fr/chap_tests.txt
goa/doc/devmanual_fr/chap_ui_gestion_formulaire.txt
goa/doc/devmanual_fr/chap_ui_js_json.txt
goa/doc/devmanual_fr/chap_visualisation_donnees.txt
goa/doc/devmanual_fr/index.txt
goa/doc/devmanual_fr/main_template_layout.dia
goa/doc/devmanual_fr/main_template_layout.png
goa/doc/devmanual_fr/makefile
goa/doc/devmanual_fr/sect_definition_entites.txt
goa/doc/devmanual_fr/sect_definition_schema.txt
goa/doc/devmanual_fr/sect_erudi-ctl.txt
goa/doc/devmanual_fr/sect_installation.txt
goa/doc/devmanual_fr/sect_mercurial.txt
goa/doc/devmanual_fr/sect_stdlib_schemas.txt
goa/doc/devmanual_fr/sect_stdlib_vues.txt
goa/doc/quickstart.txt
goa/doc/tutorial-wine.txt
goa/doc/tutorial.en.txt
goa/gaesource.py
goa/goaconfig.py
goa/goactl.py
goa/goavreg.py
goa/overrides/__init__.py
goa/overrides/mttransforms.py
goa/overrides/rqlannotation.py
goa/overrides/server__init__.py
goa/overrides/server_utils.py
goa/overrides/toolsutils.py
goa/rqlinterpreter.py
goa/skel/app.yaml.tmpl
goa/skel/custom.py
goa/skel/cw-cubes/README.txt
goa/skel/i18n/en.po
goa/skel/i18n/fr.po
goa/skel/loader.py
goa/skel/main.py
goa/skel/schema.py
goa/skel/views.py
goa/test/data/__init__.py
goa/test/data/bootstrap_packages
goa/test/data/schema.py
goa/test/data/settings.py
goa/test/data/views.py
goa/test/pytestconf.py
goa/test/unittest_db.py
goa/test/unittest_editcontroller.py
goa/test/unittest_metadata.py
goa/test/unittest_rql.py
goa/test/unittest_schema.py
goa/test/unittest_views.py
goa/testlib.py
goa/tools/__init__.py
goa/tools/generate_schema_img.py
goa/tools/i18n.py
goa/tools/laxctl.py
hercule.py
i18n/en.po
i18n/entities.pot
i18n/fr.po
interfaces.py
man/cubicweb-ctl.1
md5crypt.py
misc/cwdesklets/gfx/bg.png
misc/cwdesklets/gfx/border-left.png
misc/cwdesklets/gfx/logo_cw.png
misc/cwdesklets/gfx/rss.png
misc/cwdesklets/rql_query.display
misc/cwdesklets/rqlsensor/__init__.py
misc/cwdesklets/web_query.display
misc/cwfs/A_FAIRE
misc/cwfs/cwfs-spec.txt
misc/cwfs/cwfs.py
misc/cwfs/cwfs_test.py
misc/cwzope/cwzope.py
misc/migration/2.37.1_Any.py
misc/migration/2.39.0_Any.py
misc/migration/2.42.0_Any.py
misc/migration/2.42.1_Any.py
misc/migration/2.43.0_Any.py
misc/migration/2.44.0_Any.py
misc/migration/2.45.0_Any.py
misc/migration/2.46.0_Any.py
misc/migration/2.47.0_Any.py
misc/migration/2.48.8_Any.py
misc/migration/2.49.3_Any.py
misc/migration/2.50.0_Any.py
misc/migration/3.0.0_Any.py
misc/migration/bootstrapmigration_repository.py
misc/migration/postcreate.py
pylintrc
rset.py
schema.py
schemas/Bookmark.py
schemas/Card.py
schemas/_regproc.sql.mysql
schemas/_regproc.sql.postgres
schemas/base.py
schemas/bootstrap.py
schemaviewer.py
server/__init__.py
server/checkintegrity.py
server/hookhelper.py
server/hooks.py
server/hooksmanager.py
server/migractions.py
server/pool.py
server/querier.py
server/repository.py
server/rqlannotation.py
server/rqlrewrite.py
server/schemahooks.py
server/schemaserial.py
server/securityhooks.py
server/server.py
server/serverconfig.py
server/serverctl.py
server/session.py
server/sources/__init__.py
server/sources/native.py
server/sources/rql2sql.py
server/sqlutils.py
server/ssplanner.py
server/test/data/bootstrap_packages
server/test/data/config1/application_hooks.py
server/test/data/config1/bootstrap_packages
server/test/data/config1/server-ctl.conf
server/test/data/config1/sources
server/test/data/config2/application_hooks.py
server/test/data/config2/bootstrap_packages
server/test/data/config2/server-ctl.conf
server/test/data/config2/sources
server/test/data/hooks.py
server/test/data/migration/postcreate.py
server/test/data/migrschema/Affaire.py
server/test/data/migrschema/Folder2.py
server/test/data/migrschema/Note.py
server/test/data/migrschema/Personne.sql
server/test/data/migrschema/Societe.perms
server/test/data/migrschema/Societe.sql
server/test/data/migrschema/relations.rel
server/test/data/schema/Affaire.py
server/test/data/schema/Note.sql
server/test/data/schema/Personne.sql
server/test/data/schema/Societe.py
server/test/data/schema/custom.py
server/test/data/schema/note.py
server/test/data/schema/relations.rel
server/test/runtests.py
server/test/unittest_checkintegrity.py
server/test/unittest_config.py
server/test/unittest_hookhelper.py
server/test/unittest_hooks.py
server/test/unittest_hooksmanager.py
server/test/unittest_migractions.py
server/test/unittest_querier.py
server/test/unittest_repository.py
server/test/unittest_rql2sql.py
server/test/unittest_rqlannotation.py
server/test/unittest_rqlrewrite.py
server/test/unittest_schemaserial.py
server/test/unittest_security.py
server/test/unittest_session.py
server/test/unittest_sqlutils.py
server/test/unittest_ssplanner.py
server/test/unittest_tools.py
server/utils.py
setup.py
skeleton/MANIFEST.in
skeleton/__init__.py.tmpl
skeleton/__pkginfo__.py.tmpl
skeleton/data/cubes.CUBENAME.css
skeleton/data/cubes.CUBENAME.js
skeleton/data/external_resources.tmpl
skeleton/debian/DISTNAME.prerm.tmpl
skeleton/debian/changelog.tmpl
skeleton/debian/compat
skeleton/debian/control.tmpl
skeleton/debian/copyright.tmpl
skeleton/debian/rules.tmpl
skeleton/entities.py
skeleton/i18n/en.po
skeleton/i18n/fr.po
skeleton/migration/postcreate.py
skeleton/migration/precreate.py
skeleton/schema.py
skeleton/setup.py
skeleton/site_cubicweb.py
skeleton/sobjects.py
skeleton/test/data/bootstrap_cubes.tmpl
skeleton/test/pytestconf.py
skeleton/test/realdb_test_CUBENAME.py
skeleton/test/test_CUBENAME.py
skeleton/views.py
sobjects/__init__.py
sobjects/email.py
sobjects/hooks.py
sobjects/notification.py
sobjects/supervising.py
sobjects/test/data/bootstrap_packages
sobjects/test/data/schema.py
sobjects/test/data/sobjects/__init__.py
sobjects/test/unittest_email.py
sobjects/test/unittest_hooks.py
sobjects/test/unittest_notification.py
sobjects/test/unittest_supervising.py
test/data/bootstrap_packages
test/data/erqlexpr_on_ertype.py
test/data/rqlexpr_on_ertype_read.py
test/data/rrqlexpr_on_attr.py
test/data/rrqlexpr_on_eetype.py
test/unittest_cwconfig.py
test/unittest_cwctl.py
test/unittest_dbapi.py
test/unittest_rset.py
test/unittest_schema.py
test/unittest_vregistry.py
toolsutils.py
vregistry.py
web/__init__.py
web/_exceptions.py
web/action.py
web/application.py
web/box.py
web/component.py
web/controller.py
web/data/asc.gif
web/data/banner.png
web/data/bg.gif
web/data/bg_trame_grise.png
web/data/black-check.png
web/data/bullet.png
web/data/bullet_orange.png
web/data/button.png
web/data/calendar.gif
web/data/critical.png
web/data/cubicweb.acl.css
web/data/cubicweb.ajax.js
web/data/cubicweb.bookmarks.js
web/data/cubicweb.calendar.css
web/data/cubicweb.calendar.js
web/data/cubicweb.calendar_popup.css
web/data/cubicweb.compat.js
web/data/cubicweb.css
web/data/cubicweb.edition.js
web/data/cubicweb.fckcwconfig.js
web/data/cubicweb.form.css
web/data/cubicweb.formfilter.js
web/data/cubicweb.gmap.js
web/data/cubicweb.goa.js
web/data/cubicweb.html_tree.css
web/data/cubicweb.htmlhelpers.js
web/data/cubicweb.ie.css
web/data/cubicweb.iprogress.css
web/data/cubicweb.login.css
web/data/cubicweb.mailform.css
web/data/cubicweb.preferences.css
web/data/cubicweb.print.css
web/data/cubicweb.python.js
web/data/cubicweb.schema.css
web/data/cubicweb.sortable.js
web/data/cubicweb.suggest.css
web/data/cubicweb.tablesorter.css
web/data/cubicweb.timeline-bundle.js
web/data/cubicweb.timeline-ext.js
web/data/cubicweb.timetable.css
web/data/cubicweb.widgets.js
web/data/desc.gif
web/data/download.gif
web/data/dublincore-button.png
web/data/dublincore-icon.png
web/data/error.png
web/data/external_resources
web/data/favicon.ico
web/data/feed-icon.png
web/data/feed-icon16x16.png
web/data/feed-icon32x32.png
web/data/file.gif
web/data/folder-closed.gif
web/data/folder.gif
web/data/gmap.utility.labeledmarker.js
web/data/gmap_blue_marker.png
web/data/go.png
web/data/gradient-grey-up.png
web/data/gradient-grey.gif
web/data/help.png
web/data/help_ie.png
web/data/icon_blank.png
web/data/icon_bookmark.gif
web/data/icon_emailaddress.gif
web/data/icon_euser.gif
web/data/icon_map.png
web/data/icon_state.gif
web/data/information.png
web/data/jquery.autocomplete.css
web/data/jquery.autocomplete.js
web/data/jquery.js
web/data/jquery.json.js
web/data/jquery.tablesorter.js
web/data/jquery.treeview.css
web/data/jquery.treeview.js
web/data/liveclipboard-icon.png
web/data/loading.gif
web/data/logo.png
web/data/logo.xcf
web/data/mail.gif
web/data/microformats-button.png
web/data/microformats-icon.png
web/data/minus.gif
web/data/no-check-no-border.png
web/data/nomail.gif
web/data/nomail.xcf
web/data/plus.gif
web/data/puce.png
web/data/puce_down.png
web/data/puce_down_black.png
web/data/pygments.css
web/data/required.png
web/data/rss-button.png
web/data/rss.png
web/data/search.png
web/data/sendcancel.png
web/data/sendok.png
web/data/shadow.gif
web/data/timeline-bundle.css
web/data/timeline/blue-circle.png
web/data/timeline/bubble-arrows.png
web/data/timeline/bubble-body-and-arrows.png
web/data/timeline/bubble-body.png
web/data/timeline/bubble-bottom-arrow.png
web/data/timeline/bubble-bottom-left.png
web/data/timeline/bubble-bottom-right.png
web/data/timeline/bubble-bottom.png
web/data/timeline/bubble-left-arrow.png
web/data/timeline/bubble-left.png
web/data/timeline/bubble-right-arrow.png
web/data/timeline/bubble-right.png
web/data/timeline/bubble-top-arrow.png
web/data/timeline/bubble-top-left.png
web/data/timeline/bubble-top-right.png
web/data/timeline/bubble-top.png
web/data/timeline/close-button.png
web/data/timeline/copyright-vertical.png
web/data/timeline/copyright.png
web/data/timeline/dark-blue-circle.png
web/data/timeline/dark-green-circle.png
web/data/timeline/dark-red-circle.png
web/data/timeline/dull-blue-circle.png
web/data/timeline/dull-green-circle.png
web/data/timeline/dull-red-circle.png
web/data/timeline/gray-circle.png
web/data/timeline/green-circle.png
web/data/timeline/message-bottom-left.png
web/data/timeline/message-bottom-right.png
web/data/timeline/message-left.png
web/data/timeline/message-right.png
web/data/timeline/message-top-left.png
web/data/timeline/message-top-right.png
web/data/timeline/message.png
web/data/timeline/progress-running.gif
web/data/timeline/red-circle.png
web/data/timeline/sundial.png
web/data/timeline/top-bubble.png
web/data/treeview-black-line.gif
web/data/treeview-black.gif
web/data/treeview-default-line.gif
web/data/treeview-default.gif
web/data/treeview-famfamfam-line.gif
web/data/treeview-famfamfam.gif
web/data/treeview-gray-line.gif
web/data/treeview-gray.gif
web/data/treeview-red-line.gif
web/data/treeview-red.gif
web/facet.py
web/form.py
web/htmlwidgets.py
web/httpcache.py
web/request.py
web/test/data/bootstrap_packages
web/test/data/schema/Personne.sql
web/test/data/schema/Societe.sql
web/test/data/schema/relations.rel
web/test/data/schema/testschema.py
web/test/data/views.py
web/test/jstest_python.jst
web/test/runtests.py
web/test/test_views.py
web/test/testutils.js
web/test/unittest_application.py
web/test/unittest_controller.py
web/test/unittest_magicsearch.py
web/test/unittest_urlpublisher.py
web/test/unittest_urlrewrite.py
web/test/unittest_views_actions.py
web/test/unittest_views_apacherewrite.py
web/test/unittest_views_basecontrollers.py
web/test/unittest_views_baseforms.py
web/test/unittest_views_baseviews.py
web/test/unittest_views_embeding.py
web/test/unittest_views_navigation.py
web/test/unittest_views_searchrestriction.py
web/test/unittest_viewselector.py
web/test/unittest_webconfig.py
web/test/unittest_widgets.py
web/views/__init__.py
web/views/actions.py
web/views/ajaxedit.py
web/views/apacherewrite.py
web/views/authentication.py
web/views/basecomponents.py
web/views/basecontrollers.py
web/views/baseforms.py
web/views/basetemplates.py
web/views/baseviews.py
web/views/bookmark.py
web/views/boxes.py
web/views/calendar.py
web/views/card.py
web/views/debug.py
web/views/dynimages.py
web/views/edit_attributes.pt
web/views/edit_multiple.pt
web/views/edit_relations.pt
web/views/editcontroller.py
web/views/emailaddress.py
web/views/embedding.py
web/views/eproperties.py
web/views/error.py
web/views/euser.py
web/views/facets.py
web/views/ibreadcrumbs.py
web/views/idownloadable.py
web/views/igeocodable.py
web/views/iprogress.py
web/views/magicsearch.py
web/views/management.py
web/views/massmailing.py
web/views/navigation.py
web/views/old_calendar.py
web/views/plots.py
web/views/schemaentities.py
web/views/searchrestriction.py
web/views/sessions.py
web/views/startup.py
web/views/tableview.py
web/views/timeline.py
web/views/timetable.py
web/views/treeview.py
web/views/urlpublishing.py
web/views/urlrewrite.py
web/views/vcard.py
web/views/wdoc.py
web/views/wfentities.py
web/views/xbel.py
web/wdoc/ChangeLog_en
web/wdoc/ChangeLog_fr
web/wdoc/about_en.rst
web/wdoc/about_fr.rst
web/wdoc/add_content_en.rst
web/wdoc/add_content_fr.rst
web/wdoc/advanced_usage_en.rst
web/wdoc/advanced_usage_schema_en.rst
web/wdoc/advanced_usage_schema_fr.rst
web/wdoc/bookmarks_en.rst
web/wdoc/bookmarks_fr.rst
web/wdoc/custom_view_en.rst
web/wdoc/custom_view_fr.rst
web/wdoc/custom_view_last_update_en.rst
web/wdoc/custom_view_last_update_fr.rst
web/wdoc/custom_view_rss_en.rst
web/wdoc/custom_view_rss_fr.rst
web/wdoc/glossary_en.rst
web/wdoc/glossary_fr.rst
web/wdoc/images/userprefs_en.png
web/wdoc/images/userprefs_fr.png
web/wdoc/main_en.rst
web/wdoc/search_en.rst
web/wdoc/search_fr.rst
web/wdoc/search_sample_queries_en.rst
web/wdoc/search_sample_queries_fr.rst
web/wdoc/standard_usage_en.rst
web/wdoc/standard_usage_fr.rst
web/wdoc/toc.xml
web/wdoc/tut_rql_en.rst
web/wdoc/tut_rql_fr.rst
web/wdoc/userprefs_en.rst
web/wdoc/userprefs_fr.rst
web/webconfig.py
web/webctl.py
web/widgets.py
wsgi/__init__.py
wsgi/handler.py
wsgi/request.py
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/COPYING	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,165 @@
+		   GNU LESSER GENERAL PUBLIC LICENSE
+                       Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+
+  This version of the GNU Lesser General Public License incorporates
+the terms and conditions of version 3 of the GNU General Public
+License, supplemented by the additional permissions listed below.
+
+  0. Additional Definitions. 
+
+  As used herein, "this License" refers to version 3 of the GNU Lesser
+General Public License, and the "GNU GPL" refers to version 3 of the GNU
+General Public License.
+
+  "The Library" refers to a covered work governed by this License,
+other than an Application or a Combined Work as defined below.
+
+  An "Application" is any work that makes use of an interface provided
+by the Library, but which is not otherwise based on the Library.
+Defining a subclass of a class defined by the Library is deemed a mode
+of using an interface provided by the Library.
+
+  A "Combined Work" is a work produced by combining or linking an
+Application with the Library.  The particular version of the Library
+with which the Combined Work was made is also called the "Linked
+Version".
+
+  The "Minimal Corresponding Source" for a Combined Work means the
+Corresponding Source for the Combined Work, excluding any source code
+for portions of the Combined Work that, considered in isolation, are
+based on the Application, and not on the Linked Version.
+
+  The "Corresponding Application Code" for a Combined Work means the
+object code and/or source code for the Application, including any data
+and utility programs needed for reproducing the Combined Work from the
+Application, but excluding the System Libraries of the Combined Work.
+
+  1. Exception to Section 3 of the GNU GPL.
+
+  You may convey a covered work under sections 3 and 4 of this License
+without being bound by section 3 of the GNU GPL.
+
+  2. Conveying Modified Versions.
+
+  If you modify a copy of the Library, and, in your modifications, a
+facility refers to a function or data to be supplied by an Application
+that uses the facility (other than as an argument passed when the
+facility is invoked), then you may convey a copy of the modified
+version:
+
+   a) under this License, provided that you make a good faith effort to
+   ensure that, in the event an Application does not supply the
+   function or data, the facility still operates, and performs
+   whatever part of its purpose remains meaningful, or
+
+   b) under the GNU GPL, with none of the additional permissions of
+   this License applicable to that copy.
+
+  3. Object Code Incorporating Material from Library Header Files.
+
+  The object code form of an Application may incorporate material from
+a header file that is part of the Library.  You may convey such object
+code under terms of your choice, provided that, if the incorporated
+material is not limited to numerical parameters, data structure
+layouts and accessors, or small macros, inline functions and templates
+(ten or fewer lines in length), you do both of the following:
+
+   a) Give prominent notice with each copy of the object code that the
+   Library is used in it and that the Library and its use are
+   covered by this License.
+
+   b) Accompany the object code with a copy of the GNU GPL and this license
+   document.
+
+  4. Combined Works.
+
+  You may convey a Combined Work under terms of your choice that,
+taken together, effectively do not restrict modification of the
+portions of the Library contained in the Combined Work and reverse
+engineering for debugging such modifications, if you also do each of
+the following:
+
+   a) Give prominent notice with each copy of the Combined Work that
+   the Library is used in it and that the Library and its use are
+   covered by this License.
+
+   b) Accompany the Combined Work with a copy of the GNU GPL and this license
+   document.
+
+   c) For a Combined Work that displays copyright notices during
+   execution, include the copyright notice for the Library among
+   these notices, as well as a reference directing the user to the
+   copies of the GNU GPL and this license document.
+
+   d) Do one of the following:
+
+       0) Convey the Minimal Corresponding Source under the terms of this
+       License, and the Corresponding Application Code in a form
+       suitable for, and under terms that permit, the user to
+       recombine or relink the Application with a modified version of
+       the Linked Version to produce a modified Combined Work, in the
+       manner specified by section 6 of the GNU GPL for conveying
+       Corresponding Source.
+
+       1) Use a suitable shared library mechanism for linking with the
+       Library.  A suitable mechanism is one that (a) uses at run time
+       a copy of the Library already present on the user's computer
+       system, and (b) will operate properly with a modified version
+       of the Library that is interface-compatible with the Linked
+       Version. 
+
+   e) Provide Installation Information, but only if you would otherwise
+   be required to provide such information under section 6 of the
+   GNU GPL, and only to the extent that such information is
+   necessary to install and execute a modified version of the
+   Combined Work produced by recombining or relinking the
+   Application with a modified version of the Linked Version. (If
+   you use option 4d0, the Installation Information must accompany
+   the Minimal Corresponding Source and Corresponding Application
+   Code. If you use option 4d1, you must provide the Installation
+   Information in the manner specified by section 6 of the GNU GPL
+   for conveying Corresponding Source.)
+
+  5. Combined Libraries.
+
+  You may place library facilities that are a work based on the
+Library side by side in a single library together with other library
+facilities that are not Applications and are not covered by this
+License, and convey such a combined library under terms of your
+choice, if you do both of the following:
+
+   a) Accompany the combined library with a copy of the same work based
+   on the Library, uncombined with any other library facilities,
+   conveyed under the terms of this License.
+
+   b) Give prominent notice with the combined library that part of it
+   is a work based on the Library, and explaining where to find the
+   accompanying uncombined form of the same work.
+
+  6. Revised Versions of the GNU Lesser General Public License.
+
+  The Free Software Foundation may publish revised and/or new versions
+of the GNU Lesser General Public License from time to time. Such new
+versions will be similar in spirit to the present version, but may
+differ in detail to address new problems or concerns.
+
+  Each version is given a distinguishing version number. If the
+Library as you received it specifies that a certain numbered version
+of the GNU Lesser General Public License "or any later version"
+applies to it, you have the option of following the terms and
+conditions either of that published version or of any later version
+published by the Free Software Foundation. If the Library as you
+received it does not specify a version number of the GNU Lesser
+General Public License, you may choose any version of the GNU Lesser
+General Public License ever published by the Free Software Foundation.
+
+  If the Library as you received it specifies that a proxy can decide
+whether future versions of the GNU Lesser General Public License shall
+apply, that proxy's public statement of acceptance of any version is
+permanent authorization for you to choose that version for the
+Library.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/MANIFEST	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,645 @@
+README
+pylintrc
+setup.py
+./__init__.py
+./__pkginfo__.py
+./_exceptions.py
+./cwconfig.py
+./cwctl.py
+./cwvreg.py
+./dbapi.py
+./gettext.py
+./hercule.py
+./interfaces.py
+./md5crypt.py
+./rset.py
+./schema.py
+./schemaviewer.py
+./toolsutils.py
+./vregistry.py
+./common/__init__.py
+./common/appobject.py
+./common/entity.py
+./common/html4zope.py
+./common/i18n.py
+./common/mail.py
+./common/migration.py
+./common/mixins.py
+./common/mttransforms.py
+./common/registerers.py
+./common/rest.py
+./common/schema.py
+./common/selectors.py
+./common/tal.py
+./common/uilib.py
+./common/utils.py
+./common/view.py
+./common/test/unittest_entity.py
+./common/test/unittest_mail.py
+./common/test/unittest_migration.py
+./common/test/unittest_rest.py
+./common/test/unittest_uilib.py
+./common/test/unittest_utils.py
+./devtools/__init__.py
+./devtools/_apptest.py
+./devtools/apptest.py
+./devtools/cwtwill.py
+./devtools/devctl.py
+./devtools/fake.py
+./devtools/fill.py
+./devtools/htmlparser.py
+./devtools/livetest.py
+./devtools/migrtest.py
+./devtools/pkginfo.py
+./devtools/repotest.py
+./devtools/stresstester.py
+./devtools/testlib.py
+./devtools/test/runtests.py
+./devtools/test/unittest_dbfill.py
+./devtools/test/unittest_fill.py
+./devtools/test/unittest_testlib.py
+./entities/__init__.py
+./entities/authobjs.py
+./entities/lib.py
+./entities/schemaobjs.py
+./entities/wfobjs.py
+./entities/test/unittest_base.py
+./etwist/__init__.py
+./etwist/request.py
+./etwist/server.py
+./etwist/twconfig.py
+./etwist/twctl.py
+./goa/__init__.py
+./goa/db.py
+./goa/dbinit.py
+./goa/dbmyams.py
+./goa/gaesource.py
+./goa/goaconfig.py
+./goa/goactl.py
+./goa/goavreg.py
+./goa/rqlinterpreter.py
+./goa/testlib.py
+./goa/appobjects/__init__.py
+./goa/appobjects/components.py
+./goa/appobjects/dbmgmt.py
+./goa/appobjects/gauthservice.py
+./goa/appobjects/sessions.py
+./goa/overrides/__init__.py
+./goa/overrides/mttransforms.py
+./goa/overrides/rqlannotation.py
+./goa/overrides/server__init__.py
+./goa/overrides/server_utils.py
+./goa/overrides/toolsutils.py
+./goa/test/pytestconf.py
+./goa/test/unittest_db.py
+./goa/test/unittest_editcontroller.py
+./goa/test/unittest_metadata.py
+./goa/test/unittest_rql.py
+./goa/test/unittest_schema.py
+./goa/test/unittest_views.py
+./goa/test/data/__init__.py
+./goa/test/data/schema.py
+./goa/test/data/settings.py
+./goa/test/data/views.py
+./goa/tools/__init__.py
+./goa/tools/generate_schema_img.py
+./goa/tools/i18n.py
+./goa/tools/laxctl.py
+./server/__init__.py
+./server/checkintegrity.py
+./server/hookhelper.py
+./server/hooks.py
+./server/hooksmanager.py
+./server/migractions.py
+./server/msplanner.py
+./server/mssteps.py
+./server/pool.py
+./server/querier.py
+./server/repository.py
+./server/rqlannotation.py
+./server/rqlrewrite.py
+./server/schemahooks.py
+./server/schemaserial.py
+./server/securityhooks.py
+./server/server.py
+./server/serverconfig.py
+./server/serverctl.py
+./server/session.py
+./server/sqlutils.py
+./server/ssplanner.py
+./server/utils.py
+./server/sources/__init__.py
+./server/sources/extlite.py
+./server/sources/ldapuser.py
+./server/sources/native.py
+./server/sources/pyrorql.py
+./server/sources/rql2sql.py
+./server/test/runtests.py
+./server/test/unittest_checkintegrity.py
+./server/test/unittest_config.py
+./server/test/unittest_hookhelper.py
+./server/test/unittest_hooks.py
+./server/test/unittest_hooksmanager.py
+./server/test/unittest_migractions.py
+./server/test/unittest_querier.py
+./server/test/unittest_repository.py
+./server/test/unittest_rql2sql.py
+./server/test/unittest_rqlannotation.py
+./server/test/unittest_rqlrewrite.py
+./server/test/unittest_schemaserial.py
+./server/test/unittest_security.py
+./server/test/unittest_session.py
+./server/test/unittest_sqlutils.py
+./server/test/unittest_ssplanner.py
+./server/test/unittest_tools.py
+./sobjects/__init__.py
+./sobjects/email.py
+./sobjects/hooks.py
+./sobjects/notification.py
+./sobjects/supervising.py
+./sobjects/test/unittest_email.py
+./sobjects/test/unittest_hooks.py
+./sobjects/test/unittest_notification.py
+./sobjects/test/unittest_supervising.py
+./test/unittest_cwconfig.py
+./test/unittest_cwctl.py
+./test/unittest_dbapi.py
+./test/unittest_rset.py
+./test/unittest_schema.py
+./test/unittest_vregistry.py
+./web/__init__.py
+./web/_exceptions.py
+./web/action.py
+./web/application.py
+./web/box.py
+./web/component.py
+./web/controller.py
+./web/facet.py
+./web/form.py
+./web/htmlwidgets.py
+./web/httpcache.py
+./web/request.py
+./web/webconfig.py
+./web/webctl.py
+./web/widgets.py
+./web/test/runtests.py
+./web/test/test_views.py
+./web/test/unittest_application.py
+./web/test/unittest_controller.py
+./web/test/unittest_magicsearch.py
+./web/test/unittest_urlpublisher.py
+./web/test/unittest_urlrewrite.py
+./web/test/unittest_views_actions.py
+./web/test/unittest_views_apacherewrite.py
+./web/test/unittest_views_basecontrollers.py
+./web/test/unittest_views_baseforms.py
+./web/test/unittest_views_baseviews.py
+./web/test/unittest_views_embeding.py
+./web/test/unittest_views_navigation.py
+./web/test/unittest_views_searchrestriction.py
+./web/test/unittest_viewselector.py
+./web/test/unittest_webconfig.py
+./web/test/unittest_widgets.py
+./web/views/__init__.py
+./web/views/actions.py
+./web/views/ajaxedit.py
+./web/views/apacherewrite.py
+./web/views/authentication.py
+./web/views/basecomponents.py
+./web/views/basecontrollers.py
+./web/views/baseforms.py
+./web/views/basetemplates.py
+./web/views/baseviews.py
+./web/views/bookmark.py
+./web/views/boxes.py
+./web/views/calendar.py
+./web/views/card.py
+./web/views/debug.py
+./web/views/dynimages.py
+./web/views/editcontroller.py
+./web/views/emailaddress.py
+./web/views/embedding.py
+./web/views/eproperties.py
+./web/views/error.py
+./web/views/euser.py
+./web/views/facets.py
+./web/views/ibreadcrumbs.py
+./web/views/idownloadable.py
+./web/views/igeocodable.py
+./web/views/iprogress.py
+./web/views/magicsearch.py
+./web/views/management.py
+./web/views/massmailing.py
+./web/views/navigation.py
+./web/views/old_calendar.py
+./web/views/plots.py
+./web/views/schemaentities.py
+./web/views/searchrestriction.py
+./web/views/sessions.py
+./web/views/startup.py
+./web/views/tableview.py
+./web/views/timeline.py
+./web/views/timetable.py
+./web/views/treeview.py
+./web/views/urlpublishing.py
+./web/views/urlrewrite.py
+./web/views/vcard.py
+./web/views/wdoc.py
+./web/views/wfentities.py
+./web/views/xbel.py
+./wsgi/__init__.py
+./wsgi/handler.py
+./wsgi/request.py
+bin/cubicweb-ctl
+common/test/data/bootstrap_packages
+common/test/data/entities.py
+common/test/data/migration/0.0.3_Any.py
+common/test/data/migration/0.0.4_Any.py
+common/test/data/migration/0.1.0_Any.py
+common/test/data/migration/0.1.0_common.py
+common/test/data/migration/0.1.0_repository.py
+common/test/data/migration/0.1.0_web.py
+common/test/data/migration/0.1.2_Any.py
+common/test/data/migration/depends.map
+common/test/data/schema/Affaire.sql
+common/test/data/schema/Note.py
+common/test/data/schema/Note.sql
+common/test/data/schema/Personne.sql
+common/test/data/schema/Societe.sql
+common/test/data/schema/relations.rel
+common/test/data/server_migration/2.10.2_Any.sql
+common/test/data/server_migration/2.5.0_Any.sql
+common/test/data/server_migration/2.6.0_Any.sql
+common/test/data/server_migration/bootstrapmigration_repository.py
+devtools/test/data/bootstrap_packages
+devtools/test/data/dbfill.conf
+devtools/test/data/firstnames.txt
+devtools/test/data/schema/Bug.sql
+devtools/test/data/schema/Project.sql
+devtools/test/data/schema/Story.sql
+devtools/test/data/schema/Version.sql
+devtools/test/data/schema/custom.py
+devtools/test/data/schema/relations.rel
+devtools/test/data/views/__init__.py
+devtools/test/data/views/bug.py
+doc/cubicweb.zargo
+doc/index.txt
+doc/makefile
+doc/plan_formation_python_cubicweb.txt
+doc/querier.txt
+doc/securite.txt
+doc/.static/logilab.png
+doc/.templates/layout.html
+doc/devmanual_fr/advanced_notes.txt
+doc/devmanual_fr/archi_globale.png
+doc/devmanual_fr/chap_autres_composants_ui.txt
+doc/devmanual_fr/chap_bases_framework_cubicweb.txt
+doc/devmanual_fr/chap_configuration_instance.txt
+doc/devmanual_fr/chap_definition_schema.txt
+doc/devmanual_fr/chap_definition_workflows.txt
+doc/devmanual_fr/chap_fondements_cubicweb.txt
+doc/devmanual_fr/chap_i18n.txt
+doc/devmanual_fr/chap_manipulation_donnees.txt
+doc/devmanual_fr/chap_migration.txt
+doc/devmanual_fr/chap_mise_en_place_environnement.txt
+doc/devmanual_fr/chap_rql.txt
+doc/devmanual_fr/chap_serveur_crochets.txt
+doc/devmanual_fr/chap_serveur_notification.txt
+doc/devmanual_fr/chap_tests.txt
+doc/devmanual_fr/chap_ui_gestion_formulaire.txt
+doc/devmanual_fr/chap_ui_js_json.txt
+doc/devmanual_fr/chap_visualisation_donnees.txt
+doc/devmanual_fr/index.txt
+doc/devmanual_fr/main_template_layout.png
+doc/devmanual_fr/makefile
+doc/devmanual_fr/sect_cubicweb-ctl.txt
+doc/devmanual_fr/sect_definition_entites.txt
+doc/devmanual_fr/sect_definition_schema.txt
+doc/devmanual_fr/sect_installation.txt
+doc/devmanual_fr/sect_mercurial.txt
+doc/devmanual_fr/sect_stdlib_schemas.txt
+doc/devmanual_fr/sect_stdlib_vues.txt
+doc/html-build/genindex.html
+doc/html-build/index.html
+doc/html-build/modindex.html
+doc/html-build/plan_formation_python_cubicweb.html
+doc/html-build/querier.html
+doc/html-build/search.html
+doc/html-build/securite.html
+doc/html-build/_images/archi_globale.png
+doc/html-build/_images/main_template_layout.png
+doc/html-build/_sources/index.txt
+doc/html-build/_sources/plan_formation_python_cubicweb.txt
+doc/html-build/_sources/querier.txt
+doc/html-build/_sources/securite.txt
+doc/html-build/_sources/devmanual_fr/advanced_notes.txt
+doc/html-build/_sources/devmanual_fr/chap_autres_composants_ui.txt
+doc/html-build/_sources/devmanual_fr/chap_bases_framework_cubicweb.txt
+doc/html-build/_sources/devmanual_fr/chap_configuration_instance.txt
+doc/html-build/_sources/devmanual_fr/chap_definition_schema.txt
+doc/html-build/_sources/devmanual_fr/chap_definition_workflows.txt
+doc/html-build/_sources/devmanual_fr/chap_fondements_cubicweb.txt
+doc/html-build/_sources/devmanual_fr/chap_i18n.txt
+doc/html-build/_sources/devmanual_fr/chap_manipulation_donnees.txt
+doc/html-build/_sources/devmanual_fr/chap_migration.txt
+doc/html-build/_sources/devmanual_fr/chap_mise_en_place_environnement.txt
+doc/html-build/_sources/devmanual_fr/chap_rql.txt
+doc/html-build/_sources/devmanual_fr/chap_serveur_crochets.txt
+doc/html-build/_sources/devmanual_fr/chap_serveur_notification.txt
+doc/html-build/_sources/devmanual_fr/chap_tests.txt
+doc/html-build/_sources/devmanual_fr/chap_ui_gestion_formulaire.txt
+doc/html-build/_sources/devmanual_fr/chap_ui_js_json.txt
+doc/html-build/_sources/devmanual_fr/chap_visualisation_donnees.txt
+doc/html-build/_sources/devmanual_fr/index.txt
+doc/html-build/_sources/devmanual_fr/sect_cubicweb-ctl.txt
+doc/html-build/_sources/devmanual_fr/sect_definition_entites.txt
+doc/html-build/_sources/devmanual_fr/sect_definition_schema.txt
+doc/html-build/_sources/devmanual_fr/sect_installation.txt
+doc/html-build/_sources/devmanual_fr/sect_mercurial.txt
+doc/html-build/_sources/devmanual_fr/sect_stdlib_schemas.txt
+doc/html-build/_sources/devmanual_fr/sect_stdlib_vues.txt
+doc/html-build/_sources/source/index.txt
+doc/html-build/_static/contents.png
+doc/html-build/_static/file.png
+doc/html-build/_static/logilab.png
+doc/html-build/_static/minus.png
+doc/html-build/_static/navigation.png
+doc/html-build/_static/plus.png
+doc/html-build/devmanual_fr/advanced_notes.html
+doc/html-build/devmanual_fr/chap_autres_composants_ui.html
+doc/html-build/devmanual_fr/chap_bases_framework_cubicweb.html
+doc/html-build/devmanual_fr/chap_configuration_instance.html
+doc/html-build/devmanual_fr/chap_definition_schema.html
+doc/html-build/devmanual_fr/chap_definition_workflows.html
+doc/html-build/devmanual_fr/chap_fondements_cubicweb.html
+doc/html-build/devmanual_fr/chap_i18n.html
+doc/html-build/devmanual_fr/chap_manipulation_donnees.html
+doc/html-build/devmanual_fr/chap_migration.html
+doc/html-build/devmanual_fr/chap_mise_en_place_environnement.html
+doc/html-build/devmanual_fr/chap_rql.html
+doc/html-build/devmanual_fr/chap_serveur_crochets.html
+doc/html-build/devmanual_fr/chap_serveur_notification.html
+doc/html-build/devmanual_fr/chap_tests.html
+doc/html-build/devmanual_fr/chap_ui_gestion_formulaire.html
+doc/html-build/devmanual_fr/chap_ui_js_json.html
+doc/html-build/devmanual_fr/chap_visualisation_donnees.html
+doc/html-build/devmanual_fr/index.html
+doc/html-build/devmanual_fr/sect_cubicweb-ctl.html
+doc/html-build/devmanual_fr/sect_definition_entites.html
+doc/html-build/devmanual_fr/sect_definition_schema.html
+doc/html-build/devmanual_fr/sect_installation.html
+doc/html-build/devmanual_fr/sect_mercurial.html
+doc/html-build/devmanual_fr/sect_stdlib_schemas.html
+doc/html-build/devmanual_fr/sect_stdlib_vues.html
+doc/html-build/source/index.html
+entities/test/data/bootstrap_packages
+entities/test/data/schema.py
+i18n/en.po
+i18n/entities.pot
+i18n/fr.po
+man/cubicweb-ctl.1
+misc/cwdesklets/rql_query.display
+misc/cwdesklets/web_query.display
+misc/cwdesklets/gfx/bg.png
+misc/cwdesklets/gfx/border-left.png
+misc/cwdesklets/gfx/logo_cw.png
+misc/cwdesklets/gfx/rss.png
+misc/cwdesklets/rqlsensor/__init__.py
+misc/cwzope/cwzope.py
+misc/migration/2.37.1_Any.py
+misc/migration/2.39.0_Any.py
+misc/migration/2.42.0_Any.py
+misc/migration/2.42.1_Any.py
+misc/migration/2.43.0_Any.py
+misc/migration/2.44.0_Any.py
+misc/migration/2.45.0_Any.py
+misc/migration/2.46.0_Any.py
+misc/migration/2.47.0_Any.py
+misc/migration/2.48.8_Any.py
+misc/migration/2.49.3_Any.py
+misc/migration/2.50.0_Any.py
+misc/migration/3.0.0_Any.py
+misc/migration/bootstrapmigration_repository.py
+misc/migration/postcreate.py
+schemas/Bookmark.py
+schemas/Card.py
+schemas/_regproc.sql.mysql
+schemas/_regproc.sql.postgres
+schemas/base.py
+schemas/bootstrap.py
+server/test/data/bootstrap_packages
+server/test/data/hooks.py
+server/test/data/config1/application_hooks.py
+server/test/data/config1/bootstrap_packages
+server/test/data/config1/server-ctl.conf
+server/test/data/config1/sources
+server/test/data/config2/application_hooks.py
+server/test/data/config2/bootstrap_packages
+server/test/data/config2/server-ctl.conf
+server/test/data/config2/sources
+server/test/data/migration/postcreate.py
+server/test/data/migrschema/Affaire.py
+server/test/data/migrschema/Folder2.py
+server/test/data/migrschema/Note.py
+server/test/data/migrschema/Personne.sql
+server/test/data/migrschema/Societe.perms
+server/test/data/migrschema/Societe.sql
+server/test/data/migrschema/relations.rel
+server/test/data/schema/Affaire.py
+server/test/data/schema/Note.sql
+server/test/data/schema/Personne.sql
+server/test/data/schema/Societe.py
+server/test/data/schema/custom.py
+server/test/data/schema/note.py
+server/test/data/schema/relations.rel
+sobjects/test/data/bootstrap_packages
+sobjects/test/data/schema.py
+sobjects/test/data/sobjects/__init__.py
+web/data/IE_styles.css
+web/data/MochiKit.js
+web/data/acl.css
+web/data/ajax.js
+web/data/asc.gif
+web/data/banner.png
+web/data/bg.gif
+web/data/bg_trame_grise.png
+web/data/black-check.png
+web/data/bookmarks.js
+web/data/bullet.png
+web/data/bullet_orange.png
+web/data/button.png
+web/data/calendar.css
+web/data/calendar.gif
+web/data/calendar.js
+web/data/calendar_popup.css
+web/data/compat.js
+web/data/critical.png
+web/data/cubicweb.css
+web/data/desc.gif
+web/data/download.gif
+web/data/dublincore-button.png
+web/data/dublincore-icon.png
+web/data/edition.js
+web/data/error.png
+web/data/external_resources
+web/data/favicon.ico
+web/data/fckcwconfig.js
+web/data/feed-icon.png
+web/data/feed-icon16x16.png
+web/data/feed-icon32x32.png
+web/data/file.gif
+web/data/folder-closed.gif
+web/data/folder.gif
+web/data/form.css
+web/data/formfilter.js
+web/data/gmap.js
+web/data/gmap.utility.labeledmarker.js
+web/data/gmap_blue_marker.png
+web/data/go.png
+web/data/goa.js
+web/data/gradient-grey-up.png
+web/data/gradient-grey.gif
+web/data/help.png
+web/data/help_ie.png
+web/data/html_tree.css
+web/data/htmlhelpers.js
+web/data/icon_blank.png
+web/data/icon_bookmark.gif
+web/data/icon_emailaddress.gif
+web/data/icon_euser.gif
+web/data/icon_map.png
+web/data/icon_state.gif
+web/data/information.png
+web/data/iprogress.css
+web/data/jquery.autocomplete.css
+web/data/jquery.autocomplete.js
+web/data/jquery.js
+web/data/jquery.json.js
+web/data/jquery.tablesorter.js
+web/data/jquery.treeview.css
+web/data/jquery.treeview.js
+web/data/liveclipboard-icon.png
+web/data/loading.gif
+web/data/login.css
+web/data/logo.png
+web/data/logo.xcf
+web/data/mail.gif
+web/data/mailform.css
+web/data/microformats-button.png
+web/data/microformats-icon.png
+web/data/minus.gif
+web/data/no-check-no-border.png
+web/data/nomail.gif
+web/data/nomail.xcf
+web/data/plus.gif
+web/data/preferences.css
+web/data/print.css
+web/data/puce.png
+web/data/puce_down.png
+web/data/puce_down_black.png
+web/data/pygments.css
+web/data/python.js
+web/data/required.png
+web/data/rss-button.png
+web/data/rss.png
+web/data/schema.css
+web/data/search.png
+web/data/sendcancel.png
+web/data/sendok.png
+web/data/shadow.gif
+web/data/simile-ajax-api.js
+web/data/simile-ajax-bundle.js
+web/data/sortable.js
+web/data/suggest.css
+web/data/tablesorter.css
+web/data/timeline-big-bundle.js
+web/data/timeline-bundle.css
+web/data/timeline-stubs.js
+web/data/timeline.ext.js
+web/data/timeline.js
+web/data/timetable.css
+web/data/treeview-black-line.gif
+web/data/treeview-black.gif
+web/data/treeview-default-line.gif
+web/data/treeview-default.gif
+web/data/treeview-famfamfam-line.gif
+web/data/treeview-famfamfam.gif
+web/data/treeview-gray-line.gif
+web/data/treeview-gray.gif
+web/data/treeview-red-line.gif
+web/data/treeview-red.gif
+web/data/widgets.js
+web/data/timeline/blue-circle.png
+web/data/timeline/bubble-arrows.png
+web/data/timeline/bubble-body-and-arrows.png
+web/data/timeline/bubble-body.png
+web/data/timeline/bubble-bottom-arrow.png
+web/data/timeline/bubble-bottom-left.png
+web/data/timeline/bubble-bottom-right.png
+web/data/timeline/bubble-bottom.png
+web/data/timeline/bubble-left-arrow.png
+web/data/timeline/bubble-left.png
+web/data/timeline/bubble-right-arrow.png
+web/data/timeline/bubble-right.png
+web/data/timeline/bubble-top-arrow.png
+web/data/timeline/bubble-top-left.png
+web/data/timeline/bubble-top-right.png
+web/data/timeline/bubble-top.png
+web/data/timeline/close-button.png
+web/data/timeline/copyright-vertical.png
+web/data/timeline/copyright.png
+web/data/timeline/dark-blue-circle.png
+web/data/timeline/dark-green-circle.png
+web/data/timeline/dark-red-circle.png
+web/data/timeline/dull-blue-circle.png
+web/data/timeline/dull-green-circle.png
+web/data/timeline/dull-red-circle.png
+web/data/timeline/gray-circle.png
+web/data/timeline/green-circle.png
+web/data/timeline/message-bottom-left.png
+web/data/timeline/message-bottom-right.png
+web/data/timeline/message-left.png
+web/data/timeline/message-right.png
+web/data/timeline/message-top-left.png
+web/data/timeline/message-top-right.png
+web/data/timeline/message.png
+web/data/timeline/progress-running.gif
+web/data/timeline/red-circle.png
+web/data/timeline/sundial.png
+web/data/timeline/top-bubble.png
+web/views/edit_attributes.pt
+web/views/edit_multiple.pt
+web/views/edit_relations.pt
+web/wdoc/ChangeLog_en
+web/wdoc/ChangeLog_fr
+web/wdoc/about_en.rst
+web/wdoc/about_fr.rst
+web/wdoc/add_content_en.rst
+web/wdoc/add_content_fr.rst
+web/wdoc/advanced_usage_en.rst
+web/wdoc/advanced_usage_schema_en.rst
+web/wdoc/advanced_usage_schema_fr.rst
+web/wdoc/bookmarks_en.rst
+web/wdoc/bookmarks_fr.rst
+web/wdoc/custom_view_en.rst
+web/wdoc/custom_view_fr.rst
+web/wdoc/custom_view_last_update_en.rst
+web/wdoc/custom_view_last_update_fr.rst
+web/wdoc/custom_view_rss_en.rst
+web/wdoc/custom_view_rss_fr.rst
+web/wdoc/glossary_en.rst
+web/wdoc/glossary_fr.rst
+web/wdoc/main_en.rst
+web/wdoc/search_en.rst
+web/wdoc/search_fr.rst
+web/wdoc/search_sample_queries_en.rst
+web/wdoc/search_sample_queries_fr.rst
+web/wdoc/standard_usage_en.rst
+web/wdoc/standard_usage_fr.rst
+web/wdoc/toc.xml
+web/wdoc/tut_rql_en.rst
+web/wdoc/tut_rql_fr.rst
+web/wdoc/userprefs_en.rst
+web/wdoc/userprefs_fr.rst
+web/wdoc/images/userprefs_en.png
+web/wdoc/images/userprefs_fr.png
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/MANIFEST.in	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,28 @@
+include README
+include pylintrc
+include bin/cubicweb-*
+include man/cubicweb-ctl.1
+
+recursive-include doc *.txt *.zargo *.png *.html makefile
+
+recursive-include misc *
+
+recursive-include web/data *
+recursive-include web/wdoc *.rst *.png *.xml ChangeLog*
+
+include web/views/*.pt
+
+recursive-include etwist *.xml *.html
+
+recursive-include i18n *.pot *.po
+recursive-include schemas *.py *.rel *.sql.*
+
+recursive-include common/test/data *
+recursive-include entities/test/data *
+recursive-include sobjects/test/data *
+recursive-include server/test/data *
+recursive-include server/test sources*
+recursive-include web/test/data *.js *.css *.png *.gif *.jpg *.ico external_resources
+recursive-include devtools/test/data *
+
+prune misc/cwfs
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/README	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,18 @@
+CubicWeb semantic web framework 
+===============================
+ 
+Install
+-------
+From the source distribution, extract the tarball and run ::
+  
+    python setup.py install
+  
+For deb and rpm packages, use the tools recommended by your distribution.
+
+  
+Documentation
+-------------
+Look in the doc/ subdirectory.
+
+
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/__init__.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,299 @@
+"""CubicWeb is a generic framework to quickly build applications which describes
+relations between entitites.
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+:license: General Public License version 2 - http://www.gnu.org/licenses
+"""
+__docformat__ = "restructuredtext en"
+from cubicweb.__pkginfo__ import version as __version__
+
+import __builtin__
+# '_' is available in builtins to mark internationalized string but should
+# not be used to do the actual translation
+if not hasattr(__builtin__, '_'):
+    __builtin__._ = unicode
+
+CW_SOFTWARE_ROOT = __path__[0]
+
+import sys, os, logging
+from StringIO import StringIO
+from urllib import quote as urlquote, unquote as urlunquote
+
+from logilab.common.decorators import cached
+
+
+LLDEBUG = 5
+logging.addLevelName(LLDEBUG, 'LLDEBUG')
+
+class CubicWebLogger(logging.Logger):
+
+    def lldebug(self, msg, *args, **kwargs):
+        """
+        Log 'msg % args' with severity 'LLDEBUG'.
+
+        To pass exception information, use the keyword argument exc_info with
+        a true value, e.g.
+
+        logger.lldebug("Houston, we have a %s", "thorny problem", exc_info=1)
+        """
+        if self.manager.disable >= LLDEBUG:
+            return
+        if LLDEBUG >= self.getEffectiveLevel():
+            self._log(LLDEBUG, msg, args, **kwargs)
+
+logging.setLoggerClass(CubicWebLogger)
+
+def set_log_methods(cls, logger):
+    """bind standart logger's methods as static methods on the class
+    """
+    cls._logger = logger
+    for attr in ('lldebug', 'debug', 'info', 'warning', 'error', 'critical', 'exception'):
+        setattr(cls, attr, getattr(logger, attr))
+
+if os.environ.get('APYCOT_ROOT'):
+    logging.basicConfig(level=logging.CRITICAL)
+else:
+    logging.basicConfig()
+
+
+set_log_methods(sys.modules[__name__], logging.getLogger('cubicweb'))
+
+# make all exceptions accessible from the package
+from cubicweb._exceptions import *
+
+# convert eid to the right type, raise ValueError if it's not a valid eid
+typed_eid = int
+
+
+#def log_thread(f, w, a):
+#    print f.f_code.co_filename, f.f_code.co_name
+#import threading
+#threading.settrace(log_thread)
+
+class Binary(StringIO):
+    """customize StringIO to make sure we don't use unicode"""
+    def __init__(self, buf= ''):
+        assert isinstance(buf, (str, buffer)), \
+               "Binary objects must use raw strings, not %s" % buf.__class__
+        StringIO.__init__(self, buf)
+
+    def write(self, data):
+        assert isinstance(data, (str, buffer)), \
+               "Binary objects must use raw strings, not %s" % data.__class__
+        StringIO.write(self, data)
+
+
+class RequestSessionMixIn(object):
+    """mixin class containing stuff shared by server session and web request
+    """
+    def __init__(self, vreg):
+        self.vreg = vreg
+        try:
+            encoding = vreg.property_value('ui.encoding')
+        except: # no vreg or property not registered
+            encoding = 'utf-8'
+        self.encoding = encoding
+        # cache result of execution for (rql expr / eids),
+        # should be emptied on commit/rollback of the server session / web 
+        # connection
+        self.local_perm_cache = {}
+
+    def property_value(self, key):
+        if self.user:
+            return self.user.property_value(key)
+        return self.vreg.property_value(key)
+    
+    def etype_rset(self, etype, size=1):
+        """return a fake result set for a particular entity type"""
+        from cubicweb.rset import ResultSet
+        rset = ResultSet([('A',)]*size, '%s X' % etype,
+                         description=[(etype,)]*size)
+        def get_entity(row, col=0, etype=etype, vreg=self.vreg, rset=rset):
+            return self.vreg.etype_class(etype)(self, rset, row, col)
+        rset.get_entity = get_entity
+        return self.decorate_rset(rset)
+
+    def eid_rset(self, eid, etype=None):
+        """return a result set for the given eid without doing actual query
+        (we have the eid, we can suppose it exists and user has access to the
+        entity)
+        """
+        from cubicweb.rset import ResultSet
+        eid = typed_eid(eid)
+        if etype is None:
+            etype = self.describe(eid)[0]
+        rset = ResultSet([(eid,)], 'Any X WHERE X eid %(x)s', {'x': eid},
+                         [(etype,)])
+        return self.decorate_rset(rset)
+
+    def entity_from_eid(self, eid, etype=None):
+        rset = self.eid_rset(eid, etype)
+        if rset:
+            return rset.get_entity(0, 0)
+        else:
+            return None
+
+    # url generation methods ##################################################
+    
+    def build_url(self, method, base_url=None, **kwargs):
+        """return an absolute URL using params dictionary key/values as URL
+        parameters. Values are automatically URL quoted, and the
+        publishing method to use may be specified or will be guessed.
+        """
+        if base_url is None:
+            base_url = self.base_url()
+        if '_restpath' in kwargs:
+            assert method == 'view', method
+            path = kwargs.pop('_restpath')
+        else:
+            path = method
+        if not kwargs:
+            return u'%s%s' % (base_url, path)
+        return u'%s%s?%s' % (base_url, path, self.build_url_params(**kwargs))
+        
+
+    def build_url_params(self, **kwargs):
+        """return encoded params to incorporate them in an URL"""
+        args = []
+        for param, values in kwargs.items():
+            if not isinstance(values, (list, tuple)):
+                values = (values,)
+            for value in values:
+                args.append(u'%s=%s' % (param, self.url_quote(value)))
+        return '&'.join(args)
+
+    def url_quote(self, value, safe=''):
+        """urllib.quote is not unicode safe, use this method to do the
+        necessary encoding / decoding. Also it's designed to quote each
+        part of a url path and so the '/' character will be encoded as well.
+        """
+        if isinstance(value, unicode):
+            quoted = urlquote(value.encode(self.encoding), safe=safe)
+            return unicode(quoted, self.encoding)
+        return urlquote(str(value), safe=safe)
+
+    def url_unquote(self, quoted):
+        """returns a unicode unquoted string
+        
+        decoding is based on `self.encoding` which is the encoding
+        used in `url_quote`
+        """
+        if isinstance(quoted, unicode):
+            quoted = quoted.encode(self.encoding)
+        try:
+            return unicode(urlunquote(quoted), self.encoding)
+        except UnicodeDecodeError: # might occur on manually typed URLs
+            return unicode(urlunquote(quoted), 'iso-8859-1')
+    
+
+    # session's user related methods #####################################
+    
+    @cached
+    def user_data(self):
+        """returns a dictionnary with this user's information"""
+        userinfo = {}
+        if self.is_internal_session:
+            userinfo['login'] = "cubicweb"
+            userinfo['name'] = "cubicweb"
+            userinfo['email'] = ""
+            return userinfo
+        user = self.actual_session().user
+        rql = "Any F,S,A where U eid %(x)s, U firstname F, U surname S, U primary_email E, E address A"
+        try:
+            firstname, lastname, email = self.execute(rql, {'x': user.eid}, 'x')[0]
+            if firstname is None and lastname is None:
+                userinfo['name'] = ''
+            else:
+                userinfo['name'] = ("%s %s" % (firstname, lastname))
+            userinfo['email'] = email
+        except IndexError:
+            userinfo['name'] = None
+            userinfo['email'] = None
+        userinfo['login'] = user.login
+        return userinfo
+
+    def is_internal_session(self):
+        """overrided on the server-side"""
+        return False
+
+    # abstract methods to override according to the web front-end #############
+    
+    def base_url(self):
+        """return the root url of the application"""
+        raise NotImplementedError
+    
+    def decorate_rset(self, rset):
+        """add vreg/req (at least) attributes to the given result set """
+        raise NotImplementedError
+    
+    def describe(self, eid):
+        """return a tuple (type, sourceuri, extid) for the entity with id <eid>"""
+        raise NotImplementedError
+        
+
+# XXX 2.45 is allowing nicer entity type names, use this map for bw compat    
+ETYPE_NAME_MAP = {'Eetype': 'EEType',
+                  'Ertype': 'ERType',
+                  'Efrdef': 'EFRDef',
+                  'Enfrdef': 'ENFRDef',
+                  'Econstraint': 'EConstraint',
+                  'Econstrainttype': 'EConstraintType',
+                  'Epermission': 'EPermission',
+                  'Egroup': 'EGroup',
+                  'Euser': 'EUser',
+                  'Eproperty': 'EProperty',
+                  'Emailaddress': 'EmailAddress',
+                  'Rqlexpression': 'RQLExpression',
+                  'Trinfo': 'TrInfo',
+                  }
+
+
+
+# XXX cubic web cube migration map
+CW_MIGRATION_MAP = {'erudi': 'cubicweb',
+
+                    'eaddressbook': 'addressbook',
+                    'ebasket': 'basket',
+                    'eblog': 'blog',
+                    'ebook': 'book',
+                    'ecomment': 'comment',
+                    'ecompany': 'company',
+                    'econference':  'conference',
+                    'eemail': 'email',
+                    'eevent': 'event',
+                    'eexpense': 'expense',
+                    'efile': 'file',
+                    'einvoice': 'invoice',
+                    'elink': 'link',
+                    'emailinglist': 'mailinglist',
+                    'eperson': 'person',
+                    'eshopcart': 'shopcart',
+                    'eskillmat': 'skillmat',
+                    'etask': 'task',
+                    'eworkcase': 'workcase',
+                    'eworkorder': 'workorder',
+                    'ezone': 'zone',
+                    'i18ncontent': 'i18ncontent',
+                    'svnfile': 'vcsfile',
+                    
+                    'eclassschemes': 'keyword',
+                    'eclassfolders': 'folder',
+                    'eclasstags': 'tag',
+
+                    'jpl': 'jpl',
+                    'jplintra': 'jplintra',
+                    'jplextra': 'jplextra',
+                    'jplorg': 'jplorg',
+                    'jplrecia': 'jplrecia',
+                    'crm': 'crm',
+                    'agueol': 'agueol',
+                    'docaster': 'docaster',
+                    'asteretud': 'asteretud',
+                    
+                    # XXX temp
+                    'keywords': 'keyword',
+                    'folders': 'folder',
+                    'tags': 'tag',
+                    }
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/__pkginfo__.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,92 @@
+# pylint: disable-msg=W0622,C0103
+"""cubicweb global packaging information for the cubicweb knowledge management
+software
+"""
+
+distname = "cubicweb"
+modname = "cubicweb"
+
+numversion = (3, 0, 0)
+version = '.'.join(str(num) for num in numversion)
+
+license = 'LCL'
+copyright = '''Copyright (c) 2003-2008 LOGILAB S.A. (Paris, FRANCE).
+http://www.logilab.fr/ -- mailto:contact@logilab.fr'''
+
+author = "Logilab"
+author_email = "contact@logilab.fr"
+
+short_desc = "a repository of entities / relations for knowledge management"
+long_desc = """CubicWeb is a entities / relations based knowledge management system
+developped at Logilab.
+
+This package contains:
+* a repository server
+* a RQL command line client to the repository
+* an adaptative modpython interface to the server
+* a bunch of other management tools
+"""
+
+web = ''
+ftp = ''
+pyversions = ['2.4']
+
+
+from os import listdir, environ
+from os.path import join, isdir
+import glob
+
+scripts = [s for s in glob.glob(join('bin', 'cubicweb-*'))
+           if not s.endswith('.bat')]
+include_dirs = [join('common', 'test', 'data'),
+                join('server', 'test', 'data'),
+                join('web', 'test', 'data'),
+                join('devtools', 'test', 'data'),]
+
+
+entities_dir = 'entities'
+schema_dir = 'schemas'
+sobjects_dir = 'sobjects'
+server_migration_dir = join('misc', 'migration')
+data_dir = join('web', 'data')
+wdoc_dir = join('web', 'wdoc')
+wdocimages_dir = join(wdoc_dir, 'images')
+views_dir = join('web', 'views')
+i18n_dir = 'i18n'
+
+if environ.get('APYCOT_ROOT'):
+    # --home install
+    pydir = 'python'
+else:
+    pydir = join('python2.4', 'site-packages')
+try:
+    data_files = [
+        # common data
+        #[join('share', 'cubicweb', 'entities'),
+        # [join(entities_dir, filename) for filename in listdir(entities_dir)]],
+        # server data
+        [join('share', 'cubicweb', 'schemas'),
+         [join(schema_dir, filename) for filename in listdir(schema_dir)]],
+        #[join('share', 'cubicweb', 'sobjects'),
+        # [join(sobjects_dir, filename) for filename in listdir(sobjects_dir)]],
+        [join('share', 'cubicweb', 'migration'),
+         [join(server_migration_dir, filename)
+          for filename in listdir(server_migration_dir)]],
+        # web data
+        [join('share', 'cubicweb', 'cubes', 'shared', 'data'),
+         [join(data_dir, fname) for fname in listdir(data_dir) if not isdir(join(data_dir, fname))]],
+        [join('share', 'cubicweb', 'cubes', 'shared', 'data', 'timeline'),
+         [join(data_dir, 'timeline', fname) for fname in listdir(join(data_dir, 'timeline'))]],
+        [join('share', 'cubicweb', 'cubes', 'shared', 'wdoc'),
+         [join(wdoc_dir, fname) for fname in listdir(wdoc_dir) if not isdir(join(wdoc_dir, fname))]],
+        [join('share', 'cubicweb', 'cubes', 'shared', 'wdoc', 'images'),
+         [join(wdocimages_dir, fname) for fname in listdir(wdocimages_dir)]],
+        # XXX: .pt install should be handled properly in a near future version
+        [join('lib', pydir, 'cubicweb', 'web', 'views'),
+         [join(views_dir, fname) for fname in listdir(views_dir) if fname.endswith('.pt')]],
+        [join('share', 'cubicweb', 'cubes', 'shared', 'i18n'),
+         [join(i18n_dir, fname) for fname in listdir(i18n_dir)]],
+        ]
+except OSError:
+    # we are in an installed directory, don't care about this
+    pass
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/_exceptions.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,148 @@
+"""Exceptions shared by different cubicweb packages.
+
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from yams import ValidationError
+
+# abstract exceptions #########################################################
+
+class CubicWebException(Exception):
+    """base class for cubicweb server exception"""
+    msg = ""
+    def __str__(self):
+        if self.msg:
+            if self.args:
+                return self.msg % tuple(self.args)
+            return self.msg
+        return ' '.join(str(arg) for arg in self.args)
+
+
+class ConfigurationError(CubicWebException):
+    """a misconfiguration error"""
+
+class InternalError(CubicWebException):
+    """base class for exceptions which should not occurs"""    
+
+class SecurityError(CubicWebException): 
+    """base class for cubicweb server security exception"""
+
+class RepositoryError(CubicWebException):
+    """base class for repository exceptions"""
+
+class SourceException(CubicWebException):
+    """base class for source exceptions"""
+
+class CubicWebRuntimeError(CubicWebException):
+    """base class for runtime exceptions"""
+    
+# repository exceptions #######################################################
+
+class ConnectionError(RepositoryError):
+    """raised when a bad connection id is given or when an attempt to establish
+    a connection failed"""
+
+class AuthenticationError(ConnectionError):
+    """raised when a bad connection id is given or when an attempt to establish
+    a connection failed"""
+
+class BadConnectionId(ConnectionError):
+    """raised when a bad connection id is given or when an attempt to establish
+    a connection failed"""
+    
+BadSessionId = BadConnectionId # XXX bw compat for pyro connections
+
+class UnknownEid(RepositoryError):
+    """the eid is not defined in the system tables"""
+    msg = 'No entity with eid %s in the repository'
+
+class ETypeNotSupportedBySources(RepositoryError, InternalError):
+    """no source support an entity type"""
+    msg = 'No source supports %r entity\'s type'
+
+class RTypeNotSupportedBySources(RepositoryError, InternalError):
+    """no source support a relation type"""
+    msg = 'No source supports %r relation\'s type'
+
+    
+# security exceptions #########################################################
+
+class Unauthorized(SecurityError):
+    """raised when a user tries to perform an action without sufficient
+    credentials
+    """
+    msg = 'You are not allowed to perform this operation'
+    msg1 = 'You are not allowed to perform %s operation on %s'
+    var = None
+    #def __init__(self, *args):
+    #    self.args = args
+        
+    def __str__(self):
+        try:
+            if self.args and len(self.args) == 2:
+                return self.msg1 % self.args
+            if self.args:
+                return ' '.join(self.args)
+            return self.msg
+        except Exception, ex:
+            return str(ex)
+    
+# source exceptions ###########################################################
+
+class EidNotInSource(SourceException):
+    """trying to access an object with a particular eid from a particular
+    source has failed
+    """
+    msg = 'No entity with eid %s in %s'
+    
+    
+# registry exceptions #########################################################
+
+class RegistryException(CubicWebException):
+    """raised when an unregistered view is called"""
+
+class RegistryNotFound(RegistryException):
+    """raised when an unknown registry is requested
+
+    this is usually a programming/typo error...
+    """
+    
+class ObjectNotFound(RegistryException):
+    """raised when an unregistered object is requested
+
+    this may be a programming/typo or a misconfiguration error
+    """
+    
+# class ViewNotFound(ObjectNotFound):
+#     """raised when an unregistered view is called"""
+    
+class NoSelectableObject(RegistryException):
+    """some views with the given vid have been found but no
+    one is applyable to the result set
+    """
+
+class UnknownProperty(RegistryException):
+    """property found in database but unknown in registry"""
+
+# query exception #############################################################
+
+class QueryError(CubicWebRuntimeError):
+    """a query try to do something it shouldn't"""
+
+class NotAnEntity(CubicWebRuntimeError):
+    """raised when get_entity is called for a column which doesn't contain
+    a non-final entity
+    """
+
+# tools exceptions ############################################################
+
+class ExecutionError(Exception):
+    """server execution control error (already started, not running...)"""
+
+# pylint: disable-msg=W0611
+from logilab.common.clcommands import BadCommandUsage 
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/bin/cubicweb-ctl	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,4 @@
+#!/usr/bin/env python
+from cubicweb.cwctl import run
+import sys
+run(sys.argv[1:])
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cleanappl.sh	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,9 @@
+#!/bin/sh
+
+rm -f web/test/tmpdb*
+rm -f web/tali18n.py
+
+rm -f applications/*/test/tmpdb*
+rm -f applications/*/tali18n.py
+rm -f applications/*/i18n/*_full.po
+rm -f applications/*/data/Schema.dot
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/common/__init__.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,52 @@
+"""Common subpackage of cubicweb : defines library functions used both on the
+server side and on the client side
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+
+from logilab.common.adbh import FunctionDescr
+
+from cubicweb._exceptions import * # bw compat
+
+from rql.utils import register_function, iter_funcnode_variables
+
+class COMMA_JOIN(FunctionDescr):
+    supported_backends = ('postgres', 'sqlite',)
+    rtype = 'String'
+    
+    @classmethod
+    def st_description(cls, funcnode):
+        return ', '.join(term.get_description()
+                         for term in iter_funcnode_variables(funcnode))
+    
+register_function(COMMA_JOIN)  # XXX do not expose?
+
+
+class CONCAT_STRINGS(COMMA_JOIN):
+    aggregat = True
+    
+register_function(CONCAT_STRINGS) # XXX bw compat
+
+class GROUP_CONCAT(CONCAT_STRINGS):
+    supported_backends = ('mysql', 'postgres', 'sqlite',)
+    
+register_function(GROUP_CONCAT)
+
+
+class LIMIT_SIZE(FunctionDescr):
+    supported_backends = ('postgres', 'sqlite',)
+    rtype = 'String'
+    
+    @classmethod
+    def st_description(cls, funcnode):
+        return funcnode.children[0].get_description()
+    
+register_function(LIMIT_SIZE)
+
+
+class TEXT_LIMIT_SIZE(LIMIT_SIZE):
+    supported_backends = ('mysql', 'postgres', 'sqlite',)
+    
+register_function(TEXT_LIMIT_SIZE)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/common/appobject.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,461 @@
+"""Base class for dynamically loaded objects manipulated in the web interface
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from warnings import warn
+
+from mx.DateTime import now, oneSecond
+from simplejson import dumps
+
+from logilab.common.deprecation import obsolete
+from rql.stmts import Union, Select
+
+from cubicweb import Unauthorized
+from cubicweb.vregistry import VObject
+from cubicweb.common.utils import UStringIO
+from cubicweb.common.uilib import html_escape, ustrftime
+from cubicweb.common.registerers import yes_registerer, priority_registerer
+from cubicweb.common.selectors import yes_selector
+
+_MARKER = object()
+
+
+class Cache(dict):    
+    def __init__(self):
+        super(Cache, self).__init__()
+        self.cache_creation_date = None
+        self.latest_cache_lookup = now()
+    
+CACHE_REGISTRY = {}
+
+class AppRsetObject(VObject):
+    """This is the base class for CubicWeb application objects
+    which are selected according to a request and result set.
+    
+    Classes are kept in the vregistry and instantiation is done at selection
+    time.
+    
+    At registration time, the following attributes are set on the class:
+    :vreg:
+      the application's registry
+    :schema:
+      the application's schema
+    :config:
+      the application's configuration
+
+    At instantiation time, the following attributes are set on the instance:
+    :req:
+      current request
+    :rset:
+      result set on which the object is applied
+    """
+
+    @classmethod
+    def registered(cls, vreg):
+        cls.vreg = vreg
+        cls.schema = vreg.schema
+        cls.config = vreg.config
+        cls.register_properties()
+        return cls
+
+    @classmethod
+    def selected(cls, req, rset, row=None, col=None, **kwargs):
+        """by default web app objects are usually instantiated on
+        selection according to a request, a result set, and optional
+        row and col
+        """
+        instance = cls(req, rset)
+        instance.row = row
+        instance.col = col
+        return instance
+
+    # Eproperties definition:
+    # key: id of the property (the actual EProperty key is built using
+    #      <registry name>.<obj id>.<property id>
+    # value: tuple (property type, vocabfunc, default value, property description)
+    #        possible types are those used by `logilab.common.configuration`
+    #
+    # notice that when multiple objects exist with the same id (adaptation,
+    # overriding) only the first encountered definition is considered, so those
+    # objects can't try to have different default values for instance.
+    
+    property_defs = {}
+    
+    @classmethod
+    def register_properties(cls):
+        for propid, pdef in cls.property_defs.items():
+            pdef = pdef.copy() # may be shared
+            pdef['default'] = getattr(cls, propid, pdef['default'])
+            pdef['sitewide'] = getattr(cls, 'site_wide', pdef.get('sitewide'))
+            cls.vreg.register_property(cls.propkey(propid), **pdef)
+        
+    @classmethod
+    def propkey(cls, propid):
+        return '%s.%s.%s' % (cls.__registry__, cls.id, propid)
+            
+        
+    def __init__(self, req, rset):
+        super(AppRsetObject, self).__init__()
+        self.req = req
+        self.rset = rset
+
+    @property
+    def cursor(self): # XXX deprecate in favor of req.cursor?
+        msg = '.cursor is deprecated, use req.execute (or req.cursor if necessary)'
+        warn(msg, DeprecationWarning, stacklevel=2)
+        return self.req.cursor
+        
+    def get_cache(self, cachename):
+        """
+        NOTE: cachename should be dotted names as in :
+        - cubicweb.mycache
+        - cubes.blog.mycache 
+        - etc.
+        """
+        if cachename in CACHE_REGISTRY:
+            cache = CACHE_REGISTRY[cachename]
+        else:
+            cache = Cache()
+            CACHE_REGISTRY[cachename] = cache
+        _now = now()
+        if _now > cache.latest_cache_lookup + oneSecond:
+            ecache = self.req.execute('Any C,T WHERE C is ECache, C name %(name)s, C timestamp T', 
+                                      {'name':cachename}).get_entity(0,0)
+            cache.latest_cache_lookup = _now
+            if not ecache.valid(cache.cache_creation_date):
+                cache.empty()
+                cache.cache_creation_date = _now
+        return cache
+
+    def propval(self, propid):
+        assert self.req
+        return self.req.property_value(self.propkey(propid))
+
+    
+    def limited_rql(self):
+        """return a printable rql for the result set associated to the object,
+        with limit/offset correctly set according to maximum page size and
+        currently displayed page when necessary
+        """
+        # try to get page boundaries from the navigation component
+        # XXX we should probably not have a ref to this component here (eg in
+        #     cubicweb.common)
+        nav = self.vreg.select_component('navigation', self.req, self.rset)
+        if nav:
+            start, stop = nav.page_boundaries()
+            rql = self._limit_offset_rql(stop - start, start)
+        # result set may have be limited manually in which case navigation won't
+        # apply
+        elif self.rset.limited:
+            rql = self._limit_offset_rql(*self.rset.limited)
+        # navigation component doesn't apply and rset has not been limited, no
+        # need to limit query
+        else:
+            rql = self.rset.printable_rql()
+        return rql
+    
+    def _limit_offset_rql(self, limit, offset):
+        rqlst = self.rset.syntax_tree()
+        if len(rqlst.children) == 1:
+            select = rqlst.children[0]
+            olimit, ooffset = select.limit, select.offset
+            select.limit, select.offset = limit, offset
+            rql = rqlst.as_string(kwargs=self.rset.args)
+            # restore original limit/offset
+            select.limit, select.offset = olimit, ooffset
+        else:
+            newselect = Select()
+            newselect.limit = limit
+            newselect.offset = offset
+            aliases = [VariableRef(newselect.get_variable(vref.name, i))
+                       for i, vref in enumerate(rqlst.selection)]
+            newselect.set_with([SubQuery(aliases, rqlst)], check=False)
+            newunion = Union()
+            newunion.append(newselect)
+            rql = rqlst.as_string(kwargs=self.rset.args)
+            rqlst.parent = None
+        return rql
+    
+    # url generation methods ##################################################
+    
+    controller = 'view'
+    
+    def build_url(self, method=None, **kwargs):
+        """return an absolute URL using params dictionary key/values as URL
+        parameters. Values are automatically URL quoted, and the
+        publishing method to use may be specified or will be guessed.
+        """
+        # XXX I (adim) think that if method is passed explicitly, we should
+        #     not try to process it and directly call req.build_url()
+        if method is None:
+            method = self.controller
+            if method == 'view' and self.req.from_controller() == 'view' and \
+                   not '_restpath' in kwargs:
+                method = self.req.relative_path(includeparams=False) or 'view'
+        return self.req.build_url(method, **kwargs)
+
+    # various resources accessors #############################################
+
+    def etype_rset(self, etype, size=1):
+        """return a fake result set for a particular entity type"""
+        msg = '.etype_rset is deprecated, use req.etype_rset'
+        warn(msg, DeprecationWarning, stacklevel=2)
+        return self.req.etype_rset(etype, size=1)
+
+    def eid_rset(self, eid, etype=None):
+        """return a result set for the given eid"""
+        msg = '.eid_rset is deprecated, use req.eid_rset'
+        warn(msg, DeprecationWarning, stacklevel=2)
+        return self.req.eid_rset(eid, etype)
+    
+    def entity(self, row, col=0):
+        """short cut to get an entity instance for a particular row/column
+        (col default to 0)
+        """
+        return self.rset.get_entity(row, col)
+    
+    def complete_entity(self, row, col=0, skip_bytes=True):
+        """short cut to get an completed entity instance for a particular
+        row (all instance's attributes have been fetched)
+        """
+        entity = self.entity(row, col)
+        entity.complete(skip_bytes=skip_bytes)
+        return entity
+
+    def user_rql_callback(self, args, msg=None):
+        """register a user callback to execute some rql query and return an url
+        to call it ready to be inserted in html
+        """
+        def rqlexec(req, rql, args=None, key=None):
+            req.execute(rql, args, key)
+        return self.user_callback(rqlexec, args, msg)
+        
+    def user_callback(self, cb, args, msg=None, nonify=False):
+        """register the given user callback and return an url to call it ready to be
+        inserted in html
+        """
+        self.req.add_js('cubicweb.ajax.js')
+        if nonify:
+            # XXX < 2.48.3 bw compat
+            warn('nonify argument is deprecated', DeprecationWarning, stacklevel=2)
+            _cb = cb
+            def cb(*args):
+                _cb(*args)
+        cbname = self.req.register_onetime_callback(cb, *args)
+        msg = dumps(msg or '') 
+        return "javascript:userCallbackThenReloadPage('%s', %s)" % (
+            cbname, msg)
+
+    # formating methods #######################################################
+
+    def tal_render(self, template, variables):
+        """render a precompiled page template with variables in the given
+        dictionary as context
+        """
+        from cubicweb.common.tal import CubicWebContext
+        context = CubicWebContext()
+        context.update({'self': self, 'rset': self.rset, '_' : self.req._,
+                        'req': self.req, 'user': self.req.user})
+        context.update(variables)
+        output = UStringIO()
+        template.expand(context, output)
+        return output.getvalue()
+
+    def format_date(self, date, date_format=None, time=False):
+        """return a string for a mx date time according to application's
+        configuration
+        """
+        if date:
+            if date_format is None:
+                if time:
+                    date_format = self.req.property_value('ui.datetime-format')
+                else:
+                    date_format = self.req.property_value('ui.date-format')
+            return ustrftime(date, date_format)
+        return u''
+
+    def format_time(self, time):
+        """return a string for a mx date time according to application's
+        configuration
+        """
+        if time:
+            return ustrftime(time, self.req.property_value('ui.time-format'))
+        return u''
+
+    def format_float(self, num):
+        """return a string for floating point number according to application's
+        configuration
+        """
+        if num:
+            return self.req.property_value('ui.float-format') % num
+        return u''
+    
+    # security related methods ################################################
+    
+    def ensure_ro_rql(self, rql):
+        """raise an exception if the given rql is not a select query"""
+        first = rql.split(' ', 1)[0].lower()
+        if first in ('insert', 'set', 'delete'):
+            raise Unauthorized(self.req._('only select queries are authorized'))
+
+    # .accepts handling utilities #############################################
+    
+    accepts = ('Any',)
+
+    @classmethod
+    def accept_rset(cls, req, rset, row, col):
+        """apply the following rules:
+        * if row is None, return the sum of values returned by the method
+          for each entity's type in the result set. If any score is 0,
+          return 0.
+        * if row is specified, return the value returned by the method with
+          the entity's type of this row
+        """
+        if row is None:
+            score = 0
+            for etype in rset.column_types(0):
+                accepted = cls.accept(req.user, etype)
+                if not accepted:
+                    return 0
+                score += accepted
+            return score
+        return cls.accept(req.user, rset.description[row][col or 0])
+        
+    @classmethod
+    def accept(cls, user, etype):
+        """score etype, returning better score on exact match"""
+        if 'Any' in cls.accepts:
+            return 1
+        eschema = cls.schema.eschema(etype)
+        matching_types = [e.type for e in eschema.ancestors()]
+        matching_types.append(etype)
+        for index, basetype in enumerate(matching_types):
+            if basetype in cls.accepts:
+                return 2 + index
+        return 0
+    
+    # .rtype  handling utilities ##############################################
+    
+    @classmethod
+    def relation_possible(cls, etype):
+        """tell if a relation with etype entity is possible according to 
+        the mixin class's .etype, .rtype and .target attributes
+
+        XXX should probably be moved out to a function
+        """
+        schema = cls.schema
+        rtype = cls.rtype
+        eschema = schema.eschema(etype)
+        if hasattr(cls, 'role'):
+            role = cls.role
+        elif cls.target == 'subject':
+            role = 'object'
+        else:
+            role = 'subject'
+        # check if this relation is possible according to the schema
+        try:
+            if role == 'object':
+                rschema = eschema.object_relation(rtype)
+            else:
+                rschema = eschema.subject_relation(rtype)
+        except KeyError:
+            return False            
+        if hasattr(cls, 'etype'):
+            letype = cls.etype
+            try:
+                if role == 'object':
+                    return etype in rschema.objects(letype)
+                else:
+                    return etype in rschema.subjects(letype)
+            except KeyError, ex:
+                return False
+        return True
+
+    
+    # XXX deprecated (since 2.43) ##########################
+    
+    @obsolete('use req.datadir_url')
+    def datadir_url(self):
+        """return url of the application's data directory"""
+        return self.req.datadir_url
+
+    @obsolete('use req.external_resource()')
+    def external_resource(self, rid, default=_MARKER):
+        return self.req.external_resource(rid, default)
+
+        
+class AppObject(AppRsetObject):
+    """base class for application objects which are not selected
+    according to a result set, only by their identifier.
+    
+    Those objects may not have req, rset and cursor set.
+    """
+    
+    @classmethod
+    def selected(cls, *args, **kwargs):
+        """by default web app objects are usually instantiated on
+        selection
+        """
+        return cls(*args, **kwargs)
+
+    def __init__(self, req=None, rset=None, **kwargs):
+        self.req = req
+        self.rset = rset
+        self.__dict__.update(kwargs)
+
+
+class ReloadableMixIn(object):
+    """simple mixin for reloadable parts of UI"""
+    
+    def user_callback(self, cb, args, msg=None, nonify=False):
+        """register the given user callback and return an url to call it ready to be
+        inserted in html
+        """
+        self.req.add_js('cubicweb.ajax.js')
+        if nonify:
+            _cb = cb
+            def cb(*args):
+                _cb(*args)
+        cbname = self.req.register_onetime_callback(cb, *args)
+        return self.build_js(cbname, html_escape(msg or ''))
+        
+    def build_update_js_call(self, cbname, msg):
+        rql = html_escape(self.rset.printable_rql())
+        return "javascript:userCallbackThenUpdateUI('%s', '%s', '%s', '%s', '%s', '%s')" % (
+            cbname, self.id, rql, msg, self.__registry__, self.div_id())
+    
+    def build_reload_js_call(self, cbname, msg):
+        return "javascript:userCallbackThenReloadPage('%s', '%s')" % (cbname, msg)
+
+    build_js = build_update_js_call # expect updatable component by default
+    
+    def div_id(self):
+        return ''
+
+
+class ComponentMixIn(ReloadableMixIn):
+    """simple mixin for component object"""
+    __registry__ = 'components'
+    __registerer__ = yes_registerer
+    __selectors__ = (yes_selector,)
+    __select__ = classmethod(*__selectors__)
+
+    def div_class(self):
+        return '%s %s' % (self.propval('htmlclass'), self.id)
+
+    def div_id(self):
+        return '%sComponent' % self.id
+
+
+class Component(ComponentMixIn, AppObject):
+    """base class for non displayable components
+    """
+
+class SingletonComponent(Component):
+    """base class for non displayable unique components
+    """
+    __registerer__ = priority_registerer
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/common/entity.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1094 @@
+"""Base class for entity objects manipulated in clients
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from logilab.common import interface
+from logilab.common.compat import all
+from logilab.common.decorators import cached
+from logilab.mtconverter import TransformData, TransformError
+from rql.utils import rqlvar_maker
+
+from cubicweb import Unauthorized
+from cubicweb.vregistry import autoselectors
+from cubicweb.rset import ResultSet
+from cubicweb.common.appobject import AppRsetObject
+from cubicweb.common.registerers import id_registerer
+from cubicweb.common.selectors import yes_selector
+from cubicweb.common.uilib import printable_value, html_escape, soup2xhtml
+from cubicweb.common.mixins import MI_REL_TRIGGERS
+from cubicweb.common.mttransforms import ENGINE
+from cubicweb.schema import RQLVocabularyConstraint, RQLConstraint, bw_normalize_etype
+
+_marker = object()
+
+def greater_card(rschema, subjtypes, objtypes, index):
+    for subjtype in subjtypes:
+        for objtype in objtypes:
+            card = rschema.rproperty(subjtype, objtype, 'cardinality')[index]
+            if card in '+*':
+                return card
+    return '1'
+
+
+class RelationTags(object):
+    
+    MODE_TAGS = frozenset(('link', 'create'))
+    CATEGORY_TAGS = frozenset(('primary', 'secondary', 'generic', 'generated',
+                               'inlineview'))
+
+    def __init__(self, eclass, tagdefs):
+        self.eclass = eclass
+        self._tagdefs = {}
+        for relation, tags in tagdefs.iteritems():
+            # tags must become a set
+            if isinstance(tags, basestring):
+                tags = set((tags,))
+            elif not isinstance(tags, set):
+                tags = set(tags)
+            # relation must become a 3-uple (rtype, targettype, role)
+            if isinstance(relation, basestring):
+                self._tagdefs[(relation, '*', 'subject')] = tags
+                self._tagdefs[(relation, '*', 'object')] = tags
+            elif len(relation) == 1: # useful ?
+                self._tagdefs[(relation[0], '*', 'subject')] = tags
+                self._tagdefs[(relation[0], '*', 'object')] = tags
+            elif len(relation) == 2:
+                rtype, ttype = relation
+                ttype = bw_normalize_etype(ttype) # XXX bw compat
+                self._tagdefs[rtype, ttype, 'subject'] = tags
+                self._tagdefs[rtype, ttype, 'object'] = tags
+            elif len(relation) == 3:
+                relation = list(relation)  # XXX bw compat
+                relation[1] = bw_normalize_etype(relation[1])
+                self._tagdefs[tuple(relation)] = tags
+            else:
+                raise ValueError('bad rtag definition (%r)' % (relation,))
+        
+
+    def __initialize__(self):
+        # eclass.[*]schema are only set when registering
+        self.schema = self.eclass.schema
+        eschema = self.eschema = self.eclass.e_schema
+        rtags = self._tagdefs
+        # expand wildcards in rtags and add automatic tags
+        for rschema, tschemas, role in sorted(eschema.relation_definitions(True)):
+            rtype = rschema.type
+            star_tags = rtags.pop((rtype, '*', role), set())
+            for tschema in tschemas:
+                tags = rtags.setdefault((rtype, tschema.type, role), set(star_tags))
+                if role == 'subject':
+                    X, Y = eschema, tschema
+                    card = rschema.rproperty(X, Y, 'cardinality')[0]
+                    composed = rschema.rproperty(X, Y, 'composite') == 'object'
+                else:
+                    X, Y = tschema, eschema
+                    card = rschema.rproperty(X, Y, 'cardinality')[1]
+                    composed = rschema.rproperty(X, Y, 'composite') == 'subject'
+                # set default category tags if needed
+                if not tags & self.CATEGORY_TAGS:
+                    if card in '1+':
+                        if not rschema.is_final() and composed:
+                            category = 'generated'
+                        elif rschema.is_final() and (
+                            rschema.type.endswith('_format')
+                            or rschema.type.endswith('_encoding')):
+                            category = 'generated'
+                        else:
+                            category = 'primary'
+                    elif rschema.is_final():
+                        if (rschema.type.endswith('_format')
+                            or rschema.type.endswith('_encoding')):
+                            category = 'generated'
+                        else:
+                            category = 'secondary'
+                    else: 
+                        category = 'generic'
+                    tags.add(category)
+                if not tags & self.MODE_TAGS:
+                    if card in '?1':
+                        # by default, suppose link mode if cardinality doesn't allow
+                        # more than one relation
+                        mode = 'link'
+                    elif rschema.rproperty(X, Y, 'composite') == role:
+                        # if self is composed of the target type, create mode
+                        mode = 'create'
+                    else:
+                        # link mode by default
+                        mode = 'link'
+                    tags.add(mode)
+
+    def _default_target(self, rschema, role='subject'):
+        eschema = self.eschema
+        if role == 'subject':
+            return eschema.subject_relation(rschema).objects(eschema)[0]
+        else:
+            return eschema.object_relation(rschema).subjects(eschema)[0]
+
+    # dict compat
+    def __getitem__(self, key):
+        if isinstance(key, basestring):
+            key = (key,)
+        return self.get_tags(*key)
+
+    __contains__ = __getitem__
+    
+    def get_tags(self, rtype, targettype=None, role='subject'):
+        rschema = self.schema.rschema(rtype)
+        if targettype is None:
+            tschema = self._default_target(rschema, role)
+        else:
+            tschema = self.schema.eschema(targettype)
+        return self._tagdefs[(rtype, tschema.type, role)]
+
+    __call__ = get_tags
+    
+    def get_mode(self, rtype, targettype=None, role='subject'):
+        # XXX: should we make an assertion on rtype not being final ?
+        # assert not rschema.is_final()
+        tags = self.get_tags(rtype, targettype, role)
+        # do not change the intersection order !
+        modes = tags & self.MODE_TAGS
+        assert len(modes) == 1
+        return modes.pop()
+
+    def get_category(self, rtype, targettype=None, role='subject'):
+        tags = self.get_tags(rtype, targettype, role)
+        categories = tags & self.CATEGORY_TAGS
+        assert len(categories) == 1
+        return categories.pop()
+
+    def is_inlined(self, rtype, targettype=None, role='subject'):
+        # return set(('primary', 'secondary')) & self.get_tags(rtype, targettype)
+        return 'inlineview' in self.get_tags(rtype, targettype, role)
+
+
+class metaentity(autoselectors):
+    """this metaclass sets the relation tags on the entity class
+    and deals with the `widgets` attribute
+    """
+    def __new__(mcs, name, bases, classdict):
+        # collect baseclass' rtags
+        tagdefs = {}
+        widgets = {}
+        for base in bases:
+            tagdefs.update(getattr(base, '__rtags__', {}))
+            widgets.update(getattr(base, 'widgets', {}))
+        # update with the class' own rtags
+        tagdefs.update(classdict.get('__rtags__', {}))
+        widgets.update(classdict.get('widgets', {}))
+        # XXX decide whether or not it's a good idea to replace __rtags__
+        #     good point: transparent support for inheritance levels >= 2
+        #     bad point: we lose the information of which tags are specific
+        #                to this entity class
+        classdict['__rtags__'] = tagdefs
+        classdict['widgets'] = widgets
+        eclass = super(metaentity, mcs).__new__(mcs, name, bases, classdict)
+        # adds the "rtags" attribute (expanded at registration time by
+        # RelationTags.__initialize__)
+        eclass.rtags = RelationTags(eclass, tagdefs)
+        return eclass
+
+
+class Entity(AppRsetObject, dict):
+    """an entity instance has e_schema automagically set on
+    the class and instances has access to their issuing cursor.
+    
+    A property is set for each attribute and relation on each entity's type
+    class. Beware that among attributes, 'eid' is *NEITHER* stored in the
+    dict containment (which acts as a cache for other attributes dynamically
+    fetched)
+
+    :type e_schema: `cubicweb.schema.EntitySchema`
+    :ivar e_schema: the entity's schema
+
+    :type rest_attr: str
+    :cvar rest_attr: indicates which attribute should be used to build REST urls
+                    If None is specified, the first non-meta attribute will
+                    be used
+                    
+    :type skip_copy_for: list
+    :cvar skip_copy_for: a list of relations that should be skipped when copying
+                         this kind of entity. Note that some relations such
+                         as composite relations or relations that have '?1' as object
+                         cardinality are always skipped (see copy_relations)
+    """
+    __metaclass__ = metaentity
+    __registry__ = 'etypes'
+    __registerer__ = id_registerer
+    __selectors__ = (yes_selector,)
+    # widget definitions, merged along inheritance by the metaclass
+    widgets = {}
+    # entity type name, set by concrete subclasses
+    id = None
+    # entity schema, set by __initialize__ at registration time
+    e_schema = None
+    eid = None
+    rest_attr = None
+    skip_copy_for = ()
+
+    @classmethod
+    def registered(cls, registry):
+        """build class using descriptor at registration time"""
+        assert cls.id is not None
+        super(Entity, cls).registered(registry)
+        if cls.id != 'Any':
+            # 'Any' has no concrete schema, so no descriptors can be
+            # generated for it
+            cls.__initialize__()
+        return cls
+                
+    # NOTE(review): these duplicate RelationTags.MODE_TAGS/CATEGORY_TAGS and
+    # are not referenced anywhere in this class -- presumably leftovers;
+    # confirm before removing
+    MODE_TAGS = set(('link', 'create'))
+    CATEGORY_TAGS = set(('primary', 'secondary', 'generic', 'generated')) # , 'metadata'))
+    @classmethod
+    def __initialize__(cls):
+        """initialize a specific entity class by adding descriptors to access
+        entity type's attributes and relations
+        """
+        etype = cls.id
+        assert etype != 'Any', etype
+        cls.e_schema = eschema = cls.schema.eschema(etype)
+        for rschema, _ in eschema.attribute_definitions():
+            if rschema.type == 'eid':
+                # eid is a plain instance attribute, not cached in the dict
+                # (see class docstring)
+                continue
+            setattr(cls, rschema.type, Attribute(rschema.type))
+        mixins = []
+        for rschema, _, x in eschema.relation_definitions():
+            # plug mixins triggered by the presence of some relation
+            if (rschema, x) in MI_REL_TRIGGERS:
+                mixin = MI_REL_TRIGGERS[(rschema, x)]
+                if not (issubclass(cls, mixin) or mixin in mixins): # already mixed ?
+                    mixins.append(mixin)
+                for iface in getattr(mixin, '__implements__', ()):
+                    if not interface.implements(cls, iface):
+                        interface.extend(cls, iface)
+            if x == 'subject':
+                setattr(cls, rschema.type, SubjectRelation(rschema))
+            else:
+                # object relations are accessed through 'reverse_<rtype>'
+                attr = 'reverse_%s' % rschema.type
+                setattr(cls, attr, ObjectRelation(rschema))
+        if mixins:
+            # rewrite the mro to insert the triggered mixins
+            cls.__bases__ = tuple(mixins + [p for p in cls.__bases__ if not p is object])
+            cls.debug('plugged %s mixins on %s', mixins, etype)
+        cls.rtags.__initialize__()
+    
+    @classmethod
+    def fetch_rql(cls, user, restriction=None, fetchattrs=None, mainvar='X',
+                  settype=True, ordermethod='fetch_order'):
+        """return a rql to fetch all entities of the class type"""
+        restrictions = restriction or []
+        if settype:
+            restrictions.append('%s is %s' % (mainvar, cls.id))
+        if fetchattrs is None:
+            fetchattrs = cls.fetch_attrs
+        selection = [mainvar]
+        orderby = []
+        # start from 26 to avoid possible conflicts with X
+        varmaker = rqlvar_maker(index=26)
+        cls._fetch_restrictions(mainvar, varmaker, fetchattrs, selection,
+                                orderby, restrictions, user, ordermethod)
+        rql = 'Any %s' % ','.join(selection)
+        if orderby:
+            rql +=  ' ORDERBY %s' % ','.join(orderby)
+        rql += ' WHERE %s' % ', '.join(restrictions)
+        return rql
+    
+    @classmethod
+    def _fetch_restrictions(cls, mainvar, varmaker, fetchattrs,
+                            selection, orderby, restrictions, user,
+                            ordermethod='fetch_order', visited=None):
+        """fill `selection`, `orderby` and `restrictions` *in place* to fetch
+        the given attributes, recursing through non-final relations with
+        ?/1 cardinality onto the target class' own fetch_attrs
+
+        `visited` accumulates already-processed entity types to break cycles.
+        NOTE(review): returns None when recursion is cut short but a tuple
+        otherwise -- callers should not rely on the return value.
+        """
+        eschema = cls.e_schema
+        if visited is None:
+            visited = set((eschema.type,))
+        elif eschema.type in visited:
+            # avoid infinite recursion
+            return
+        else:
+            visited.add(eschema.type)
+        _fetchattrs = []
+        for attr in fetchattrs:
+            try:
+                rschema = eschema.subject_relation(attr)
+            except KeyError:
+                cls.warning('skipping fetch_attr %s defined in %s (not found in schema)',
+                            attr, cls.id)
+                continue
+            # skip attributes the user is not allowed to read
+            if not user.matching_groups(rschema.get_groups('read')):
+                continue
+            var = varmaker.next()
+            selection.append(var)
+            restriction = '%s %s %s' % (mainvar, attr, var)
+            restrictions.append(restriction)
+            if not rschema.is_final():
+                # XXX this does not handle several destination types
+                desttype = rschema.objects(eschema.type)[0]
+                card = rschema.rproperty(eschema, desttype, 'cardinality')[0]
+                if card not in '?1':
+                    # multi-valued relation: drop the selection/restriction
+                    # just added and skip it
+                    selection.pop()
+                    restrictions.pop()
+                    continue
+                if card == '?':
+                    restrictions[-1] += '?' # left outer join if not mandatory
+                destcls = cls.vreg.etype_class(desttype)
+                destcls._fetch_restrictions(var, varmaker, destcls.fetch_attrs,
+                                            selection, orderby, restrictions,
+                                            user, ordermethod, visited=visited)
+            orderterm = getattr(cls, ordermethod)(attr, var)
+            if orderterm:
+                orderby.append(orderterm)
+        return selection, orderby, restrictions
+
+    def __init__(self, req, rset, row=None, col=0):
+        """initialize the entity from the given result set cell (row, col),
+        taking its eid from the result set when available
+        """
+        AppRsetObject.__init__(self, req, rset)
+        dict.__init__(self)
+        self.row, self.col = row, col
+        # per-relation cache of (rset, entities), see set_related_cache
+        self._related_cache = {}
+        if rset is not None:
+            self.eid = rset[row][col]
+        else:
+            self.eid = None
+        self._is_saved = True
+        
+    def __repr__(self):
+        return '<Entity %s %s %s at %s>' % (
+            self.e_schema, self.eid, self.keys(), id(self))
+
+    def __nonzero__(self):
+        # entities are always truthy, even though the dict base class would
+        # evaluate to False while no attribute is cached yet
+        return True
+
+    def __hash__(self):
+        # identity-based hash; NOTE(review): dict equality (inherited) is
+        # content-based, so equal entities may hash differently -- confirm
+        # callers never mix entities with equal content in one hash container
+        return id(self)
+
+    def pre_add_hook(self):
+        """hook called by the repository before doing anything to add the entity
+        (before_add entity hooks have not been called yet). This gives the
+        occasion to do weird stuff such as autocast (File -> Image for instance).
+        
+        This method must return the actual entity to be added.
+        """
+        return self
+    
+    def set_eid(self, eid):
+        """set the entity eid, both as instance attribute and in the
+        attribute cache
+        """
+        self.eid = self['eid'] = eid
+
+    def has_eid(self):
+        """return True if the entity has an attributed eid (False
+        meaning that the entity has to be created)
+        """
+        try:
+            # eid may be None or some non-numeric placeholder before creation
+            int(self.eid)
+            return True
+        except (ValueError, TypeError):
+            return False
+
+    def is_saved(self):
+        """during entity creation, there is some time during which the entity
+        has an eid attributed though it's not saved (eg during before_add_entity
+        hooks). You can use this method to ensure the entity has an eid *and* is
+        saved in its source.
+        """
+        # _is_saved defaults to True in __init__; presumably toggled by the
+        # repository during creation -- confirm against server-side code
+        return self.has_eid() and self._is_saved
+    
+    @cached
+    def metainformation(self):
+        """return a dict with this entity's 'type', 'source' definition and
+        external id ('extid'), as described by the request (cached)
+        """
+        res = dict(zip(('type', 'source', 'extid'), self.req.describe(self.eid)))
+        # replace the source name by its full definition dict
+        res['source'] = self.req.source_defs()[res['source']]
+        return res
+
+    def check_perm(self, action):
+        """check the request's user may apply `action` on this entity;
+        presumably raises when not allowed -- see EntitySchema.check_perm
+        """
+        self.e_schema.check_perm(self.req, action, self.eid)
+
+    def has_perm(self, action):
+        """return whether the request's user may apply `action` on this entity"""
+        return self.e_schema.has_perm(self.req, action, self.eid)
+        
+    def view(self, vid, __registry='views', **kwargs):
+        """shortcut to apply a view on this entity"""
+        # __registry is name-mangled on purpose so it can't collide with a
+        # view's own keyword arguments
+        return self.vreg.render(__registry, vid, self.req, rset=self.rset,
+                                row=self.row, col=self.col, **kwargs)
+
+    def absolute_url(self, method=None, **kwargs):
+        """return an absolute url to view this entity
+
+        :param method: url method; None or 'view' produce a REST-style path,
+          anything else falls back to an explicit rql on the entity's eid
+        """
+        # in linksearch mode, we don't want external urls else selecting
+        # the object for use in the relation is tricky
+        # XXX search_state is web specific
+        if getattr(self.req, 'search_state', ('normal',))[0] == 'normal':
+            kwargs['base_url'] = self.metainformation()['source'].get('base-url')
+        if method is None or method == 'view':
+            kwargs['_restpath'] = self.rest_path()
+        else:
+            kwargs['rql'] = 'Any X WHERE X eid %s' % self.eid
+        return self.build_url(method, **kwargs)
+
+    def rest_path(self):
+        """returns a REST-like (relative) path for this entity"""
+        mainattr, needcheck = self._rest_attr_info()
+        etype = str(self.e_schema)
+        if mainattr == 'eid':
+            value = self.eid
+        else:
+            value = getattr(self, mainattr)
+            if value is None:
+                # attribute not set: fall back to an eid-based path
+                return '%s/eid/%s' % (etype.lower(), self.eid)
+        if needcheck:
+            # make sure url is not ambiguous
+            rql = 'Any COUNT(X) WHERE X is %s, X %s %%(value)s' % (etype, mainattr)
+            if value is not None:
+                nbresults = self.req.execute(rql, {'value' : value})[0][0]
+                # maybe an assertion that nbresults is not 0 would be a good idea
+                if nbresults != 1: # ambiguous (or not found): use the eid instead
+                    return '%s/eid/%s' % (etype.lower(), self.eid)
+        return '%s/%s' % (etype.lower(), self.req.url_quote(value))
+
+    @classmethod
+    def _rest_attr_info(cls):
+        """return (attribute name, needcheck) used to build REST paths:
+        the explicit rest_attr if set, else the first final unique attribute
+        found in the schema, else 'eid'; `needcheck` tells whether uniqueness
+        must be verified at url-build time
+        """
+        mainattr, needcheck = 'eid', True
+        if cls.rest_attr:
+            mainattr = cls.rest_attr
+            needcheck = not cls.e_schema.has_unique_values(mainattr)
+        else:
+            for rschema in cls.e_schema.subject_relations():
+                if rschema.is_final() and rschema != 'eid' and cls.e_schema.has_unique_values(rschema):
+                    mainattr = str(rschema)
+                    needcheck = False
+                    break
+        if mainattr == 'eid':
+            # eids are unique by construction
+            needcheck = False
+        return mainattr, needcheck
+
+    @cached
+    def formatted_attrs(self):
+        """returns the list of attributes which have some format information
+        (i.e. rich text strings)
+        """
+        attrs = []
+        for rschema, attrschema in self.e_schema.attribute_definitions():
+            # only String attributes with a companion <attr>_format attribute
+            if attrschema.type == 'String' and self.has_format(rschema):
+                attrs.append(rschema.type)
+        return attrs
+        
+    def format(self, attr):
+        """return the mime type format for an attribute (if specified),
+        read from the companion '<attr>_format' attribute
+        """
+        return getattr(self, '%s_format' % attr, None)
+    
+    def text_encoding(self, attr):
+        """return the text encoding for an attribute, default to site encoding
+        """
+        encoding = getattr(self, '%s_encoding' % attr, None)
+        return encoding or self.vreg.property_value('ui.encoding')
+
+    def has_format(self, attr):
+        """return true if this entity's schema has a format field for the given
+        attribute (i.e. a '<attr>_format' subject relation)
+        """
+        return self.e_schema.has_subject_relation('%s_format' % attr)
+    
+    def has_text_encoding(self, attr):
+        """return true if this entity's schema has an encoding field for the
+        given attribute (i.e. a '<attr>_encoding' subject relation)
+        """
+        return self.e_schema.has_subject_relation('%s_encoding' % attr)
+
+    def printable_value(self, attr, value=_marker, attrtype=None,
+                        format='text/html', displaytime=True):
+        """return a displayable value (i.e. unicode string) which may contains
+        html tags
+
+        :param attr: name of the attribute to display
+        :param value: the value, fetched from the entity when not given
+        :param attrtype: the attribute's type, looked up in the schema by default
+        :param format: target mime type of the returned string
+        :param displaytime: whether time should be displayed for date attributes
+        """
+        attr = str(attr)
+        if value is _marker:
+            value = getattr(self, attr)
+        if isinstance(value, basestring):
+            value = value.strip()
+        if value is None or value == '': # don't use "not", 0 is an acceptable value
+            return u''
+        if attrtype is None:
+            attrtype = self.e_schema.destination(attr)
+        props = self.e_schema.rproperties(attr)
+        if attrtype == 'String':
+            # internationalized *and* formatted string such as schema
+            # description...
+            if props.get('internationalizable'):
+                value = self.req._(value)
+            attrformat = self.format(attr)
+            if attrformat:
+                return self.mtc_transform(value, attrformat, format,
+                                          self.req.encoding)
+        elif attrtype == 'Bytes':
+            attrformat = self.format(attr)
+            if attrformat:
+                # fall back to the request encoding when the attribute has
+                # no companion encoding attribute
+                try:
+                    encoding = getattr(self, '%s_encoding' % attr)
+                except AttributeError:
+                    encoding = self.req.encoding
+                return self.mtc_transform(value.getvalue(), attrformat, format,
+                                          encoding)
+            return u''
+        value = printable_value(self.req, attrtype, value, props, displaytime)
+        if format == 'text/html':
+            value = html_escape(value)
+        return value
+
+    def mtc_transform(self, data, format, target_format, encoding,
+                      _engine=ENGINE):
+        """convert `data` from mime type `format` to `target_format` using
+        the mtconverter engine, returning a unicode string
+        """
+        trdata = TransformData(data, format, encoding, appobject=self)
+        data = _engine.convert(trdata, target_format).decode()
+        if format == 'text/html':
+            # clean up the generated markup into well-formed xhtml
+            data = soup2xhtml(data, self.req.encoding)                
+        return data
+    
+    # entity cloning ##########################################################
+
+    def copy_relations(self, ceid):
+        """copy relations of the object with the given eid on this object
+
+        By default meta and composite relations are skipped.
+        Override this if you want another behaviour
+        """
+        assert self.has_eid()
+        execute = self.req.execute
+        # subject relations
+        for rschema in self.e_schema.subject_relations():
+            if rschema.meta or rschema.is_final():
+                continue
+            # skip already defined relations
+            if getattr(self, rschema.type):
+                continue
+            if rschema.type in self.skip_copy_for:
+                continue
+            if rschema.type == 'in_state':
+                # if the workflow is defining an initial state (XXX AND we are
+                # not in the managers group? not done to be more consistent)
+                # don't try to copy in_state
+                if execute('Any S WHERE S state_of ET, ET initial_state S,'
+                           'ET name %(etype)s', {'etype': str(self.e_schema)}):
+                    continue
+            # skip composite relation
+            if self.e_schema.subjrproperty(rschema, 'composite'):
+                continue
+            # skip relation with card in ?1 else we either change the copied
+            # object (inlined relation) or inserting some inconsistency
+            if self.e_schema.subjrproperty(rschema, 'cardinality')[1] in '?1':
+                continue
+            rql = 'SET X %s V WHERE X eid %%(x)s, Y eid %%(y)s, Y %s V' % (
+                rschema.type, rschema.type)
+            execute(rql, {'x': self.eid, 'y': ceid}, ('x', 'y'))
+            self.clear_related_cache(rschema.type, 'subject')
+        # object relations
+        for rschema in self.e_schema.object_relations():
+            if rschema.meta:
+                continue
+            # skip already defined relations
+            if getattr(self, 'reverse_%s' % rschema.type):
+                continue
+            # skip composite relation
+            if self.e_schema.objrproperty(rschema, 'composite'):
+                continue
+            # skip relation with card in ?1 else we either change the copied
+            # object (inlined relation) or inserting some inconsistency
+            if self.e_schema.objrproperty(rschema, 'cardinality')[0] in '?1':
+                continue
+            rql = 'SET V %s X WHERE X eid %%(x)s, Y eid %%(y)s, V %s Y' % (
+                rschema.type, rschema.type)
+            execute(rql, {'x': self.eid, 'y': ceid}, ('x', 'y'))
+            self.clear_related_cache(rschema.type, 'object')
+
+    # data fetching methods ###################################################
+
+    @cached
+    def as_rset(self):
+        """returns a resultset containing `self` information (cached)"""
+        rset = ResultSet([(self.eid,)], 'Any X WHERE X eid %(x)s',
+                         {'x': self.eid}, [(self.id,)])
+        return self.req.decorate_rset(rset)
+                       
+    def to_complete_relations(self):
+        """yield (rschema, role) pairs for relations to fetch when calling
+        .complete(): only inlined, non-ambiguous subject relations readable
+        by the request's user
+        """
+        for rschema in self.e_schema.subject_relations():
+            if rschema.is_final():
+                continue
+            if len(rschema.objects(self.e_schema)) > 1:
+                # ambigous relations, the querier doesn't handle
+                # outer join correctly in this case
+                continue
+            if rschema.inlined:
+                matching_groups = self.req.user.matching_groups
+                # require read permission on the relation and on all its
+                # possible target types
+                if matching_groups(rschema.get_groups('read')) and \
+                   all(matching_groups(es.get_groups('read'))
+                       for es in rschema.objects(self.e_schema)):
+                    yield rschema, 'subject'
+                    
+    def to_complete_attributes(self, skip_bytes=True):
+        """yield the names of attributes to fetch when calling .complete();
+        unreadable attributes (and Password) are cached as None instead
+        """
+        for rschema, attrschema in self.e_schema.attribute_definitions():
+            # skip binary data by default
+            if skip_bytes and attrschema.type == 'Bytes':
+                continue
+            attr = rschema.type
+            if attr == 'eid':
+                continue
+            # password retrieval is blocked at the repository server level
+            if not self.req.user.matching_groups(rschema.get_groups('read')) \
+                   or attrschema.type == 'Password':
+                self[attr] = None
+                continue
+            yield attr
+            
+    def complete(self, attributes=None, skip_bytes=True):
+        """complete this entity by adding missing attributes (i.e. query the
+        repository to fill the entity)
+
+        :type skip_bytes: bool
+        :param skip_bytes:
+          if true, attribute of type Bytes won't be considered
+        """
+        assert self.has_eid()
+        varmaker = rqlvar_maker()
+        V = varmaker.next()
+        rql = ['WHERE %s eid %%(x)s' % V]
+        selected = []
+        for attr in (attributes or self.to_complete_attributes(skip_bytes)):
+            # if attribute already in entity, nothing to do
+            if self.has_key(attr):
+                continue
+            # case where attribute must be completed, but is not yet in entity
+            var = varmaker.next()
+            rql.append('%s %s %s' % (V, attr, var))
+            selected.append((attr, var))
+        # +1 since this doen't include the main variable
+        lastattr = len(selected) + 1
+        if attributes is None:
+            # fetch additional relations (restricted to 0..1 relations)
+            for rschema, role in self.to_complete_relations():
+                rtype = rschema.type
+                if self.relation_cached(rtype, role):
+                    continue
+                var = varmaker.next()
+                if role == 'subject':
+                    targettype = rschema.objects(self.e_schema)[0]
+                    card = rschema.rproperty(self.e_schema, targettype,
+                                             'cardinality')[0]
+                    if card == '1':
+                        rql.append('%s %s %s' % (V, rtype, var))
+                    else: # '?"
+                        rql.append('%s %s %s?' % (V, rtype, var))
+                else:
+                    targettype = rschema.subjects(self.e_schema)[1]
+                    card = rschema.rproperty(self.e_schema, targettype,
+                                             'cardinality')[1]
+                    if card == '1':
+                        rql.append('%s %s %s' % (var, rtype, V))
+                    else: # '?"
+                        rql.append('%s? %s %s' % (var, rtype, V))
+                assert card in '1?', '%s %s %s %s' % (self.e_schema, rtype,
+                                                      role, card)
+                selected.append(((rtype, role), var))
+        if selected:
+            # select V, we need it as the left most selected variable
+            # if some outer join are included to fetch inlined relations
+            rql = 'Any %s,%s %s' % (V, ','.join(var for attr, var in selected),
+                                    ','.join(rql))
+            execute = getattr(self.req, 'unsafe_execute', self.req.execute)
+            rset = execute(rql, {'x': self.eid}, 'x', build_descr=False)[0]
+            # handle attributes
+            for i in xrange(1, lastattr):
+                self[str(selected[i-1][0])] = rset[i]
+            # handle relations
+            for i in xrange(lastattr, len(rset)):
+                rtype, x = selected[i-1][0]
+                value = rset[i]
+                if value is None:
+                    rrset = ResultSet([], rql, {'x': self.eid})
+                    self.req.decorate_rset(rrset)
+                else:
+                    rrset = self.req.eid_rset(value)
+                self.set_related_cache(rtype, x, rrset)
+                
+    def get_value(self, name):
+        """get value for the attribute relation <name>, query the repository
+        to get the value if necessary.
+
+        :type name: str
+        :param name: name of the attribute to get
+        """
+        try:
+            value = self[name]
+        except KeyError:
+            if not self.is_saved():
+                # not yet in the source: nothing to fetch
+                return None
+            rql = "Any A WHERE X eid %%(x)s, X %s A" % name
+            # XXX should we really use unsafe_execute here??
+            execute = getattr(self.req, 'unsafe_execute', self.req.execute)
+            try:
+                rset = execute(rql, {'x': self.eid}, 'x')
+            except Unauthorized:
+                # cache None so we don't retry on every access
+                self[name] = value = None
+            else:
+                assert rset.rowcount <= 1, (self, rql, rset.rowcount)
+                try:
+                    self[name] = value = rset.rows[0][0]
+                except IndexError:
+                    # probably a multisource error
+                    self.critical("can't get value for attribute %s of entity with eid %s",
+                                  name, self.eid)
+                    if self.e_schema.destination(name) == 'String':
+                        self[name] = value = self.req._('unaccessible')
+                    else:
+                        self[name] = value = None
+        return value
+
+    def related(self, rtype, role='subject', limit=None, entities=False):
+        """returns a resultset of related entities
+        
+        :param role: is the role played by 'self' in the relation ('subject' or 'object')
+        :param limit: resultset's maximum size
+        :param entities: if True, the entities are returned; if False, a result set is returned
+        """
+        try:
+            return self.related_cache(rtype, role, entities, limit)
+        except KeyError:
+            pass
+        assert self.has_eid()
+        rql = self.related_rql(rtype, role)
+        rset = self.req.execute(rql, {'x': self.eid}, 'x')
+        self.set_related_cache(rtype, role, rset)
+        return self.related(rtype, role, limit, entities)
+
+    def related_rql(self, rtype, role='subject'):
+        rschema = self.schema[rtype]
+        if role == 'subject':
+            targettypes = rschema.objects(self.e_schema)
+            restriction = 'E eid %%(x)s, E %s X' % rtype
+            card = greater_card(rschema, (self.e_schema,), targettypes, 0)
+        else:
+            targettypes = rschema.subjects(self.e_schema)
+            restriction = 'E eid %%(x)s, X %s E' % rtype
+            card = greater_card(rschema, targettypes, (self.e_schema,), 1)
+        if len(targettypes) > 1:
+            fetchattrs = set()
+            for ttype in targettypes:
+                etypecls = self.vreg.etype_class(ttype)
+                fetchattrs &= frozenset(etypecls.fetch_attrs)
+            rql = etypecls.fetch_rql(self.req.user, [restriction], fetchattrs,
+                                     settype=False)
+        else:
+            etypecls = self.vreg.etype_class(targettypes[0])
+            rql = etypecls.fetch_rql(self.req.user, [restriction], settype=False)
+        # optimisation: remove ORDERBY if cardinality is 1 or ? (though
+        # greater_card returns 1 in both of those cases)
+        if card == '1':
+            if ' ORDERBY ' in rql:
+                rql = '%s WHERE %s' % (rql.split(' ORDERBY ', 1)[0],
+                                       rql.split(' WHERE ', 1)[1])
+        elif not ' ORDERBY ' in rql:
+            args = tuple(rql.split(' WHERE ', 1))
+            rql = '%s ORDERBY Z DESC WHERE X modification_date Z, %s' % args
+        return rql
+    
+    # generic vocabulary methods ##############################################
+
+    def vocabulary(self, rtype, role='subject', limit=None):
+        """vocabulary functions must return a list of couples
+        (label, eid) that will typically be used to fill the
+        edition view's combobox.
+        
+        If `eid` is None in one of these couples, it should be
+        interpreted as a separator in case vocabulary results are grouped
+        """
+        try:
+            vocabfunc = getattr(self, '%s_%s_vocabulary' % (role, rtype))
+        except AttributeError:
+            vocabfunc = getattr(self, '%s_relation_vocabulary' % role)
+        # NOTE: it is the responsibility of `vocabfunc` to sort the result
+        #       (directly through RQL or via a python sort). This is also
+        #       important because `vocabfunc` might return a list with
+        #       couples (label, None) which act as separators. In these
+        #       cases, it doesn't make sense to sort results afterwards.
+        return vocabfunc(rtype, limit)
+            
+    def subject_relation_vocabulary(self, rtype, limit=None):
+        """default vocabulary method for the given relation, looking for
+        relation's object entities (i.e. self is the subject)
+        """
+        if isinstance(rtype, basestring):
+            rtype = self.schema.rschema(rtype)
+        done = None
+        assert not rtype.is_final(), rtype
+        if self.has_eid():
+            done = set(e.eid for e in getattr(self, str(rtype)))
+        result = []
+        rsetsize = None
+        for objtype in rtype.objects(self.e_schema):
+            if limit is not None:
+                rsetsize = limit - len(result)
+            result += self.relation_vocabulary(rtype, objtype, 'subject',
+                                               rsetsize, done)
+            if limit is not None and len(result) >= limit:
+                break
+        return result
+
+    def object_relation_vocabulary(self, rtype, limit=None):
+        """default vocabulary method for the given relation, looking for
+        relation's subject entities (i.e. self is the object)
+        """
+        if isinstance(rtype, basestring):
+            rtype = self.schema.rschema(rtype)
+        done = None
+        if self.has_eid():
+            done = set(e.eid for e in getattr(self, 'reverse_%s' % rtype))
+        result = []
+        rsetsize = None
+        for subjtype in rtype.subjects(self.e_schema):
+            if limit is not None:
+                rsetsize = limit - len(result)
+            result += self.relation_vocabulary(rtype, subjtype, 'object',
+                                               rsetsize, done)
+            if limit is not None and len(result) >= limit:
+                break
+        return result
+
+    def relation_vocabulary(self, rtype, targettype, role,
+                            limit=None, done=None):
+        if done is None:
+            done = set()
+        req = self.req
+        rset = self.unrelated(rtype, targettype, role, limit)
+        res = []
+        for entity in rset.entities():
+            if entity.eid in done:
+                continue
+            done.add(entity.eid)
+            res.append((entity.view('combobox'), entity.eid))
+        return res
+
+    def unrelated_rql(self, rtype, targettype, role, ordermethod=None,
+                      vocabconstraints=True):
+        """build a rql to fetch `targettype` entities unrelated to this entity
+        using (rtype, role) relation
+        """
+        ordermethod = ordermethod or 'fetch_unrelated_order'
+        if isinstance(rtype, basestring):
+            rtype = self.schema.rschema(rtype)
+        if role == 'subject':
+            evar, searchedvar = 'S', 'O'
+            subjtype, objtype = self.e_schema, targettype
+        else:
+            searchedvar, evar = 'S', 'O'
+            objtype, subjtype = self.e_schema, targettype
+        if self.has_eid():
+            restriction = ['NOT S %s O' % rtype, '%s eid %%(x)s' % evar]
+        else:
+            restriction = []
+        constraints = rtype.rproperty(subjtype, objtype, 'constraints')
+        if vocabconstraints:
+            # RQLConstraint is a subclass of RQLVocabularyConstraint, so they
+            # will be included as well
+            restriction += [cstr.restriction for cstr in constraints
+                            if isinstance(cstr, RQLVocabularyConstraint)]
+        else:
+            restriction += [cstr.restriction for cstr in constraints
+                            if isinstance(cstr, RQLConstraint)]
+        etypecls = self.vreg.etype_class(targettype)
+        rql = etypecls.fetch_rql(self.req.user, restriction,
+                                 mainvar=searchedvar, ordermethod=ordermethod)
+        # ensure we have an order defined
+        if not ' ORDERBY ' in rql:
+            before, after = rql.split(' WHERE ', 1)
+            rql = '%s ORDERBY %s WHERE %s' % (before, searchedvar, after)
+        return rql
+    
+    def unrelated(self, rtype, targettype, role='subject', limit=None,
+                  ordermethod=None):
+        """return a result set of target type objects that may be related
+        by a given relation, with self as subject or object
+        """
+        rql = self.unrelated_rql(rtype, targettype, role, ordermethod)
+        if limit is not None:
+            before, after = rql.split(' WHERE ', 1)
+            rql = '%s LIMIT %s WHERE %s' % (before, limit, after)
+        if self.has_eid():
+            return self.req.execute(rql, {'x': self.eid})
+        return self.req.execute(rql)
+        
+    # relations cache handling ################################################
+    
+    def relation_cached(self, rtype, role):
+        """return true if the given relation is already cached on the instance
+        """
+        return '%s_%s' % (rtype, role) in self._related_cache
+    
+    def related_cache(self, rtype, role, entities=True, limit=None):
+        """return values for the given relation if it's cached on the instance,
+        else raise `KeyError`
+        """
+        res = self._related_cache['%s_%s' % (rtype, role)][entities]
+        if limit:
+            if entities:
+                res = res[:limit]
+            else:
+                res = res.limit(limit)
+        return res
+    
+    def set_related_cache(self, rtype, role, rset, col=0):
+        """set cached values for the given relation"""
+        if rset:
+            related = list(rset.entities(col))
+            rschema = self.schema.rschema(rtype)
+            if role == 'subject':
+                rcard = rschema.rproperty(self.e_schema, related[0].e_schema,
+                                          'cardinality')[1]
+                target = 'object'
+            else:
+                rcard = rschema.rproperty(related[0].e_schema, self.e_schema,
+                                          'cardinality')[0]
+                target = 'subject'
+            if rcard in '?1':
+                for rentity in related:
+                    rentity._related_cache['%s_%s' % (rtype, target)] = (self.as_rset(), [self])
+        else:
+            related = []
+        self._related_cache['%s_%s' % (rtype, role)] = (rset, related)
+        
+    def clear_related_cache(self, rtype=None, role=None):
+        """clear cached values for the given relation or the entire cache if
+        no relation is given
+        """
+        if rtype is None:
+            self._related_cache = {}
+        else:
+            assert role
+            self._related_cache.pop('%s_%s' % (rtype, role), None)
+        
+    # raw edition utilities ###################################################
+    
+    def set_attributes(self, **kwargs):
+        assert kwargs
+        relations = []
+        for key in kwargs:
+            relations.append('X %s %%(%s)s' % (key, key))
+        kwargs['x'] = self.eid
+        self.req.execute('SET %s WHERE X eid %%(x)s' % ','.join(relations),
+                         kwargs, 'x')
+        for key, val in kwargs.iteritems():
+            self[key] = val
+            
+    def delete(self):
+        assert self.has_eid(), self.eid
+        self.req.execute('DELETE %s X WHERE X eid %%(x)s' % self.e_schema,
+                         {'x': self.eid})
+    
+    # server side utilities ###################################################
+        
+    def set_defaults(self):
+        """set default values according to the schema"""
+        self._default_set = set()
+        for attr, value in self.e_schema.defaults():
+            if not self.has_key(attr):
+                self[str(attr)] = value
+                self._default_set.add(attr)
+
+    def check(self, creation=False):
+        """check this entity against its schema. Only final relations
+        are checked here; constraints on actual relations are checked in hooks
+        """
+        # necessary since eid is handled specifically and yams require it to be
+        # in the dictionary
+        if self.req is None:
+            _ = unicode
+        else:
+            _ = self.req._
+        self.e_schema.check(self, creation=creation, _=_)
+
+    def fti_containers(self, _done=None):
+        if _done is None:
+            _done = set()
+        _done.add(self.eid)
+        containers = tuple(self.e_schema.fulltext_containers())
+        if containers:
+            for rschema, target in containers:
+                if target == 'object':
+                    targets = getattr(self, rschema.type)
+                else:
+                    targets = getattr(self, 'reverse_%s' % rschema)
+                for entity in targets:
+                    if entity.eid in _done:
+                        continue
+                    for container in entity.fti_containers(_done):
+                        yield container
+        else:
+            yield self
+                    
+    def get_words(self):
+        """used by the full text indexer to get words to index
+
+        this method should only be used on the repository side since it depends
+        on the indexer package
+        
+        :rtype: list
+        :return: the list of indexable words of this entity
+        """
+        from indexer.query_objects import tokenize
+        words = []
+        for rschema in self.e_schema.indexable_attributes():
+            try:
+                value = self.printable_value(rschema, format='text/plain')
+            except TransformError, ex:
+                continue
+            except:
+                self.exception("can't add value of %s to text index for entity %s",
+                               rschema, self.eid)
+                continue
+            if value:
+                words += tokenize(value)
+        
+        for rschema, role in self.e_schema.fulltext_relations():
+            if role == 'subject':
+                for entity in getattr(self, rschema.type):
+                    words += entity.get_words()
+            else: # if role == 'object':
+                for entity in getattr(self, 'reverse_%s' % rschema.type):
+                    words += entity.get_words()
+        return words
+
+
+# attribute and relation descriptors ##########################################
+
+class Attribute(object):
+    """descriptor that controls schema attribute access"""
+
+    def __init__(self, attrname):
+        assert attrname != 'eid'
+        self._attrname = attrname
+
+    def __get__(self, eobj, eclass):
+        if eobj is None:
+            return self
+        return eobj.get_value(self._attrname)
+
+    def __set__(self, eobj, value):
+        # XXX bw compat
+        # would be better to generate UPDATE queries than the current behaviour
+        eobj.warning("deprecated usage, don't use 'entity.attr = val' notation)")
+        eobj[self._attrname] = value
+
+
+class Relation(object):
+    """descriptor that controls schema relation access"""
+    _role = None # for pylint
+
+    def __init__(self, rschema):
+        self._rschema = rschema
+        self._rtype = rschema.type
+
+    def __get__(self, eobj, eclass):
+        if eobj is None:
+            raise AttributeError('%s cannot be only be accessed from instances'
+                                 % self._rtype)
+        return eobj.related(self._rtype, self._role, entities=True)
+    
+    def __set__(self, eobj, value):
+        raise NotImplementedError
+
+
+class SubjectRelation(Relation):
+    """descriptor that controls schema relation access"""
+    _role = 'subject'
+    
+class ObjectRelation(Relation):
+    """descriptor that controls schema relation access"""
+    _role = 'object'
+
+from logging import getLogger
+from cubicweb import set_log_methods
+set_log_methods(Entity, getLogger('cubicweb.entity'))
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/common/html4zope.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,153 @@
+# Author: David Goodger
+# Contact: goodger@users.sourceforge.net
+# Revision: $Revision: 1.2 $
+# Date: $Date: 2005-07-04 16:36:50 $
+# Copyright: This module has been placed in the public domain.
+
+"""
+Simple HyperText Markup Language document tree Writer.
+
+The output conforms to the HTML 4.01 Transitional DTD and to the Extensible
+HTML version 1.0 Transitional DTD (*almost* strict).  The output contains a
+minimum of formatting information.  A cascading style sheet ("default.css" by
+default) is required for proper viewing with a modern graphical browser.
+
+http://cvs.zope.org/Zope/lib/python/docutils/writers/Attic/html4zope.py?rev=1.1.2.2&only_with_tag=ajung-restructuredtext-integration-branch&content-type=text/vnd.viewcvs-markup
+"""
+
+__docformat__ = 'reStructuredText'
+
+from logilab.mtconverter import html_escape
+
+from docutils import nodes
+from docutils.writers.html4css1 import Writer as CSS1Writer
+from docutils.writers.html4css1 import HTMLTranslator as CSS1HTMLTranslator
+import os
+
+default_level = int(os.environ.get('STX_DEFAULT_LEVEL', 3))
+
+class Writer(CSS1Writer):
+    """css writer using our html translator"""
+    def __init__(self, base_url):
+        CSS1Writer.__init__(self)
+        self.translator_class = URLBinder(base_url, HTMLTranslator)
+
+    def apply_template(self):
+        """overriding this is necessary with docutils >= 0.5"""
+        return self.visitor.astext()
+
+class URLBinder:
+    def __init__(self, url, klass):
+        self.base_url = url
+        self.translator_class = HTMLTranslator
+        
+    def __call__(self, document):
+        translator = self.translator_class(document)
+        translator.base_url = self.base_url
+        return translator
+    
+class HTMLTranslator(CSS1HTMLTranslator):
+    """ReST tree to html translator"""
+
+    def astext(self):
+        """return the extracted html"""
+        return ''.join(self.body)
+    
+    def visit_title(self, node):
+        """Only 6 section levels are supported by HTML."""
+        if isinstance(node.parent, nodes.topic):
+            self.body.append(
+                  self.starttag(node, 'p', '', CLASS='topic-title'))
+            if node.parent.hasattr('id'):
+                self.body.append(
+                    self.starttag({}, 'a', '', name=node.parent['id']))
+                self.context.append('</a></p>\n')
+            else:
+                self.context.append('</p>\n')
+        elif self.section_level == 0:
+            # document title
+            self.head.append('<title>%s</title>\n'
+                             % self.encode(node.astext()))
+            self.body.append(self.starttag(node, 'h%d' % default_level, '',
+                                           CLASS='title'))
+            self.context.append('</h%d>\n' % default_level)
+        else:
+            self.body.append(
+                  self.starttag(node, 'h%s' % (
+                default_level+self.section_level-1), ''))
+            atts = {}
+            if node.hasattr('refid'):
+                atts['class'] = 'toc-backref'
+                atts['href'] = '%s#%s' % (self.base_url, node['refid'])
+            self.body.append(self.starttag({}, 'a', '', **atts))
+            self.context.append('</a></h%s>\n' % (
+                default_level+self.section_level-1))
+
+    def visit_subtitle(self, node):
+        """format a subtitle"""
+        if isinstance(node.parent, nodes.sidebar):
+            self.body.append(self.starttag(node, 'p', '',
+                                           CLASS='sidebar-subtitle'))
+            self.context.append('</p>\n')
+        else:
+            self.body.append(
+                  self.starttag(node, 'h%s' % (default_level+1), '',
+                                CLASS='subtitle'))
+            self.context.append('</h%s>\n' % (default_level+1))
+
+    def visit_document(self, node):
+        """syt: i don't want the enclosing <div class="document">"""
+    def depart_document(self, node):
+        """syt: i don't want the enclosing <div class="document">"""
+
+    def visit_reference(self, node):
+        """syt: i want absolute urls"""
+        if node.has_key('refuri'):
+            href = node['refuri']
+            if ( self.settings.cloak_email_addresses
+                 and href.startswith('mailto:')):
+                href = self.cloak_mailto(href)
+                self.in_mailto = 1
+        else:
+            assert node.has_key('refid'), \
+                   'References must have "refuri" or "refid" attribute.'
+            href = '%s#%s' % (self.base_url, node['refid'])
+        atts = {'href': href, 'class': 'reference'}
+        if not isinstance(node.parent, nodes.TextElement):
+            assert len(node) == 1 and isinstance(node[0], nodes.image)
+            atts['class'] += ' image-reference'
+        self.body.append(self.starttag(node, 'a', '', **atts))
+
+    ## override error messages to avoid XHTML problems ########################
+    def visit_problematic(self, node):
+        pass
+
+    def depart_problematic(self, node):
+        pass
+    
+    def visit_system_message(self, node):
+        backref_text = ''
+        if len(node['backrefs']):
+            backrefs = node['backrefs']
+            if len(backrefs) == 1:
+                backref_text = '; <em>backlink</em>'
+            else:
+                i = 1
+                backlinks = []
+                for backref in backrefs:
+                    backlinks.append(str(i))
+                    i += 1
+                backref_text = ('; <em>backlinks: %s</em>'
+                                % ', '.join(backlinks))
+        if node.hasattr('line'):
+            line = ', line %s' % node['line']
+        else:
+            line = ''
+        a_start = a_end = ''
+        error = u'System Message: %s%s/%s%s (%s %s)%s</p>\n' % (
+            a_start, node['type'], node['level'], a_end,
+            self.encode(node['source']), line, backref_text)
+        self.body.append(u'<div class="system-message"><b>ReST / HTML errors:</b>%s</div>' % html_escape(error))
+
+    def depart_system_message(self, node):
+        pass
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/common/i18n.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,93 @@
+"""Some i18n/gettext utilities.
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+import re
+import os
+from os.path import join, abspath, basename, splitext, exists
+from glob import glob
+
+from cubicweb.toolsutils import create_dir
+
+def extract_from_tal(files, output_file):
+    """extract i18n strings from tal and write them into the given output file
+    using standard python gettext marker (_)
+    """
+    output = open(output_file, 'w')
+    for filepath in files:
+        for match in re.finditer('i18n:(content|replace)="([^"]+)"', open(filepath).read()):
+            print >> output, '_("%s")' % match.group(2)
+    output.close()
+
+
+def add_msg(w, msgid):
+    """write an empty pot msgid definition"""
+    if isinstance(msgid, unicode):
+        msgid = msgid.encode('utf-8')
+    msgid = msgid.replace('"', r'\"').splitlines()
+    if len(msgid) > 1:
+        w('msgid ""\n')
+        for line in msgid:
+            w('"%s"' % line.replace('"', r'\"'))
+    else:
+        w('msgid "%s"\n' % msgid[0])
+    w('msgstr ""\n\n')
+
+
+def execute(cmd):
+    """display the command, execute it and raise an Exception if returned
+    status != 0
+    """
+    print cmd.replace(os.getcwd() + os.sep, '')
+    status = os.system(cmd)
+    if status != 0:
+        raise Exception()
+
+
+def available_catalogs(i18ndir=None):
+    if i18ndir is None:
+        wildcard = '*.po'
+    else:
+        wildcard = join(i18ndir, '*.po')
+    for popath in glob(wildcard):
+        lang = splitext(basename(popath))[0]
+        yield lang, popath
+
+
+def compile_i18n_catalogs(sourcedirs, destdir, langs):
+    """generate .mo files for a set of languages into the `destdir` i18n directory
+    """
+    from logilab.common.fileutils import ensure_fs_mode
+    print 'compiling %s catalogs...' % destdir
+    errors = []
+    for lang in langs:
+        langdir = join(destdir, lang, 'LC_MESSAGES')
+        if not exists(langdir):
+            create_dir(langdir)
+        pofiles = [join(path, '%s.po' % lang) for path in sourcedirs]
+        pofiles = [pof for pof in pofiles if exists(pof)]
+        mergedpo = join(destdir, '%s_merged.po' % lang)
+        try:
+            # merge application messages' catalog with the stdlib's one
+            execute('msgcat --use-first --sort-output --strict %s > %s'
+                    % (' '.join(pofiles), mergedpo))
+            # make sure the .mo file is writeable and compile with *msgfmt*
+            applmo = join(destdir, lang, 'LC_MESSAGES', 'cubicweb.mo')
+            try:
+                ensure_fs_mode(applmo)
+            except OSError:
+                pass # suppose not exists
+            execute('msgfmt %s -o %s' % (mergedpo, applmo))
+        except Exception, ex:
+            errors.append('while handling language %s: %s' % (lang, ex))
+        try:
+            # clean everything
+            os.unlink(mergedpo)
+        except Exception:
+            continue
+    return errors
+                         
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/common/mail.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,94 @@
+"""Common utilities to format / send emails.
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from email.MIMEMultipart import MIMEMultipart
+from email.MIMEText import MIMEText
+from email.MIMEImage import MIMEImage
+from email.Header import Header
+
+
+def header(ustring):
+    return Header(ustring.encode('UTF-8'), 'UTF-8')
+
+def addrheader(uaddr, uname=None):
+    # even if an email address should be ascii, encode it using utf8 since
+    # application tests may generate non-ascii email addresses
+    addr = uaddr.encode('UTF-8') 
+    if uname:
+        return '%s <%s>' % (header(uname).encode(), addr)
+    return addr
+
+
+def format_mail(uinfo, to_addrs, content, subject="",
+                cc_addrs=(), msgid=None, references=(), config=None):
+    """Sends an email to 'to_addrs' with content 'content', and subject 'subject'
+
+    to_addrs and cc_addrs are expected to be a list of email address without
+    name
+    """
+    assert type(content) is unicode, repr(content)
+    msg = MIMEText(content.encode('UTF-8'), 'plain', 'UTF-8')
+    # safety: keep only the first newline
+    subject = subject.splitlines()[0]
+    msg['Subject'] = header(subject)
+    if uinfo.get('email'):
+        email = uinfo['email']
+    elif config and config['sender-addr']:
+        email = unicode(config['sender-addr'])
+    else:
+        email = u''
+    if uinfo.get('name'):
+        name = uinfo['name']
+    elif config and config['sender-addr']:
+        name = unicode(config['sender-name'])
+    else:
+        name = u''
+    msg['From'] = addrheader(email, name)
+    if config and config['sender-addr'] and config['sender-addr'] != email:
+        appaddr = addrheader(config['sender-addr'], config['sender-name'])
+        msg['Reply-to'] = '%s, %s' % (msg['From'], appaddr)
+    elif email:
+        msg['Reply-to'] = msg['From']
+    if config is not None:
+        msg['X-CW'] = config.appid
+    msg['To'] = ', '.join(addrheader(addr) for addr in to_addrs if addr is not None)
+    if cc_addrs:
+        msg['Cc'] = ', '.join(addrheader(addr) for addr in cc_addrs if addr is not None)
+    if msgid:
+        msg['Message-id'] = msgid
+    if references:
+        msg['References'] = ', '.join(references)
+    return msg
+
+
+class HtmlEmail(MIMEMultipart):
+
+    def __init__(self, subject, textcontent, htmlcontent,
+                 sendermail=None, sendername=None, recipients=None, ccrecipients=None):
+        MIMEMultipart.__init__(self, 'related')
+        self['Subject'] = header(subject)
+        self.preamble = 'This is a multi-part message in MIME format.'
+        # Attach alternative text message
+        alternative = MIMEMultipart('alternative')
+        self.attach(alternative)
+        msgtext = MIMEText(textcontent.encode('UTF-8'), 'plain', 'UTF-8')
+        alternative.attach(msgtext)
+        # Attach html message
+        msghtml = MIMEText(htmlcontent.encode('UTF-8'), 'html', 'UTF-8')
+        alternative.attach(msghtml)
+        if sendermail or sendername:
+            self['From'] = addrheader(sendermail, sendername)
+        if recipients:
+            self['To'] = ', '.join(addrheader(addr) for addr in recipients if addr is not None)
+        if ccrecipients:
+            self['Cc'] = ', '.join(addrheader(addr) for addr in ccrecipients if addr is not None)
+
+    def attach_image(self, data, htmlId):
+        image = MIMEImage(data)
+        image.add_header('Content-ID', '<%s>' % htmlId)
+        self.attach(image)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/common/migration.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,358 @@
+"""utility to ease migration of application version to newly installed
+version
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+import sys
+import os
+import logging
+from tempfile import mktemp
+from os.path import exists, join, basename, splitext
+
+from logilab.common.decorators import cached
+from logilab.common.configuration import REQUIRED, read_old_config
+
+
+def migration_files(config, toupgrade):
+    """return an ordered list of paths of scripts to execute to upgrade
+    an installed application according to installed cube and cubicweb versions
+
+    `toupgrade` is a list of (cube, fromversion, toversion) 3-uples
+    """
+    merged = []
+    for cube, fromversion, toversion in toupgrade:
+        if cube == 'cubicweb':
+            migrdir = config.migration_scripts_dir()
+        else:
+            migrdir = config.cube_migration_scripts_dir(cube)
+        scripts = filter_scripts(config, migrdir, fromversion, toversion)
+        # filter_scripts returns (version, path) tuples; keep only the paths
+        merged += [s[1] for s in scripts]
+    if config.accept_mode('Any'):
+        # repository-side migration: always run the bootstrap script first
+        migrdir = config.migration_scripts_dir()
+        merged.insert(0, join(migrdir, 'bootstrapmigration_repository.py'))
+    return merged
+
+
+def filter_scripts(config, directory, fromversion, toversion, quiet=True):
+    """return a list of paths of migration files to consider to upgrade
+    from a version to a greater one
+    """
+    from logilab.common.changelog import Version # doesn't work with appengine
+    assert fromversion
+    assert toversion
+    assert isinstance(fromversion, tuple), fromversion.__class__
+    assert isinstance(toversion, tuple), toversion.__class__
+    assert fromversion <= toversion, (fromversion, toversion)
+    if not exists(directory):
+        if not quiet:
+            print directory, "doesn't exists, no migration path"
+        return []
+    if fromversion == toversion:
+        return []
+    result = []
+    for fname in os.listdir(directory):
+        if fname.endswith('.pyc') or fname.endswith('.pyo') \
+               or fname.endswith('~'):
+            continue
+        fpath = join(directory, fname)
+        try:
+            tver, mode = fname.split('_', 1)
+        except ValueError:
+            continue
+        mode = mode.split('.', 1)[0]
+        if not config.accept_mode(mode):
+            continue
+        try:
+            tver = Version(tver)
+        except ValueError:
+            continue
+        if tver <= fromversion:
+            continue
+        if tver > toversion:
+            continue
+        result.append((tver, fpath))
+    # be sure scripts are executed in order
+    return sorted(result)
+
+
+# NOTE(review): not referenced in this module (filter_scripts hardcodes its
+# own skip list) — confirm external users before removing
+IGNORED_EXTENSIONS = ('.swp', '~')
+
+
+def execscript_confirm(scriptpath):
+    """asks for confirmation before executing a script and provides the
+    ability to show the script's content
+
+    return True to execute the script, False to skip it
+    """
+    while True:
+        confirm = raw_input('** execute %r (Y/n/s[how]) ?' % scriptpath)
+        confirm = confirm.strip().lower()
+        if confirm in ('n', 'no'):
+            return False
+        elif confirm in ('s', 'show'):
+            # display the script then loop to ask again
+            stream = open(scriptpath)
+            scriptcontent = stream.read()
+            stream.close()
+            print
+            print scriptcontent
+            print
+        else:
+            # default (empty answer, 'y', anything else) is to execute
+            return True
+
+def yes(*args, **kwargs):
+    """confirmation callback replacement which unconditionally accepts,
+    used in non-interactive mode
+    """
+    return True
+
+
+class MigrationHelper(object):
+    """class holding CubicWeb Migration Actions used by migration scripts"""
+
+    def __init__(self, config, interactive=True, verbosity=1):
+        self.config = config
+        self.config.init_log(logthreshold=logging.ERROR, debug=True)
+        # 0: no confirmation, 1: only main commands confirmed, 2 ask for everything
+        self.verbosity = verbosity
+        self.need_wrap = True
+        if not interactive or not verbosity:
+            self.confirm = yes
+            self.execscript_confirm = yes
+        else:
+            self.execscript_confirm = execscript_confirm
+        self._option_changes = []
+        self.__context = {'confirm': self.confirm,
+                          'config': self.config,
+                          'interactive_mode': interactive,
+                          }
+
+    def repo_connect(self):
+        return self.config.repository()
+        
+    def migrate(self, vcconf, toupgrade, options):
+        """upgrade the given set of cubes
+        
+        `cubes` is an ordered list of 3-uple:
+        (cube, fromversion, toversion)
+        """
+        if options.fs_only:
+            # monkey path configuration.accept_mode so database mode (e.g. Any)
+            # won't be accepted
+            orig_accept_mode = self.config.accept_mode
+            def accept_mode(mode):
+                if mode == 'Any':
+                    return False
+                return orig_accept_mode(mode)
+            self.config.accept_mode = accept_mode
+        scripts = migration_files(self.config, toupgrade)
+        if scripts:
+            vmap = dict( (pname, (fromver, tover)) for pname, fromver, tover in toupgrade)
+            self.__context.update({'applcubicwebversion': vcconf['cubicweb'],
+                                   'cubicwebversion': self.config.cubicweb_version(),
+                                   'versions_map': vmap})
+            self.scripts_session(scripts)
+        else:
+            print 'no migration script to execute'            
+
+    def shutdown(self):
+        pass
+    
+    def __getattribute__(self, name):
+        try:
+            return object.__getattribute__(self, name)
+        except AttributeError:
+            cmd = 'cmd_%s' % name
+            if hasattr(self, cmd):
+                meth = getattr(self, cmd) 
+                return lambda *args, **kwargs: self.interact(args, kwargs,
+                                                             meth=meth)
+            raise
+        raise AttributeError(name)
+            
+    def interact(self, args, kwargs, meth):
+        """execute the given method according to user's confirmation"""
+        msg = 'execute command: %s(%s) ?' % (
+            meth.__name__[4:],
+            ', '.join([repr(arg) for arg in args] +
+                      ['%s=%r' % (n,v) for n,v in kwargs.items()]))
+        if 'ask_confirm' in kwargs:
+            ask_confirm = kwargs.pop('ask_confirm')
+        else:
+            ask_confirm = True
+        if not ask_confirm or self.confirm(msg):
+            return meth(*args, **kwargs)
+
+    def confirm(self, question, shell=True, abort=True, retry=False):
+        """ask for confirmation and return true on positive answer
+
+        if `retry` is true the r[etry] answer may return 2
+        """
+        print question,
+        possibleanswers = 'Y/n'
+        if abort:
+            possibleanswers += '/a[bort]'
+        if shell:
+            possibleanswers += '/s[hell]'
+        if retry:
+            possibleanswers += '/r[etry]'
+        try:
+            confirm = raw_input('(%s): ' % ( possibleanswers, ))
+            answer = confirm.strip().lower()
+        except (EOFError, KeyboardInterrupt):
+            answer = 'abort'
+        if answer in ('n', 'no'):
+            return False
+        if answer in ('r', 'retry'):
+            return 2
+        if answer in ('a', 'abort'):
+            self.rollback()
+            raise SystemExit(1)
+        if shell and answer in ('s', 'shell'):
+            self.interactive_shell()
+            return self.confirm(question)
+        return True
+
+    def interactive_shell(self):
+        self.confirm = yes
+        self.need_wrap = False
+        # avoid '_' to be added to builtins by sys.display_hook
+        def do_not_add___to_builtins(obj):
+            if obj is not None:
+                print repr(obj)
+        sys.displayhook = do_not_add___to_builtins
+        local_ctx = self._create_context()
+        try:
+            import readline
+            from rlcompleter import Completer
+        except ImportError:
+            # readline not available
+            pass
+        else:        
+            readline.set_completer(Completer(local_ctx).complete)
+            readline.parse_and_bind('tab: complete')
+            histfile = os.path.join(os.environ["HOME"], ".eshellhist")
+            try:
+                readline.read_history_file(histfile)
+            except IOError:
+                pass
+        from code import interact
+        banner = """entering the migration python shell
+just type migration commands or arbitrary python code and type ENTER to execute it
+type "exit" or Ctrl-D to quit the shell and resume operation"""
+        # give custom readfunc to avoid http://bugs.python.org/issue1288615
+        def unicode_raw_input(prompt):
+            return unicode(raw_input(prompt), sys.stdin.encoding)
+        interact(banner, readfunc=unicode_raw_input, local=local_ctx)
+        readline.write_history_file(histfile)
+        # delete instance's confirm attribute to avoid questions
+        del self.confirm
+        self.need_wrap = True
+
+    @cached
+    def _create_context(self):
+        """return a dictionary to use as migration script execution context"""
+        context = self.__context
+        for attr in dir(self):
+            if attr.startswith('cmd_'):
+                if self.need_wrap:
+                    context[attr[4:]] = getattr(self, attr[4:])
+                else:
+                    context[attr[4:]] = getattr(self, attr)
+        return context
+    
+    def process_script(self, migrscript, funcname=None, *args, **kwargs):
+        """execute a migration script
+        in interactive mode,  display the migration script path, ask for
+        confirmation and execute it if confirmed
+        """
+        assert migrscript.endswith('.py'), migrscript
+        if self.execscript_confirm(migrscript):
+            scriptlocals = self._create_context().copy()
+            if funcname is None:
+                pyname = '__main__'
+            else:
+                pyname = splitext(basename(migrscript))[0]
+            scriptlocals.update({'__file__': migrscript, '__name__': pyname})
+            execfile(migrscript, scriptlocals)
+            if funcname is not None:
+                try:
+                    func = scriptlocals[funcname]
+                    self.info('found %s in locals', funcname)
+                    assert callable(func), '%s (%s) is not callable' % (func, funcname)
+                except KeyError:
+                    self.critical('no %s in script %s', funcname, migrscript)
+                    return None
+                return func(*args, **kwargs)
+                    
+    def scripts_session(self, migrscripts):
+        """execute some scripts in a transaction"""
+        try:
+            for migrscript in migrscripts:
+                self.process_script(migrscript)
+            self.commit()
+        except:
+            self.rollback()
+            raise
+
+    def cmd_option_renamed(self, oldname, newname):
+        """a configuration option has been renamed"""
+        self._option_changes.append(('renamed', oldname, newname))
+
+    def cmd_option_group_change(self, option, oldgroup, newgroup):
+        """a configuration option has been moved in another group"""
+        self._option_changes.append(('moved', option, oldgroup, newgroup))
+
+    def cmd_option_added(self, optname):
+        """a configuration option has been added"""
+        self._option_changes.append(('added', optname))
+
+    def cmd_option_removed(self, optname):
+        """a configuration option has been removed"""
+        # can safely be ignored
+        #self._option_changes.append(('removed', optname))
+
+    def cmd_option_type_changed(self, optname, oldtype, newvalue):
+        """a configuration option's type has changed"""
+        self._option_changes.append(('typechanged', optname, oldtype, newvalue))
+        
+    def cmd_add_cube(self, cube):
+        origcubes = self.config.cubes()
+        newcubes = [p for p in self.config.expand_cubes([cube]) 
+                       if not p in origcubes]
+        if newcubes:
+            assert cube in newcubes
+            self.config.add_cubes(newcubes)
+        return newcubes
+
+    def cmd_remove_cube(self, cube):
+        origcubes = self.config._cubes
+        basecubes = list(origcubes)
+        for pkg in self.config.expand_cubes([cube]):
+            try:
+                basecubes.remove(pkg)
+            except ValueError:
+                continue
+        self.config._cubes = tuple(self.config.expand_cubes(basecubes))
+        removed = [p for p in origcubes if not p in self.config._cubes]
+        assert cube in removed, \
+               "can't remove cube %s, used as a dependancy" % cube
+        return removed
+    
+    def rewrite_configuration(self):
+        # import locally, show_diffs unavailable in gae environment
+        from cubicweb.toolsutils import show_diffs
+        configfile = self.config.main_config_file()
+        if self._option_changes:
+            read_old_config(self.config, self._option_changes, configfile)
+        newconfig = mktemp()
+        for optdescr in self._option_changes:
+            if optdescr[0] == 'added':
+                optdict = self.config.get_option_def(optdescr[1])
+                if optdict.get('default') is REQUIRED:
+                    self.config.input_option(option, optdict)
+        self.config.generate_config(open(newconfig, 'w'))
+        show_diffs(configfile, newconfig)
+        if exists(newconfig):
+            os.unlink(newconfig)
+
+
+from logging import getLogger
+from cubicweb import set_log_methods
+# give MigrationHelper the logging methods used above (self.info,
+# self.critical in process_script), bound to the 'cubicweb.migration' logger
+set_log_methods(MigrationHelper, getLogger('cubicweb.migration'))
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/common/mixins.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,393 @@
+"""mixins of entity/views organized somewhat in a graph or tree structure
+
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from logilab.common.decorators import cached
+
+from cubicweb.common.selectors import interface_selector
+from cubicweb.interfaces import IWorkflowable, IEmailable, ITree
+
+
+class TreeMixIn(object):
+    """base tree-mixin providing the tree interface
+
+    This mixin has to be inherited explicitly and configured using the
+    tree_attribute, parent_target and children_target class attribute to
+    benefit from this default implementation
+    """
+    tree_attribute = None
+    # XXX misnamed
+    parent_target = 'subject'
+    children_target = 'object'
+    
+    def different_type_children(self, entities=True):
+        """return children entities of different type as this entity.
+        
+        according to the `entities` parameter, return entity objects or the
+        equivalent result set
+        """
+        res = self.related(self.tree_attribute, self.children_target,
+                           entities=entities)
+        if entities:
+            return [e for e in res if e.e_schema != self.e_schema]
+        return res.filtered_rset(lambda x: x.e_schema != self.e_schema, self.col)
+
+    def same_type_children(self, entities=True):
+        """return children entities of the same type as this entity.
+        
+        according to the `entities` parameter, return entity objects or the
+        equivalent result set
+        """
+        res = self.related(self.tree_attribute, self.children_target,
+                           entities=entities)
+        if entities:
+            return [e for e in res if e.e_schema == self.e_schema]
+        return res.filtered_rset(lambda x: x.e_schema == self.e_schema, self.col)
+    
+    def iterchildren(self, _done=None):
+        if _done is None:
+            _done = set()
+        for child in self.children():
+            if child.eid in _done:
+                self.error('loop in %s tree', self.id.lower())
+                continue
+            yield child
+            _done.add(child.eid)
+
+    def prefixiter(self, _done=None):
+        if _done is None:
+            _done = set()
+        if self.eid in _done:
+            return
+        yield self
+        _done.add(self.eid)
+        for child in self.iterchildren(_done):
+            try:
+                for entity in child.prefixiter(_done):
+                    yield entity
+            except AttributeError:
+                pass
+    
+    @cached
+    def path(self):
+        """returns the list of eids from the root object to this object"""
+        path = []
+        parent = self
+        while parent:
+            if parent.eid in path:
+                self.error('loop in %s tree', self.id.lower())
+                break
+            path.append(parent.eid)
+            try:
+                # check we are not leaving the tree
+                if (parent.tree_attribute != self.tree_attribute or
+                    parent.parent_target != self.parent_target):
+                    break
+                parent = parent.parent()
+            except AttributeError:
+                break
+
+        path.reverse()
+        return path
+    
+    def notification_references(self, view):
+        """used to control References field of email send on notification
+        for this entity. `view` is the notification view.
+        
+        Should return a list of eids which can be used to generate message ids
+        of previously sent email
+        """
+        return self.path()[:-1]
+
+
+    ## ITree interface ########################################################
+    def parent(self):
+        """return the parent entity if any, else None (e.g. if we are on the
+        root
+        """
+        try:
+            return self.related(self.tree_attribute, self.parent_target,
+                                entities=True)[0]
+        except (KeyError, IndexError):
+            return None
+
+    def children(self, entities=True, sametype=False):
+        """return children entities
+
+        according to the `entities` parameter, return entity objects or the
+        equivalent result set
+        """
+        if sametype:
+            return self.same_type_children(entities)
+        else:
+            return self.related(self.tree_attribute, self.children_target,
+                                entities=entities)
+
+    def children_rql(self):
+        return self.related_rql(self.tree_attribute, self.children_target)
+    
+    def __iter__(self):
+        return self.iterchildren()
+
+    def is_leaf(self):
+        print '*' * 80
+        return len(self.children()) == 0
+
+    def is_root(self):
+        return self.parent() is None
+
+    def root(self):
+        """return the root object"""
+        return self.req.eid_rset(self.path()[0]).get_entity(0, 0)
+
+
+class WorkflowableMixIn(object):
+    """base mixin providing workflow helper methods for workflowable entities.
+    This mixin will be automatically set on class supporting the 'in_state'
+    relation (which implies supporting 'wf_info_for' as well)
+    """
+    __implements__ = (IWorkflowable,)
+
+    @property
+    def state(self):
+        """name of the entity's current state"""
+        return self.in_state[0].name
+
+    @property
+    def displayable_state(self):
+        """current state name translated in the request's language"""
+        return self.req._(self.state)
+
+    def wf_state(self, statename):
+        """return the State entity with the given name for this entity's
+        type, or None if there is no such state
+        """
+        # NOTE(review): selected variable SN is never restricted in the query
+        # ('S name SN' was probably intended) — confirm against RQL semantics
+        rset = self.req.execute('Any S, SN WHERE S name %(n)s, S state_of E, E name %(e)s',
+                                {'n': statename, 'e': str(self.e_schema)})
+        if rset:
+            return rset.get_entity(0, 0)
+        return None
+
+    def wf_transition(self, trname):
+        """return the Transition entity with the given name for this entity's
+        type, or None if there is no such transition
+        """
+        # NOTE(review): same remark as in wf_state about the unrestricted TN
+        rset = self.req.execute('Any T, TN WHERE T name %(n)s, T transition_of E, E name %(e)s',
+                                {'n': trname, 'e': str(self.e_schema)})
+        if rset:
+            return rset.get_entity(0, 0)
+        return None
+
+    def change_state(self, stateeid, trcomment=None, trcommentformat=None):
+        """change the entity's state according to a state defined in given
+        parameters
+        """
+        # comment/format are stored as shared session data — presumably read
+        # back by the workflow hooks recording the transition; confirm
+        if trcomment:
+            self.req.set_shared_data('trcomment', trcomment)
+        if trcommentformat:
+            self.req.set_shared_data('trcommentformat', trcommentformat)
+        self.req.execute('SET X in_state S WHERE X eid %(x)s, S eid %(s)s',
+                         {'x': self.eid, 's': stateeid}, 'x')
+
+    def can_pass_transition(self, trname):
+        """return the Transition instance if the current user can pass the
+        transition with the given name, else None
+        """
+        stateeid = self.in_state[0].eid
+        rset = self.req.execute('Any T,N,DS WHERE S allowed_transition T,'
+                                'S eid %(x)s,T name %(trname)s,ET name %(et)s,'
+                                'T name N,T destination_state DS,T transition_of ET',
+                                {'x': stateeid, 'et': str(self.e_schema),
+                                 'trname': trname}, 'x')
+        for tr in rset.entities():
+            if tr.may_be_passed(self.eid, stateeid):
+                return tr
+
+    def latest_trinfo(self):
+        """return the latest transition information for this entity"""
+        return self.reverse_wf_info_for[-1]
+
+    # specific vocabulary methods #############################################
+
+    def subject_in_state_vocabulary(self, rschema, limit=None):
+        """vocabulary method for the in_state relation, looking for
+        relation's object entities (i.e. self is the subject) according
+        to initial_state, state_of and next_state relation
+        """
+        if not self.has_eid() or not self.in_state:
+            # get the initial state
+            rql = 'Any S where S state_of ET, ET name %(etype)s, ET initial_state S'
+            rset = self.req.execute(rql, {'etype': str(self.e_schema)})
+            if rset:
+                return [(rset.get_entity(0, 0).view('combobox'), rset[0][0])]
+            return []
+        results = []
+        # states reachable through transitions from the current state
+        for tr in self.in_state[0].transitions(self):
+            state = tr.destination_state[0]
+            results.append((state.view('combobox'), state.eid))
+        return sorted(results)
+
+    # __method methods ########################################################
+
+    def set_state(self, params=None):
+        """change the entity's state according to a state defined in given
+        parameters, used to be called using __method controller facility
+        """
+        params = params or self.req.form
+        self.change_state(int(params.pop('state')), params.get('trcomment'),
+                          params.get('trcommentformat'))
+        self.req.set_message(self.req._('__msg state changed'))
+
+
+
+class EmailableMixIn(object):
+    """base mixin providing the default get_email() method used by
+    the massmailing view
+
+    NOTE: The default implementation is based on the
+    primary_email / use_email scheme
+    """
+    __implements__ = (IEmailable,)
+    
+    def get_email(self):
+        if getattr(self, 'primary_email', None):
+            return self.primary_email[0].address
+        if getattr(self, 'use_email', None):
+            return self.use_email[0].address
+        return None
+
+    @classmethod
+    def allowed_massmail_keys(cls):
+        """returns a set of allowed email substitution keys
+
+        The default is to return the entity's attribute list but an
+        entity class might override this method to allow extra keys.
+        For instance, the Person class might want to return a `companyname`
+        key.
+        """
+        return set(rs.type for rs, _ in cls.e_schema.attribute_definitions())
+
+    def as_email_context(self):
+        """returns the dictionary as used by the sendmail controller to
+        build email bodies.
+        
+        NOTE: the dictionary keys should match the list returned by the
+        `allowed_massmail_keys` method.
+        """
+        return dict( (attr, getattr(self, attr)) for attr in self.allowed_massmail_keys() )
+
+
+    
+# (relation name, role) -> mixin: entity classes supporting one of these
+# relations automatically get the associated mixin (see the mixins' docstrings)
+MI_REL_TRIGGERS = {
+    ('in_state',    'subject'): WorkflowableMixIn,
+    ('primary_email',   'subject'): EmailableMixIn,
+    ('use_email',   'subject'): EmailableMixIn,
+    }
+
+
+
+def _done_init(done, view, row, col):
+    """handle an infinite recursion safety belt
+
+    return (done, entity) where `done` is the updated set of already seen
+    eids, or (None, error message) when the entity was already rendered
+    """
+    if done is None:
+        done = set()
+    entity = view.entity(row, col)
+    if entity.eid in done:
+        msg = entity.req._('loop in %s relation (%s)'
+                           % (entity.tree_attribute, entity.eid))
+        return None, msg
+    done.add(entity.eid)
+    return done, entity
+
+
+class TreeViewMixIn(object):
+    """a recursive tree view"""
+    id = 'tree'
+    item_vid = 'treeitem'
+    __selectors__ = (interface_selector,)
+    accepts_interfaces = (ITree,)
+
+    def call(self, done=None, **kwargs):
+        # `done` tracks already rendered eids to break cycles (see _done_init)
+        if done is None:
+            done = set()
+        super(TreeViewMixIn, self).call(done=done, **kwargs)
+
+    def cell_call(self, row, col=0, vid=None, done=None, **kwargs):
+        done, entity = _done_init(done, self, row, col)
+        if done is None:
+            # entity is actually an error message
+            self.w(u'<li class="badcontent">%s</li>' % entity)
+            return
+        self.open_item(entity)
+        entity.view(vid or self.item_vid, w=self.w, **kwargs)
+        # recurse on the children result set ('null' as fallback vid —
+        # presumably renders nothing on an empty rset; confirm)
+        relatedrset = entity.children(entities=False)
+        self.wview(self.id, relatedrset, 'null', done=done, **kwargs)
+        self.close_item(entity)
+
+    # hooks: override to customize the markup surrounding each item
+    def open_item(self, entity):
+        self.w(u'<li class="%s">\n' % entity.id.lower())
+    def close_item(self, entity):
+        self.w(u'</li>\n')
+
+
+class TreePathMixIn(object):
+    """a recursive path view"""
+    id = 'path'
+    item_vid = 'oneline'
+    separator = u'&nbsp;&gt;&nbsp;'
+
+    def call(self, **kwargs):
+        self.w(u'<div class="pathbar">')
+        super(TreePathMixIn, self).call(**kwargs)
+        self.w(u'</div>')
+
+    def cell_call(self, row, col=0, vid=None, done=None, **kwargs):
+        done, entity = _done_init(done, self, row, col)
+        if done is None:
+            # entity is actually an error message
+            self.w(u'<span class="badcontent">%s</span>' % entity)
+            return
+        # render ancestors first (recursively), then this entity
+        parent = entity.parent()
+        if parent:
+            parent.view(self.id, w=self.w, done=done)
+            self.w(self.separator)
+        entity.view(vid or self.item_vid, w=self.w)
+
+
+class ProgressMixIn(object):
+    """provide default implementations for IProgress interface methods,
+    all derived from the progress_info() dictionary
+    """
+
+    @property
+    @cached
+    def cost(self):
+        # initially estimated cost
+        return self.progress_info()['estimated']
+
+    @property
+    @cached
+    def revised_cost(self):
+        # corrected estimation, defaulting to the initial one
+        return self.progress_info().get('estimatedcorrected', self.cost)
+
+    @property
+    @cached
+    def done(self):
+        return self.progress_info()['done']
+
+    @property
+    @cached
+    def todo(self):
+        return self.progress_info()['todo']
+
+    @cached
+    def progress_info(self):
+        """return a dictionary describing the progress; the code above reads
+        the 'estimated', 'done' and 'todo' keys (mandatory) and optionally
+        'estimatedcorrected' and 'notestmiated'
+        """
+        raise NotImplementedError()
+
+    def finished(self):
+        return not self.in_progress()
+
+    def in_progress(self):
+        raise NotImplementedError()
+
+    def progress(self):
+        """return the percentage of completion"""
+        try:
+            return 100. * self.done / self.revised_cost
+        except ZeroDivisionError:
+            # total cost is 0 : if everything was estimated, task is completed
+            # NOTE(review): 'notestmiated' looks like a typo for 'notestimated'
+            # — confirm the key used by progress_info() implementations before
+            # renaming; also note the mixed 0. / 100 (float/int) returns
+            if self.progress_info().get('notestmiated'):
+                return 0.
+            return 100
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/common/mttransforms.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,82 @@
+"""mime type transformation engine for cubicweb, based on mtconverter
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from logilab import mtconverter
+
+from logilab.mtconverter.engine import TransformEngine
+from logilab.mtconverter.transform import Transform
+from logilab.mtconverter import (register_base_transforms,
+                                 register_pil_transforms, 
+                                 register_pygments_transforms)
+
+from cubicweb.common.uilib import rest_publish, html_publish, remove_html_tags
+
+HTML_MIMETYPES = ('text/html', 'text/xhtml', 'application/xhtml+xml')
+
+# CubicWeb specific transformations
+
+class rest_to_html(Transform):
+    """render ReST (reStructuredText) content as HTML using rest_publish"""
+    inputs = ('text/rest', 'text/x-rst')
+    output = 'text/html'
+    def _convert(self, trdata):
+        return rest_publish(trdata.appobject, trdata.decode())
+
+class html_to_html(Transform):
+    """pass HTML content through html_publish (identity mimetype transform)"""
+    inputs = HTML_MIMETYPES
+    output = 'text/html'
+    def _convert(self, trdata):
+        return html_publish(trdata.appobject, trdata.data)
+
+class ept_to_html(Transform):
+    """compile and render a cubicweb page template (TAL) as HTML"""
+    inputs = ('text/cubicweb-page-template',)
+    output = 'text/html'
+    output_encoding = 'utf-8'
+    def _convert(self, trdata):
+        # imported lazily to avoid a hard dependency at module load time
+        from cubicweb.common.tal import compile_template
+        value = trdata.encode(self.output_encoding)
+        return trdata.appobject.tal_render(compile_template(value), {})
+
+
+# Instantiate and configure the transformation engine
+
+mtconverter.UNICODE_POLICY = 'replace'
+
+ENGINE = TransformEngine()
+ENGINE.add_transform(rest_to_html())
+ENGINE.add_transform(html_to_html())
+ENGINE.add_transform(ept_to_html())
+
+# optional image transforms, available only when PIL is installed
+if register_pil_transforms(ENGINE, verb=False):
+    HAS_PIL_TRANSFORMS = True
+else:
+    HAS_PIL_TRANSFORMS = False
+
+# optional source highlighting transforms, available only when pygments
+# is installed
+try:
+    from logilab.mtconverter.transforms import pygmentstransforms
+    # drop mimetypes already handled by the transforms registered above so
+    # pygments doesn't shadow them
+    for mt in ('text/plain',) + HTML_MIMETYPES:
+        try:
+            pygmentstransforms.mimetypes.remove(mt)
+        except ValueError:
+            continue
+    register_pygments_transforms(ENGINE, verb=False)
+
+    def patch_convert(cls):
+        # wrap the transform's _convert so the pygments stylesheet is added
+        # to the current http request when there is one
+        def _convert(self, trdata, origconvert=cls._convert):
+            try:
+                trdata.appobject.req.add_css('pygments.css')
+            except AttributeError: # session has no add_css, only http request
+                pass
+            return origconvert(self, trdata)
+        cls._convert = _convert
+    patch_convert(pygmentstransforms.PygmentsHTMLTransform)
+
+    HAS_PYGMENTS_TRANSFORMS = True
+except ImportError:
+    HAS_PYGMENTS_TRANSFORMS = False
+
+register_base_transforms(ENGINE, verb=False)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/common/registerers.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,205 @@
+"""This file contains some basic registerers required by application objects
+registry to handle registration at startup time.
+
+A registerer is responsible for telling if an object should be registered
+according to the application's schema or to already registered objects.
+
+:organization: Logilab
+:copyright: 2006-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from cubicweb.vregistry import registerer
+
+
+def _accepts_interfaces(obj):
+    return sorted(getattr(obj, 'accepts_interfaces', ()))
+
+
+class yes_registerer(registerer):
+    """register without any other action"""
+    def do_it_yourself(self, registered):
+        return self.vobject
+
+class priority_registerer(registerer):
+    """systematically kick previous registered class and register the
+    wrapped class (based on the fact that directory containing vobjects
+    are loaded from the most generic to the most specific).
+
+    This is usually for templates or startup views where we want to
+    keep only the latest in the load path
+    """
+    def do_it_yourself(self, registered):
+        if registered:
+            if len(registered) > 1:
+                self.warning('priority_registerer found more than one registered objects '
+                             '(registerer monkey patch ?)')
+            for regobj in registered[:]:
+                self.kick(registered, regobj)
+        return self.vobject
+    
+    def remove_equivalents(self, registered):
+        for _obj in registered[:]:
+            if self.equivalent(_obj):
+                self.kick(registered, _obj)
+                break
+            
+    def remove_all_equivalents(self, registered):
+        for _obj in registered[:]:
+            if _obj is self.vobject:
+                continue
+            if self.equivalent(_obj):
+                self.kick(registered, _obj)
+
+    def equivalent(self, other):
+        raise NotImplementedError(self, self.vobject)
+
+
+class kick_registerer(registerer):
+    """systematically kick previous registered class and don't register the
+    wrapped class. This is temporarily used to discard library object registrable
+    but that we don't want to use
+    """
+    def do_it_yourself(self, registered):
+        if registered:
+            self.kick(registered, registered[-1])
+        return 
+    
+
+class accepts_registerer(priority_registerer):
+    """register according to the .accepts attribute of the wrapped
+    class, which should be a tuple referring to some entity types
+
+    * if no type is defined in the application's schema, skip the wrapped
+      class
+    * if the class defines a requires attribute, each entity type defined
+      in the requires list must be in the schema
+    * if an object previously registered has equivalent .accepts
+      attribute, kick it out
+    * register
+    """
+    def do_it_yourself(self, registered):
+        # if object is accepting interfaces, we have to register it now and
+        # remove it later if no object is implementing accepted interfaces
+        if _accepts_interfaces(self.vobject):
+            return self.vobject
+        if not 'Any' in self.vobject.accepts:
+            for ertype in self.vobject.accepts:
+                if ertype in self.schema:
+                    break
+            else:
+                self.skip()
+                return None
+        for required in getattr(self.vobject, 'requires', ()):
+            if required not in self.schema:
+                self.skip()
+                return
+        self.remove_equivalents(registered)
+        return self.vobject
+    
+    def equivalent(self, other):
+        if _accepts_interfaces(self.vobject) != _accepts_interfaces(other):
+            return False
+        try:
+            newaccepts = list(other.accepts)
+            for etype in self.vobject.accepts:
+                try:
+                    newaccepts.remove(etype)
+                except ValueError:
+                    continue
+            if newaccepts:
+                other.accepts = tuple(newaccepts)
+                return False
+            return True
+        except AttributeError:
+            return False
+
+
+class id_registerer(priority_registerer):
+    """register according to the "id" attribute of the wrapped class,
+    referring to an entity type.
+    
+    * if the type is not Any and is not defined in the application's schema,
+      skip the wrapped class
+    * if an object previously registered has the same .id attribute,
+      kick it out
+    * register
+    """
+    def do_it_yourself(self, registered):
+        etype = self.vobject.id
+        if etype != 'Any' and not self.schema.has_entity(etype):
+            self.skip()
+            return
+        self.remove_equivalents(registered)
+        return self.vobject
+    
+    def equivalent(self, other):
+        return other.id == self.vobject.id
+
+
+class etype_rtype_registerer(registerer):
+    """registerer handling optional .etype and .rtype attributes.:
+    
+    * if .etype is set and is not an entity type defined in the
+      application schema, skip the wrapped class
+    * if .rtype or .relname is set and is not a relation type defined in
+      the application schema, skip the wrapped class
+    * register
+    """
+    def do_it_yourself(self, registered):
+        cls = self.vobject
+        if hasattr(cls, 'etype'):
+            if not self.schema.has_entity(cls.etype):
+                return
+        rtype = getattr(cls, 'rtype', None)
+        if rtype and not self.schema.has_relation(rtype):
+            return
+        return cls
+
+class etype_rtype_priority_registerer(etype_rtype_registerer):
+    """add priority behaviour to the etype_rtype_registerer
+    """
+    def do_it_yourself(self, registered):
+        cls = super(etype_rtype_priority_registerer, self).do_it_yourself(registered)
+        if cls:
+            registerer = priority_registerer(self.registry, cls)
+            cls = registerer.do_it_yourself(registered)
+        return cls
+
+class action_registerer(etype_rtype_registerer):
+    """'all in one' actions registerer, handling optional .accepts,
+    .etype and .rtype attributes:
+    
+    * if .etype is set and is not an entity type defined in the
+      application schema, skip the wrapped class
+    * if .rtype or .relname is set and is not a relation type defined in
+      the application schema, skip the wrapped class
+    * if .accepts is set, delegate to the accepts_registerer
+    * register
+    """
+    def do_it_yourself(self, registered):
+        cls = super(action_registerer, self).do_it_yourself(registered)
+        if hasattr(cls, 'accepts'):
+            registerer = accepts_registerer(self.registry, cls)
+            cls = registerer.do_it_yourself(registered)
+        return cls
+
+
+class extresources_registerer(priority_registerer):
+    """registerer according to a .need_resources attribute which
+    should list necessary resource identifiers for the wrapped object.
+    If one of its resources is missing, don't register
+    """
+    def do_it_yourself(self, registered):
+        if not hasattr(self.config, 'has_resource'):
+            return
+        for resourceid in self.vobject.need_resources:
+            if not self.config.has_resource(resourceid):
+                return
+        return super(extresources_registerer, self).do_it_yourself(registered)
+    
+
+__all__ = [cls.__name__ for cls in globals().values()
+           if isinstance(cls, type) and issubclass(cls, registerer)
+           and not cls is registerer]
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/common/rest.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,223 @@
+"""rest publishing functions
+
+contains some functions and setup of docutils for cubicweb
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from cStringIO import StringIO
+from itertools import chain
+from logging import getLogger
+from os.path import join
+
+from docutils import statemachine, nodes, utils, io
+from docutils.core import publish_string
+from docutils.parsers.rst import Parser, states, directives
+from docutils.parsers.rst.roles import register_canonical_role, set_classes
+
+from logilab.mtconverter import html_escape
+
+from cubicweb.common.html4zope import Writer
+
+# We provide our own parser as an attempt to get rid of
+# state machine reinstantiation
+
+import re
+# compile states.Body patterns
+for k, v in states.Body.patterns.items():
+    if isinstance(v, str):
+        states.Body.patterns[k] = re.compile(v)
+
+# register ReStructured Text mimetype / extensions
+import mimetypes
+mimetypes.add_type('text/rest', '.rest')
+mimetypes.add_type('text/rest', '.rst')
+
+
+LOGGER = getLogger('cubicweb.rest')
+
+def eid_reference_role(role, rawtext, text, lineno, inliner,
+                       options={}, content=[]):
+    try:
+        try:
+            eid_num, rest = text.split(u':', 1)
+        except:
+            eid_num, rest = text, '#'+text
+        eid_num = int(eid_num)
+        if eid_num < 0:
+            raise ValueError
+    except ValueError:
+        msg = inliner.reporter.error(
+            'EID number must be a positive number; "%s" is invalid.'
+            % text, line=lineno)
+        prb = inliner.problematic(rawtext, rawtext, msg)
+        return [prb], [msg]
+    # Base URL mainly used by inliner.pep_reference; so this is correct:
+    context = inliner.document.settings.context
+    refedentity = context.req.eid_rset(eid_num).get_entity(0, 0)
+    ref = refedentity.absolute_url()
+    set_classes(options)
+    return [nodes.reference(rawtext, utils.unescape(rest), refuri=ref,
+                            **options)], []
+
+register_canonical_role('eid', eid_reference_role)
+
+
+def card_reference_role(role, rawtext, text, lineno, inliner,
+                       options={}, content=[]):
+    text = text.strip()
+    try:
+        wikiid, rest = text.split(u':', 1)
+    except:
+        wikiid, rest = text, text
+    context = inliner.document.settings.context
+    cardrset = context.req.execute('Card X WHERE X wikiid %(id)s',
+                                   {'id': wikiid})
+    if cardrset:
+        ref = cardrset.get_entity(0, 0).absolute_url()
+    else:
+        schema = context.schema
+        if schema.eschema('Card').has_perm(context.req, 'add'):
+            ref = context.req.build_url('view', vid='creation', etype='Card', wikiid=wikiid)
+        else:
+            ref = '#'
+    set_classes(options)
+    return [nodes.reference(rawtext, utils.unescape(rest), refuri=ref,
+                            **options)], []
+
+register_canonical_role('card', card_reference_role)
+
+
+def winclude_directive(name, arguments, options, content, lineno,
+                       content_offset, block_text, state, state_machine):
+    """Include a reST file as part of the content of this reST file.
+
+    same as standard include directive but using config.locate_doc_resource to
+    get actual file to include.
+
+    Most part of this implementation is copied from `include` directive defined
+    in `docutils.parsers.rst.directives.misc`
+    """
+    context = state.document.settings.context
+    source = state_machine.input_lines.source(
+        lineno - state_machine.input_offset - 1)
+    #source_dir = os.path.dirname(os.path.abspath(source))
+    fid = arguments[0]
+    for lang in chain((context.req.lang, context.vreg.property_value('ui.language')),
+                      context.config.available_languages()):
+        rid = '%s_%s.rst' % (fid, lang)
+        resourcedir = context.config.locate_doc_file(rid)
+        if resourcedir:
+            break
+    else:
+        severe = state_machine.reporter.severe(
+              'Problems with "%s" directive path:\nno resource matching %s.'
+              % (name, fid),
+              nodes.literal_block(block_text, block_text), line=lineno)
+        return [severe]
+    path = join(resourcedir, rid)
+    encoding = options.get('encoding', state.document.settings.input_encoding)
+    try:
+        state.document.settings.record_dependencies.add(path)
+        include_file = io.FileInput(
+            source_path=path, encoding=encoding,
+            error_handler=state.document.settings.input_encoding_error_handler,
+            handle_io_errors=None)
+    except IOError, error:
+        severe = state_machine.reporter.severe(
+              'Problems with "%s" directive path:\n%s: %s.'
+              % (name, error.__class__.__name__, error),
+              nodes.literal_block(block_text, block_text), line=lineno)
+        return [severe]
+    try:
+        include_text = include_file.read()
+    except UnicodeError, error:
+        severe = state_machine.reporter.severe(
+              'Problem with "%s" directive:\n%s: %s'
+              % (name, error.__class__.__name__, error),
+              nodes.literal_block(block_text, block_text), line=lineno)
+        return [severe]
+    if options.has_key('literal'):
+        literal_block = nodes.literal_block(include_text, include_text,
+                                            source=path)
+        literal_block.line = 1
+        return literal_block
+    else:
+        include_lines = statemachine.string2lines(include_text,
+                                                  convert_whitespace=1)
+        state_machine.insert_input(include_lines, path)
+        return []
+
+winclude_directive.arguments = (1, 0, 1)
+winclude_directive.options = {'literal': directives.flag,
+                              'encoding': directives.encoding}
+directives.register_directive('winclude', winclude_directive)
+
+class CubicWebReSTParser(Parser):
+    """The (customized) reStructuredText parser."""
+
+    def __init__(self):
+        self.initial_state = 'Body'
+        self.state_classes = states.state_classes
+        self.inliner = states.Inliner()
+        self.statemachine = states.RSTStateMachine(
+              state_classes=self.state_classes,
+              initial_state=self.initial_state,
+              debug=0)
+
+    def parse(self, inputstring, document):
+        """Parse `inputstring` and populate `document`, a document tree."""
+        self.setup_parse(inputstring, document)
+        inputlines = statemachine.string2lines(inputstring,
+                                               convert_whitespace=1)
+        self.statemachine.run(inputlines, document, inliner=self.inliner)
+        self.finish_parse()
+
+
+_REST_PARSER = CubicWebReSTParser()
+
+def rest_publish(context, data):
+    """publish a string formatted as ReStructured Text to HTML
+    
+    :type context: a cubicweb application object
+
+    :type data: str
+    :param data: some ReST text
+
+    :rtype: unicode
+    :return:
+      the data formatted as HTML or the original data if an error occurred
+    """
+    req = context.req
+    if isinstance(data, unicode):
+        encoding = 'unicode'
+    else:
+        encoding = req.encoding
+    settings = {'input_encoding': encoding, 'output_encoding': 'unicode',
+                'warning_stream': StringIO(), 'context': context,
+                # dunno what's the max, severe is 4, and we never want a crash
+                # (though try/except may be a better option...)
+                'halt_level': 10, 
+                }
+    if context:
+        if hasattr(req, 'url'):
+            base_url = req.url()
+        elif hasattr(context, 'absolute_url'):
+            base_url = context.absolute_url()
+        else:
+            base_url = req.base_url()
+    else:
+        base_url = None
+    try:
+        return publish_string(writer=Writer(base_url=base_url),
+                              parser=_REST_PARSER, source=data,
+                              settings_overrides=settings)
+    except Exception:
+        LOGGER.exception('error while publishing ReST text')
+        if not isinstance(data, unicode):
+            data = unicode(data, encoding, 'replace')
+        return html_escape(req._('error while publishing ReST text')
+                           + '\n\n' + data)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/common/schema.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,3 @@
+from warnings import warn
+warn('moved to cubicweb.schema', DeprecationWarning, stacklevel=2)
+from cubicweb.schema import *
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/common/selectors.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,413 @@
+"""This file contains some basic selectors required by application objects.
+
+A selector is responsible for scoring how well an object may be used with a
+given result set (publishing time selection).
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+
+__docformat__ = "restructuredtext en"
+
+from logilab.common.compat import all
+
+from cubicweb import Unauthorized
+from cubicweb.cwvreg import DummyCursorError
+from cubicweb.vregistry import chainall, chainfirst
+from cubicweb.cwconfig import CubicWebConfiguration
+from cubicweb.schema import split_expression
+
+
+def lltrace(selector):
+    # don't wrap selectors if not in development mode
+    if CubicWebConfiguration.mode == 'installed':
+        return selector
+    def traced(cls, *args, **kwargs):
+        ret = selector(cls, *args, **kwargs)
+        cls.lldebug('selector %s returned %s for %s', selector.__name__, ret, cls)
+        return ret
+    return traced
+    
+# very basic selectors ########################################################
+
+def yes_selector(cls, *args, **kwargs):
+    """accept everything"""
+    return 1
+
+@lltrace
+def norset_selector(cls, req, rset, *args, **kwargs):
+    """accept no result set"""
+    if rset is None:
+        return 1
+    return 0
+
+@lltrace
+def rset_selector(cls, req, rset, *args, **kwargs):
+    """accept result set, whatever the number of result"""
+    if rset is not None:
+        return 1
+    return 0
+
+@lltrace
+def anyrset_selector(cls, req, rset, *args, **kwargs):
+    """accept any non empty result set"""
+    if rset and rset.rowcount: # XXX if rset is not None and rset.rowcount > 0:
+        return 1
+    return 0
+    
+@lltrace
+def emptyrset_selector(cls, req, rset, *args, **kwargs):
+    """accept empty result set"""
+    if rset is not None and rset.rowcount == 0:
+        return 1
+    return 0
+
+@lltrace
+def onelinerset_selector(cls, req, rset, row=None, *args, **kwargs):
+    """accept result set with a single line of result"""
+    if rset is not None and (row is not None or rset.rowcount == 1):
+        return 1
+    return 0
+
+@lltrace
+def twolinerset_selector(cls, req, rset, *args, **kwargs):
+    """accept result set with at least two lines of result"""
+    if rset is not None and rset.rowcount > 1:
+        return 1
+    return 0
+
+@lltrace
+def twocolrset_selector(cls, req, rset, *args, **kwargs):
+    """accept result set with at least one line and two columns of result"""
+    if rset is not None and rset.rowcount > 0 and len(rset.rows[0]) > 1:
+        return 1
+    return 0
+
+@lltrace
+def largerset_selector(cls, req, rset, *args, **kwargs):
+    """accept result sets with more rows than the page size
+    """
+    if rset is None or len(rset) <= req.property_value('navigation.page-size'):
+        return 0
+    return 1
+
+@lltrace
+def sortedrset_selector(cls, req, rset, row=None, col=None):
+    """accept sorted result set"""
+    rqlst = rset.syntax_tree()
+    if len(rqlst.children) > 1 or not rqlst.children[0].orderby:
+        return 0
+    return 2
+
+@lltrace
+def oneetyperset_selector(cls, req, rset, *args, **kwargs):
+    """accept result set where entities in the first columns are all of the
+    same type
+    """
+    if len(rset.column_types(0)) != 1:
+        return 0
+    return 1
+
+@lltrace
+def multitype_selector(cls, req, rset, **kwargs):
+    """accepts resultsets containing several entity types"""
+    if rset:
+        etypes = rset.column_types(0)
+        if len(etypes) > 1:
+            return 1
+    return 0
+
+@lltrace
+def searchstate_selector(cls, req, rset, row=None, col=None, **kwargs):
+    """extend the anyrset_selector by checking if the current search state
+    is in a .search_states attribute of the wrapped class
+
+    search state should be either 'normal' or 'linksearch' (eg searching for an
+    object to create a relation with another)
+    """
+    try:
+        if not req.search_state[0] in cls.search_states:
+            return 0
+    except AttributeError:
+        return 1 # class don't care about search state, accept it
+    return 1
+
+@lltrace
+def anonymous_selector(cls, req, *args, **kwargs):
+    """accept if user is anonymous"""
+    if req.cnx.anonymous_connection:
+        return 1
+    return 0
+
+@lltrace
+def not_anonymous_selector(cls, req, *args, **kwargs):
+    """accept if user is not anonymous"""
+    return not anonymous_selector(cls, req, *args, **kwargs)
+
+
+# not so basic selectors ######################################################
+
+@lltrace
+def req_form_params_selector(cls, req, *args, **kwargs):
+    """check if parameters specified by the form_params attribute on
+    the wrapped class are specified in request form parameters
+    """
+    score = 0
+    for param in cls.form_params:
+        val = req.form.get(param)
+        if not val:
+            return 0
+        score += 1
+    return score + 1
+
+@lltrace
+def kwargs_selector(cls, req, *args, **kwargs):
+    """check if arguments specified by the expected_kwargs attribute on
+    the wrapped class are specified in given named parameters
+    """
+    values = []
+    for arg in cls.expected_kwargs:
+        if not arg in kwargs:
+            return 0
+    return 1
+
+@lltrace
+def etype_form_selector(cls, req, *args, **kwargs):
+    """check etype presence in request form *and* accepts conformance"""
+    if 'etype' not in req.form and 'etype' not in kwargs:
+        return 0
+    try:
+        etype = req.form['etype']
+    except KeyError:
+        etype = kwargs['etype']
+    # value is a list or a tuple if web request form received several
+    # values for etype parameter
+    assert isinstance(etype, basestring), "got multiple etype parameters in req.form"
+    if 'Any' in cls.accepts:
+        return 1
+    # no Any found, we *need* exact match
+    if etype not in cls.accepts:
+        return 0
+    # exact match must return a greater value than 'Any'-match
+    return 2
+
+@lltrace
+def _nfentity_selector(cls, req, rset, row=None, col=None, **kwargs):
+    """accept non final entities
+    if row is not specified, use the first one
+    if col is not specified, use the first one
+    """
+    etype = rset.description[row or 0][col or 0]
+    if etype is None: # outer join
+        return 0
+    if cls.schema.eschema(etype).is_final():
+        return 0
+    return 1
+
+@lltrace
+def _rqlcondition_selector(cls, req, rset, row=None, col=None, **kwargs):
+    """accept single entity result set if the entity match an rql condition
+    """
+    if cls.condition:
+        eid = rset[row or 0][col or 0]
+        if 'U' in frozenset(split_expression(cls.condition)):
+            rql = 'Any X WHERE X eid %%(x)s, U eid %%(u)s, %s' % cls.condition
+        else:
+            rql = 'Any X WHERE X eid %%(x)s, %s' % cls.condition
+        try:
+            return len(req.execute(rql, {'x': eid, 'u': req.user.eid}, 'x'))
+        except Unauthorized:
+            return 0
+        
+    return 1
+
+@lltrace
+def _interface_selector(cls, req, rset, row=None, col=None, **kwargs):
+    """accept uniform result sets, and apply the following rules:
+
+    * wrapped class must have a accepts_interfaces attribute listing the
+      accepted ORed interfaces
+    * if row is None, return the sum of values returned by the method
+      for each entity's class in the result set. If any score is 0,
+      return 0.
+    * if row is specified, return the value returned by the method with
+      the entity's class of this row
+    """
+    score = 0
+    # check 'accepts' to give priority to more specific classes
+    if row is None:
+        for etype in rset.column_types(col or 0):
+            eclass = cls.vreg.etype_class(etype)
+            escore = 0
+            for iface in cls.accepts_interfaces:
+                escore += iface.is_implemented_by(eclass)
+            if not escore:
+                return 0
+            score += escore
+            if eclass.id in getattr(cls, 'accepts', ()):
+                score += 2
+        return score + 1
+    etype = rset.description[row][col or 0]
+    if etype is None: # outer join
+        return 0
+    eclass = cls.vreg.etype_class(etype)
+    for iface in cls.accepts_interfaces:
+        score += iface.is_implemented_by(eclass)
+    if score:
+        if eclass.id in getattr(cls, 'accepts', ()):
+            score += 2
+        else:
+            score += 1
+    return score
+
+@lltrace
+def score_entity_selector(cls, req, rset, row=None, col=None, **kwargs):
+    if row is None:
+        rows = xrange(rset.rowcount)
+    else:
+        rows = (row,)
+    for row in rows:
+        try:
+            score = cls.score_entity(rset.get_entity(row, col or 0))
+        except DummyCursorError:
+            # get a dummy cursor error, that means we are currently
+            # using a dummy rset to list possible views for an entity
+            # type, not for an actual result set. In that case, we
+            # don't care of the value, consider the object as selectable
+            return 1
+        if not score:
+            return 0
+    return 1
+
+@lltrace
+def accept_rset_selector(cls, req, rset, row=None, col=None, **kwargs):
+    """simply delegate to cls.accept_rset method"""
+    return cls.accept_rset(req, rset, row=row, col=col)
+
+@lltrace
+def but_etype_selector(cls, req, rset, row=None, col=None, **kwargs):
+    """restrict the searchstate_accept_one_selector to exclude entity's type
+    refered by the .etype attribute
+    """
+    if rset.description[row or 0][col or 0] == cls.etype:
+        return 0
+    return 1
+
+@lltrace
+def etype_rtype_selector(cls, req, rset, row=None, col=None, **kwargs):
+    """only check if the user has read access on the entity's type referred
+    by the .etype attribute and on the relation's type referred by the
+    .rtype attribute if set.
+    """
+    schema = cls.schema
+    perm = getattr(cls, 'require_permission', 'read')
+    if hasattr(cls, 'etype'):
+        eschema = schema.eschema(cls.etype)
+        if not (eschema.has_perm(req, perm) or eschema.has_local_role(perm)):
+            return 0
+    if hasattr(cls, 'rtype'):
+        if not schema.rschema(cls.rtype).has_perm(req, perm):
+            return 0
+    return 1
+
+@lltrace
+def accept_rtype_selector(cls, req, rset, row=None, col=None, **kwargs):
+    if hasattr(cls, 'rtype'):
+        if row is None:
+            for etype in rset.column_types(col or 0):
+                if not cls.relation_possible(etype):
+                    return 0
+        elif not cls.relation_possible(rset.description[row][col or 0]):
+            return 0
+    return 1
+
+@lltrace
+def one_has_relation_selector(cls, req, rset, row=None, col=None, **kwargs):
+    """check if the user has read access on the relations's type refered by the
+    .rtype attribute of the class, and if at least one entity type in the
+    result set has this relation.
+    """
+    schema = cls.schema
+    perm = getattr(cls, 'require_permission', 'read')
+    if not schema.rschema(cls.rtype).has_perm(req, perm):
+        return 0
+    if row is None:
+        for etype in rset.column_types(col or 0):
+            if cls.relation_possible(etype):
+                return 1
+    elif cls.relation_possible(rset.description[row][col or 0]):
+        return 1
+    return 0
+
+@lltrace
+def in_group_selector(cls, req, rset=None, row=None, col=None, **kwargs):
+    """select according to user's groups"""
+    if not cls.require_groups:
+        return 1
+    user = req.user
+    if user is None:
+        return int('guests' in cls.require_groups)
+    score = 0
+    if 'owners' in cls.require_groups and rset:
+        if row is not None:
+            eid = rset[row][col or 0]
+            if user.owns(eid):
+                score = 1
+        else:
+            score = all(user.owns(r[col or 0]) for r in rset)
+    score += user.matching_groups(cls.require_groups)
+    if score:
+        # add 1 so that an object with one matching group take priority
+        # on an object without require_groups
+        return score + 1 
+    return 0
+
+@lltrace
+def add_etype_selector(cls, req, rset, row=None, col=None, **kwargs):
+    """only check if the user has add access on the entity's type refered
+    by the .etype attribute.
+    """
+    if not cls.schema.eschema(cls.etype).has_perm(req, 'add'):
+        return 0
+    return 1
+
+@lltrace
+def contextprop_selector(cls, req, rset, row=None, col=None, context=None,
+                          **kwargs):
+    propval = req.property_value('%s.%s.context' % (cls.__registry__, cls.id))
+    if not propval:
+        propval = cls.context
+    if context is not None and propval is not None and context != propval:
+        return 0
+    return 1
+
+@lltrace
+def primaryview_selector(cls, req, rset, row=None, col=None, view=None,
+                          **kwargs):
+    if view is not None and not view.is_primary():
+        return 0
+    return 1
+
+
+# compound selectors ##########################################################
+
+nfentity_selector = chainall(anyrset_selector, _nfentity_selector)
+interface_selector = chainall(nfentity_selector, _interface_selector)
+
+accept_selector = chainall(nfentity_selector, accept_rset_selector)
+accept_one_selector = chainall(onelinerset_selector, accept_selector)
+
+rqlcondition_selector = chainall(nfentity_selector,
+                                 onelinerset_selector,
+                                 _rqlcondition_selector)
+
+searchstate_accept_selector = chainall(anyrset_selector, searchstate_selector,
+                                       accept_selector)
+searchstate_accept_one_selector = chainall(anyrset_selector, searchstate_selector,
+                                           accept_selector, rqlcondition_selector)
+searchstate_accept_one_but_etype_selector = chainall(searchstate_accept_one_selector,
+                                                     but_etype_selector)
+
+__all__ = [name for name in globals().keys() if name.endswith('selector')]
+__all__ += ['chainall', 'chainfirst']
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/common/tal.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,256 @@
+"""provides simpleTAL extensions for CubicWeb
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+
+__docformat__ = "restructuredtext en"
+
+import sys
+import re
+from os.path import exists, isdir, join
+from logging import getLogger
+from StringIO import StringIO
+        
+from simpletal import simpleTAL, simpleTALES
+
+from logilab.common.decorators import cached
+
+LOGGER = getLogger('cubicweb.tal')
+
+
+class LoggerAdapter(object):
+    """route simpleTAL's debug/warn calls to the 'cubicweb.tal' logger,
+    delegating any other attribute access to the wrapped simpleTAL logger
+    """
+    def __init__(self, tal_logger):
+        # original simpleTAL logger, kept for delegation in __getattr__
+        self.tal_logger = tal_logger
+        
+    def debug(self, msg):
+        LOGGER.debug(msg)
+
+    def warn(self, msg):
+        # map simpleTAL's warn() onto the stdlib logging warning() method
+        LOGGER.warning(msg)
+
+    def __getattr__(self, attrname):
+        # anything not overridden above goes to the wrapped logger
+        return getattr(self.tal_logger, attrname)
+
+
+class CubicWebContext(simpleTALES.Context):
+    """add facilities to access entity / resultset"""
+
+    def __init__(self, options=None, allowPythonPath=1):
+        simpleTALES.Context.__init__(self, options, allowPythonPath)
+        # plug the cubicweb logger in place of simpleTAL's own
+        self.log = LoggerAdapter(self.log)
+
+    def update(self, context):
+        """register every (name, value) pair of `context` as a global
+        template variable
+        """
+        for varname, value in context.items():
+            self.addGlobal(varname, value)
+
+    def addRepeat(self, name, var, initialValue):
+        # NOTE(review): this override adds nothing over the inherited
+        # implementation and could probably be dropped
+        simpleTALES.Context.addRepeat(self, name, var, initialValue)
+
+# XXX FIXME need to find a clean way to define OPCODE values for extensions
+I18N_CONTENT = 18  
+I18N_REPLACE = 19
+RQL_EXECUTE  = 20
+# simpleTAL uses the OPCODE values to define priority over commands.
+# TAL_ITER should have the same priority as TAL_REPEAT (i.e. 3), but
+# we can't use the same OPCODE for two different commands without changing
+# the simpleTAL implementation. Another solution would be to totally override
+# the REPEAT implementation with the ITER one, but some specific operations
+# (involving len() for instance) are not implemented for ITER, so we prefer
+# to keep both implementations for now, and to fool simpleTAL by using a float
+# number between 3 and 4
+TAL_ITER     = 3.1
+
+
+# FIX simpleTAL HTML 4.01 stupidity
+# (simpleTAL never closes tags like INPUT, IMG, HR ...)
+simpleTAL.HTML_FORBIDDEN_ENDTAG.clear()
+
+class CubicWebTemplateCompiler(simpleTAL.HTMLTemplateCompiler):
+    """extends default compiler by adding i18n:content commands"""
+
+    def __init__(self):
+        simpleTAL.HTMLTemplateCompiler.__init__(self)
+        # register compile handlers for the cubicweb-specific opcodes
+        self.commandHandler[I18N_CONTENT] = self.compile_cmd_i18n_content
+        self.commandHandler[I18N_REPLACE] = self.compile_cmd_i18n_replace
+        self.commandHandler[RQL_EXECUTE] = self.compile_cmd_rql
+        self.commandHandler[TAL_ITER] = self.compile_cmd_tal_iter
+
+    def setTALPrefix(self, prefix):
+        # also map the cubicweb-specific attributes onto their opcodes
+        simpleTAL.TemplateCompiler.setTALPrefix(self, prefix)
+        self.tal_attribute_map['i18n:content'] = I18N_CONTENT
+        self.tal_attribute_map['i18n:replace'] = I18N_REPLACE
+        self.tal_attribute_map['rql:execute'] = RQL_EXECUTE
+        self.tal_attribute_map['tal:iter'] = TAL_ITER
+
+    def compile_cmd_i18n_content(self, argument):
+        # XXX tal:content structure=, text= should we support this ?
+        # args layout: (string, replace-tag flag, structure flag, end symbol),
+        # unpacked by the interpreter's cmd_i18n handler
+        structure_flag = 0
+        return (I18N_CONTENT, (argument, False, structure_flag, self.endTagSymbol))
+
+    def compile_cmd_i18n_replace(self, argument):
+        # XXX tal:content structure=, text= should we support this ?
+        # deliberately emits the I18N_CONTENT opcode: the interpreter's
+        # cmd_i18n handler discriminates on the replace flag (second item)
+        structure_flag = 0
+        return (I18N_CONTENT, (argument, True, structure_flag, self.endTagSymbol))
+
+    def compile_cmd_rql(self, argument):
+        return (RQL_EXECUTE, (argument, self.endTagSymbol))
+
+    def compile_cmd_tal_iter(self, argument):
+        # reuse the standard repeat parsing, then re-tag the command as
+        # TAL_ITER. NOTE(review): the end_tag_symbol returned by
+        # compileCmdRepeat is discarded in favor of self.endTagSymbol --
+        # assumed identical, confirm against the simpleTAL implementation
+        original_id, (var_name, expression, end_tag_symbol) = \
+                     simpleTAL.HTMLTemplateCompiler.compileCmdRepeat(self, argument)
+        return (TAL_ITER, (var_name, expression, self.endTagSymbol))
+
+    def getTemplate(self):
+        # return our template class so expand() uses the cubicweb interpreter
+        return CubicWebTemplate(self.commandList, self.macroMap, self.symbolLocationTable)
+
+    def compileCmdAttributes (self, argument):
+        """XXX modified to support single attribute
+        definition ending by a ';'
+
+        backport this to simpleTAL
+        """
+        # Compile tal:attributes into attribute command
+        # Argument: [(attributeName, expression)]
+        
+        # Break up the list of attribute settings first
+        commandArgs = []
+        # We only want to match semi-colons that are not escaped
+        argumentSplitter =  re.compile(r'(?<!;);(?!;)')
+        for attributeStmt in argumentSplitter.split(argument):
+            # a trailing ';' yields an empty statement: skip it (this is the
+            # modification wrt the simpleTAL original)
+            if not attributeStmt.strip():
+                continue
+            #  remove any leading space and un-escape any semi-colons
+            attributeStmt = attributeStmt.lstrip().replace(';;', ';')
+            # Break each attributeStmt into name and expression
+            stmtBits = attributeStmt.split(' ')
+            if (len (stmtBits) < 2):
+                # Error, badly formed attributes command
+                msg = "Badly formed attributes command '%s'.  Attributes commands must be of the form: 'name expression[;name expression]'" % argument
+                self.log.error(msg)
+                raise simpleTAL.TemplateParseException(self.tagAsText(self.currentStartTag), msg)
+            attName = stmtBits[0]
+            attExpr = " ".join(stmtBits[1:])
+            commandArgs.append((attName, attExpr))
+        return (simpleTAL.TAL_ATTRIBUTES, commandArgs)
+
+
+class CubicWebTemplateInterpreter(simpleTAL.TemplateInterpreter):
+    """provides implementation for interpreting cubicweb extensions"""
+    def __init__(self):
+        simpleTAL.TemplateInterpreter.__init__(self)
+        # both i18n:content and i18n:replace compile down to I18N_CONTENT
+        # (see CubicWebTemplateCompiler), hence a single handler here
+        self.commandHandler[I18N_CONTENT] = self.cmd_i18n
+        # tal:iter reuses the standard repeat implementation; only its
+        # compile-time priority differs
+        self.commandHandler[TAL_ITER] = self.cmdRepeat
+        # self.commandHandler[RQL_EXECUTE] = self.cmd_rql
+
+    def cmd_i18n(self, command, args):
+        """i18n:content and i18n:replace implementation"""
+        string, replace_flag, structure_flag, end_symbol = args
+        if replace_flag:
+            # i18n:replace: suppress the tag itself, keep the translation only
+            self.outputTag = 0
+        # translate through the '_' callable expected in the context globals
+        result = self.context.globals['_'](string)
+        self.tagContent = (0, result)
+        # skip the original tag content and resume after the end tag
+        self.movePCForward = self.symbolTable[end_symbol]
+        self.programCounter += 1
+
+
+class CubicWebTemplate(simpleTAL.HTMLTemplate):
+    """overrides HTMLTemplate.expand() to systematically use CubicWebInterpreter
+    """
+    def expand(self, context, outputFile):
+        interpreter = CubicWebTemplateInterpreter()
+        interpreter.initialise(context, outputFile)
+        simpleTAL.HTMLTemplate.expand(self, context, outputFile,# outputEncoding='unicode',
+                                      interpreter=interpreter)
+
+    def expandInline(self, context, outputFile, interpreter):
+        """ Internally used when expanding a template that is part of a context."""
+        try:
+            interpreter.execute(self)
+        except UnicodeError, unierror:
+            LOGGER.exception(str(unierror))
+            # re-raise as a simpleTALES error while preserving the original
+            # traceback (python 2 three-argument raise)
+            raise simpleTALES.ContextContentException("found non-unicode %r string in Context!" % unierror.args[1]), None, sys.exc_info()[-1]
+
+
+def compile_template(template):
+    """compiles a TAL template string
+    :type template: unicode
+    :param template: a TAL-compliant template string
+    """
+    string_buffer = StringIO(template)
+    compiler = CubicWebTemplateCompiler()
+    compiler.parseTemplate(string_buffer) # , inputEncoding='unicode')
+    return compiler.getTemplate()
+
+
+def compile_template_file(filepath):
+    """compiles a TAL template file
+    :type filepath: str
+    :param template: path of the file to compile 
+    """
+    fp = file(filepath)
+    file_content = unicode(fp.read()) # template file should be pure ASCII
+    fp.close()
+    return compile_template(file_content)
+
+
+def evaluatePython (self, expr):
+    """replacement for simpleTALES.Context.evaluatePython (monkey-patched
+    below): evaluate a python: expression with context variables unwrapped
+    to their raw values, re-raising any error with the offending expression
+    included in the message
+    """
+    if not self.allowPythonPath:
+        return self.false
+    # build the evaluation globals from the context globals, unwrapping
+    # ContextVariable instances to their raw values
+    globals = {}
+    for name, value in self.globals.items():
+        if isinstance (value, simpleTALES.ContextVariable):
+            value = value.rawValue()
+        globals[name] = value
+    # expose the standard TALES path functions to the evaluated expression
+    globals['path'] = self.pythonPathFuncs.path
+    globals['string'] = self.pythonPathFuncs.string
+    globals['exists'] = self.pythonPathFuncs.exists
+    globals['nocall'] = self.pythonPathFuncs.nocall
+    globals['test'] = self.pythonPathFuncs.test
+    # same unwrapping for the local variables
+    locals = {}
+    for name, value in self.locals.items():
+        if (isinstance (value, simpleTALES.ContextVariable)):
+            value = value.rawValue()
+        locals[name] = value
+    # XXX precompile expr will avoid late syntax error
+    try:
+        result = eval(expr, globals, locals)
+    except Exception, ex:
+        # decorate the exception message with the failing expression and
+        # re-raise with the original traceback (python 2 three-argument raise)
+        ex = ex.__class__('in %r: %s' % (expr, ex))
+        raise ex, None, sys.exc_info()[-1]
+    if (isinstance (result, simpleTALES.ContextVariable)):
+        return result.value()
+    return result
+
+# monkey-patch simpleTALES with the version above
+simpleTALES.Context.evaluatePython = evaluatePython
+
+
+class talbased(object):
+    """decorator for rendering methods: the decorated method returns the
+    template variables (typically a dict), which are rendered through the
+    named TAL template found on the instance's vregistry path
+    """
+    def __init__(self, filename, write=True):
+##         if not osp.isfile(filepath):
+##             # print "[tal.py] just for tests..."
+##             # get parent frame
+##             directory = osp.abspath(osp.dirname(sys._getframe(1).f_globals['__file__']))
+##             filepath = osp.join(directory, filepath)
+        # name of the template file, resolved in _compiled_template()
+        self.filename = filename
+        # when true, write the rendered html through instance.w() instead of
+        # returning it
+        self.write = write
+
+    def __call__(self, viewfunc):
+        def wrapped(instance, *args, **kwargs):
+            variables = viewfunc(instance, *args, **kwargs)
+            html = instance.tal_render(self._compiled_template(instance), variables)
+            if self.write:
+                instance.w(html)
+            else:
+                return html
+        return wrapped
+
+    def _compiled_template(self, instance):
+        # look for the template file in each directory of the vregistry path
+        for fileordirectory in instance.config.vregistry_path():
+            filepath = join(fileordirectory, self.filename)
+            if isdir(fileordirectory) and exists(filepath):
+                return compile_template_file(filepath)
+        raise Exception('no such template %s' % self.filename)
+    # compile at most once per decorator instance (result cached on self)
+    _compiled_template = cached(_compiled_template, 0)
+    
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/common/test/data/bootstrap_packages	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+efile, eclasstags
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/common/test/data/entities.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,15 @@
+from cubicweb.entities import AnyEntity, fetch_config
+
+class Personne(AnyEntity):
+    """customized class for Person entities"""
+    id = 'Personne'
+    # fetch nom/prenom by default
+    fetch_attrs, fetch_order = fetch_config(['nom', 'prenom'])
+    rest_attr = 'nom'
+
+class Societe(AnyEntity):
+    """customized class for Societe entities"""
+    id = 'Societe'
+    fetch_attrs = ('nom',)
+    
+class AnotherNote(AnyEntity):
+    """entity class for the AnotherNote test etype"""
+    id = 'AnotherNote'
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/common/test/data/migration/0.0.3_Any.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+coucou
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/common/test/data/migration/0.0.4_Any.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+coucou
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/common/test/data/migration/0.1.0_Any.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+coucou
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/common/test/data/migration/0.1.0_common.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+"""common to all configuration"""
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/common/test/data/migration/0.1.0_repository.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+"""repository specific"""
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/common/test/data/migration/0.1.0_web.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+"""web only"""
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/common/test/data/migration/0.1.2_Any.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+coucou
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/common/test/data/migration/depends.map	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,5 @@
+0.0.2: 2.3.0
+0.0.3: 2.4.0
+# missing 0.0.4 entry, that's alright
+0.1.0: 2.6.0
+0.1.2: 2.10.0
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/common/test/data/schema/Affaire.sql	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,2 @@
+sujet varchar(128)
+ref   varchar(12)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/common/test/data/schema/Note.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,16 @@
+from cubicweb.schema import format_constraint
+
+class AnotherNote(EntityType):
+    # descr is a rich-text attribute: the *_format companion attribute holds
+    # its mime type (defaulting to text/rest), constrained by format_constraint
+    descr_format = String(meta=True, internationalizable=True,
+                                default='text/rest', constraints=[format_constraint])
+    descr = String(fulltextindexed=True,
+                   description=_('more detailed description'))
+    # second rich-text attribute following the same pattern
+    descr2_format = String(meta=True, internationalizable=True,
+                                default='text/rest', constraints=[format_constraint])
+    descr2 = String(fulltextindexed=True,
+                    description=_('more detailed description'))
+    
+
+class SubNote(AnotherNote):
+    # schema-level specialization of AnotherNote (yams inheritance)
+    __specializes_schema__ = True
+    descr3 = String()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/common/test/data/schema/Note.sql	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,3 @@
+date varchar(10)
+type char(1)
+para varchar(512)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/common/test/data/schema/Personne.sql	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,14 @@
+nom    ivarchar(64) NOT NULL
+prenom ivarchar(64)
+sexe   char(1) DEFAULT 'M' 
+promo  choice('bon','pasbon')
+titre  ivarchar(128)
+adel   varchar(128)
+ass    varchar(128)
+web    varchar(128)
+tel    integer
+fax    integer
+datenaiss datetime
+test   boolean 
+description text
+salary float
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/common/test/data/schema/Societe.sql	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,10 @@
+nom  ivarchar(64)
+web varchar(128)
+tel  integer
+fax  integer
+rncs varchar(32)
+ad1  varchar(128)
+ad2  varchar(128)
+ad3  varchar(128)
+cp   varchar(12)
+ville varchar(32)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/common/test/data/schema/relations.rel	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,13 @@
+Personne travaille Societe
+Personne evaluee Note
+EUser evaluee Note
+Societe evaluee Note
+Personne concerne Affaire
+Affaire concerne Societe
+Personne evaluee Personne
+
+Note ecrit_par Personne inline CONSTRAINT E concerns P, X version_of P
+Personne connait Personne symetric
+
+Tag tags Note
+Tag tags Personne
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/common/test/data/server_migration/bootstrapmigration_repository.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+"""allways executed before all others in server migration"""
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/common/test/unittest_entity.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,477 @@
+# -*- coding: utf-8 -*-
+"""unit tests for cubicweb.web.views.entities module"""
+
+from cubicweb.devtools.apptest import EnvBasedTC
+
+from mx.DateTime import DateTimeType, now
+from cubicweb import Binary
+
+class EntityTC(EnvBasedTC):
+
+##     def setup_database(self):
+##         self.add_entity('Personne', nom=u'di mascio', prenom=u'adrien')
+##         self.add_entity('Task', title=u'fait ca !', description=u'et plus vite', start=now())
+##         self.add_entity('Tag', name=u'x')
+##         self.add_entity('Link', title=u'perdu', url=u'http://www.perdu.com',
+##                         embed=False)
+    
+    def test_boolean_value(self):
+        """a newly created entity instance evaluates to True"""
+        e = self.etype_instance('Tag')
+        self.failUnless(e)
+
+    def test_yams_inheritance(self):
+        """SubNote instances use the AnotherNote entity class, and class
+        resolution is stable across instantiations"""
+        from entities import AnotherNote
+        e = self.etype_instance('SubNote')
+        self.assertIsInstance(e, AnotherNote)
+        e2 = self.etype_instance('SubNote')
+        self.assertIs(e.__class__, e2.__class__)
+
+    def test_has_eid(self):
+        """has_eid() is False until the entity gets an integer eid (0 included)"""
+        e = self.etype_instance('Tag')
+        self.assertEquals(e.eid, None)
+        self.assertEquals(e.has_eid(), False)
+        e.eid = 'X'
+        self.assertEquals(e.has_eid(), False)
+        e.eid = 0
+        self.assertEquals(e.has_eid(), True)
+        e.eid = 2
+        self.assertEquals(e.has_eid(), True)
+        
+    def test_copy(self):
+        """copy_relations() copies subject relations (ecrit_par) but not
+        object relations (tags)"""
+        self.add_entity('Tag', name=u'x')
+        p = self.add_entity('Personne', nom=u'toto')
+        oe = self.add_entity('Note', type=u'x')
+        self.execute('SET T ecrit_par U WHERE T eid %(t)s, U eid %(u)s',
+                     {'t': oe.eid, 'u': p.eid}, ('t','u'))
+        self.execute('SET TAG tags X WHERE X eid %(x)s', {'x': oe.eid}, 'x')
+        e = self.add_entity('Note', type=u'z')
+        e.copy_relations(oe.eid)
+        self.assertEquals(len(e.ecrit_par), 1)
+        self.assertEquals(e.ecrit_par[0].eid, p.eid)
+        self.assertEquals(len(e.reverse_tags), 0)
+        
+    def test_copy_with_nonmeta_composite_inlined(self):
+        """copy_relations() skips composite relations: the copy must not
+        share the original's composite target"""
+        p = self.add_entity('Personne', nom=u'toto')
+        oe = self.add_entity('Note', type=u'x')
+        self.schema['ecrit_par'].set_rproperty('Note', 'Personne', 'composite', 'subject')
+        self.execute('SET T ecrit_par U WHERE T eid %(t)s, U eid %(u)s',
+                     {'t': oe.eid, 'u': p.eid}, ('t','u'))
+        e = self.add_entity('Note', type=u'z')
+        e.copy_relations(oe.eid)
+        self.failIf(e.ecrit_par)
+        self.failUnless(oe.ecrit_par)
+            
+    def test_copy_with_composite(self):
+        """copy_relations() must not copy use_email / primary_email
+        (composite email addresses stay with the original user)"""
+        user = self.user()
+        adeleid = self.execute('INSERT EmailAddress X: X address "toto@logilab.org", U use_email X WHERE U login "admin"')[0][0]
+        e = self.entity('Any X WHERE X eid %(x)s', {'x':user.eid}, 'x')
+        self.assertEquals(e.use_email[0].address, "toto@logilab.org")
+        self.assertEquals(e.use_email[0].eid, adeleid)
+        usereid = self.execute('INSERT EUser X: X login "toto", X upassword "toto", X in_group G, X in_state S '
+                               'WHERE G name "users", S name "activated"')[0][0]
+        e = self.entity('Any X WHERE X eid %(x)s', {'x':usereid}, 'x')
+        e.copy_relations(user.eid)
+        self.failIf(e.use_email)
+        self.failIf(e.primary_email)
+        
+    def test_copy_with_non_initial_state(self):
+        """copy_relations() must not copy in_state: the copy keeps the
+        workflow's initial state (activated), not the source's (deactivated)"""
+        user = self.user()
+        eid = self.execute('INSERT EUser X: X login "toto", X upassword %(pwd)s, X in_group G WHERE G name "users"',
+                           {'pwd': 'toto'})[0][0]
+        self.commit()
+        self.execute('SET X in_state S WHERE X eid %(x)s, S name "deactivated"', {'x': eid}, 'x')
+        self.commit()
+        eid2 = self.execute('INSERT EUser X: X login "tutu", X upassword %(pwd)s', {'pwd': 'toto'})[0][0]
+        e = self.entity('Any X WHERE X eid %(x)s', {'x': eid2}, 'x')
+        e.copy_relations(eid)
+        self.commit()
+        e.clear_related_cache('in_state', 'subject')
+        self.assertEquals(e.state, 'activated')
+
+    def test_related_cache_both(self):
+        """accessing a relation fills the _related_cache on both the subject
+        and the object side, but not on further-related entities"""
+        user = self.entity('Any X WHERE X eid %(x)s', {'x':self.user().eid}, 'x')
+        adeleid = self.execute('INSERT EmailAddress X: X address "toto@logilab.org", U use_email X WHERE U login "admin"')[0][0]
+        self.commit()
+        self.assertEquals(user._related_cache.keys(), [])
+        email = user.primary_email[0]
+        self.assertEquals(sorted(user._related_cache), ['primary_email_subject'])
+        self.assertEquals(email._related_cache.keys(), ['primary_email_object'])
+        groups = user.in_group
+        self.assertEquals(sorted(user._related_cache), ['in_group_subject', 'primary_email_subject'])
+        for group in groups:
+            self.failIf('in_group_subject' in group._related_cache, group._related_cache.keys())
+            
+    def test_related_limit(self):
+        """related() honors its limit argument"""
+        p = self.add_entity('Personne', nom=u'di mascio', prenom=u'adrien')
+        for tag in u'abcd':
+            self.add_entity('Tag', name=tag)
+        self.execute('SET X tags Y WHERE X is Tag, Y is Personne')
+        self.assertEquals(len(p.related('tags', 'object', limit=2)), 2)
+        self.assertEquals(len(p.related('tags', 'object')), 4)
+
+        
+    def test_fetch_rql(self):
+        """fetch_rql() generates the expected RQL for various fetch_attrs
+        configurations: unknown attributes, non-final relations (with and
+        without recursion), symmetric/optional relations, cardinality > 1"""
+        user = self.user()
+        Personne = self.vreg.etype_class('Personne')
+        Societe = self.vreg.etype_class('Societe')
+        Note = self.vreg.etype_class('Note')
+        peschema = Personne.e_schema
+        seschema = Societe.e_schema
+        peschema.subject_relation('travaille').set_rproperty(peschema, seschema, 'cardinality', '1*')
+        peschema.subject_relation('connait').set_rproperty(peschema, peschema, 'cardinality', '11')
+        peschema.subject_relation('evaluee').set_rproperty(peschema, Note.e_schema, 'cardinality', '1*')
+        seschema.subject_relation('evaluee').set_rproperty(seschema, Note.e_schema, 'cardinality', '1*')
+        # testing basic fetch_attrs attribute
+        self.assertEquals(Personne.fetch_rql(user),
+                          'Any X,AA,AB,AC ORDERBY AA ASC WHERE X is Personne, X nom AA, X prenom AB, X modification_date AC')
+        pfetch_attrs = Personne.fetch_attrs
+        sfetch_attrs = Societe.fetch_attrs
+        try:
+            # testing unknown attributes
+            Personne.fetch_attrs = ('bloug', 'beep')
+            self.assertEquals(Personne.fetch_rql(user), 'Any X WHERE X is Personne')            
+            # testing one non final relation
+            Personne.fetch_attrs = ('nom', 'prenom', 'travaille')
+            self.assertEquals(Personne.fetch_rql(user),
+                              'Any X,AA,AB,AC,AD ORDERBY AA ASC WHERE X is Personne, X nom AA, X prenom AB, X travaille AC, AC nom AD')
+            # testing two non final relations
+            Personne.fetch_attrs = ('nom', 'prenom', 'travaille', 'evaluee')
+            self.assertEquals(Personne.fetch_rql(user),
+                              'Any X,AA,AB,AC,AD,AE,AF ORDERBY AA ASC,AF DESC WHERE X is Personne, X nom AA, '
+                              'X prenom AB, X travaille AC, AC nom AD, X evaluee AE, AE modification_date AF')
+            # testing one non final relation with recursion
+            Personne.fetch_attrs = ('nom', 'prenom', 'travaille')
+            Societe.fetch_attrs = ('nom', 'evaluee')
+            self.assertEquals(Personne.fetch_rql(user),
+                              'Any X,AA,AB,AC,AD,AE,AF ORDERBY AA ASC,AF DESC WHERE X is Personne, X nom AA, X prenom AB, '
+                              'X travaille AC, AC nom AD, AC evaluee AE, AE modification_date AF'
+                              )
+            # testing symetric relation
+            Personne.fetch_attrs = ('nom', 'connait')
+            self.assertEquals(Personne.fetch_rql(user), 'Any X,AA,AB ORDERBY AA ASC WHERE X is Personne, X nom AA, X connait AB')
+            # testing optional relation
+            peschema.subject_relation('travaille').set_rproperty(peschema, seschema, 'cardinality', '?*')
+            Personne.fetch_attrs = ('nom', 'prenom', 'travaille')
+            Societe.fetch_attrs = ('nom',)
+            self.assertEquals(Personne.fetch_rql(user),
+                              'Any X,AA,AB,AC,AD ORDERBY AA ASC WHERE X is Personne, X nom AA, X prenom AB, X travaille AC?, AC nom AD')
+            # testing relation with cardinality > 1
+            peschema.subject_relation('travaille').set_rproperty(peschema, seschema, 'cardinality', '**')
+            self.assertEquals(Personne.fetch_rql(user),
+                              'Any X,AA,AB ORDERBY AA ASC WHERE X is Personne, X nom AA, X prenom AB')
+            # XXX test unauthorized attribute
+        finally:
+            # restore class attributes mutated above
+            Personne.fetch_attrs = pfetch_attrs
+            Societe.fetch_attrs = sfetch_attrs
+
+            
+    def test_entity_unrelated(self):
+        """vocabulary() only proposes entities not already related"""
+        p = self.add_entity('Personne', nom=u'di mascio', prenom=u'adrien')
+        e = self.add_entity('Tag', name=u'x')
+        rschema = e.e_schema.subject_relation('tags')
+        related = [r.eid for r in e.tags]
+        self.failUnlessEqual(related, [])
+        unrelated = [reid for rview, reid in e.vocabulary(rschema, 'subject')]
+        self.failUnless(p.eid in unrelated)
+        self.execute('SET X tags Y WHERE X is Tag, Y is Personne')
+        e = self.entity('Any X WHERE X is Tag')
+        unrelated = [reid for rview, reid in e.vocabulary(rschema, 'subject')]
+        self.failIf(p.eid in unrelated)
+
+    def test_entity_unrelated_limit(self):
+        """vocabulary() honors its limit argument"""
+        e = self.add_entity('Tag', name=u'x')
+        self.add_entity('Personne', nom=u'di mascio', prenom=u'adrien')
+        self.add_entity('Personne', nom=u'di mascio', prenom=u'gwen')
+        rschema = e.e_schema.subject_relation('tags')
+        self.assertEquals(len(e.vocabulary(rschema, 'subject', limit=1)),
+                          1)
+        
+    def test_new_entity_unrelated(self):
+        """vocabulary() on an unsaved entity proposes all possible targets"""
+        e = self.etype_instance('EUser')
+        rschema = e.e_schema.subject_relation('in_group')
+        unrelated = [reid for rview, reid in e.vocabulary(rschema, 'subject')]
+        # should be default groups but owners, i.e. managers, users, guests
+        self.assertEquals(len(unrelated), 3)
+
+
+    def test_rtags_expansion(self):
+        """__rtags__ declarations are expanded per (relation, etype, role)
+        and properly refined by subclasses"""
+        from cubicweb.entities import AnyEntity
+        class Personne(AnyEntity):
+            id = 'Personne'
+            __rtags__ = {
+                ('travaille', 'Societe', 'subject') : set(('primary',)),
+                ('evaluee', '*', 'subject') : set(('secondary',)),
+                'ecrit_par' : set(('inlineview',)),
+                }
+        self.vreg.register_vobject_class(Personne)
+        rtags = Personne.rtags
+        self.assertEquals(rtags.get_tags('evaluee', 'Note', 'subject'), set(('secondary', 'link')))
+        self.assertEquals(rtags.is_inlined('evaluee', 'Note', 'subject'), False)
+        self.assertEquals(rtags.get_tags('evaluee', 'Personne', 'subject'), set(('secondary', 'link')))
+        self.assertEquals(rtags.is_inlined('evaluee', 'Personne', 'subject'), False)
+        self.assertEquals(rtags.get_tags('ecrit_par', 'Note', 'object'), set(('inlineview', 'link')))
+        self.assertEquals(rtags.is_inlined('ecrit_par', 'Note', 'object'), True)
+        # a subclass overriding one (relation, etype, role) entry keeps the
+        # other inherited entries untouched
+        class Personne2(Personne):
+            id = 'Personne'
+            __rtags__ = {
+                ('evaluee', 'Note', 'subject') : set(('inlineview',)),
+                }
+        self.vreg.register_vobject_class(Personne2)
+        rtags = Personne2.rtags
+        self.assertEquals(rtags.get_tags('evaluee', 'Note', 'subject'), set(('inlineview', 'link')))
+        self.assertEquals(rtags.is_inlined('evaluee', 'Note', 'subject'), True)
+        self.assertEquals(rtags.get_tags('evaluee', 'Personne', 'subject'), set(('secondary', 'link')))
+        self.assertEquals(rtags.is_inlined('evaluee', 'Personne', 'subject'), False)
+    
+    def test_relations_by_category(self):
+        """relations_by_category() yields the expected relations per UI
+        category for EUser and Personne"""
+        e = self.etype_instance('EUser')
+        def rbc(iterable):
+            # flatten (rschema, tschemas, role) triples into (name, role)
+            return [(rschema.type, x) for rschema, tschemas, x in iterable]
+        self.assertEquals(rbc(e.relations_by_category('primary')),
+                          [('login', 'subject'), ('upassword', 'subject'),
+                           ('in_group', 'subject'), ('in_state', 'subject'),
+                           ('eid', 'subject'),])
+        # firstname and surname are put in secondary category in views.entities.EUserEntity
+        self.assertListEquals(rbc(e.relations_by_category('secondary')),
+                              [('firstname', 'subject'), ('surname', 'subject')])
+        self.assertListEquals(rbc(e.relations_by_category('generic')),
+                              [('primary_email', 'subject'),
+                               ('evaluee', 'subject'),
+                               ('for_user', 'object'),
+                               ('bookmarked_by', 'object')])
+        # owned_by is defined both as subject and object relations on EUser
+        self.assertListEquals(rbc(e.relations_by_category('generated')),
+                              [('last_login_time', 'subject'),
+                               ('created_by', 'subject'),
+                               ('creation_date', 'subject'),
+                               ('is', 'subject'),
+                               ('is_instance_of', 'subject'),
+                               ('modification_date', 'subject'),
+                               ('owned_by', 'subject'),
+                               ('created_by', 'object'),
+                               ('wf_info_for', 'object'),
+                               ('owned_by', 'object')])
+        e = self.etype_instance('Personne')
+        self.assertListEquals(rbc(e.relations_by_category('primary')),
+                              [('nom', 'subject'), ('eid', 'subject')])
+        self.assertListEquals(rbc(e.relations_by_category('secondary')),
+                              [('prenom', 'subject'),
+                               ('sexe', 'subject'),
+                               ('promo', 'subject'),
+                               ('titre', 'subject'),
+                               ('adel', 'subject'),
+                               ('ass', 'subject'),
+                               ('web', 'subject'),
+                               ('tel', 'subject'),
+                               ('fax', 'subject'),
+                               ('datenaiss', 'subject'),
+                               ('test', 'subject'),
+                               ('description', 'subject'),
+                               ('salary', 'subject')])
+        self.assertListEquals(rbc(e.relations_by_category('generic')),
+                              [('concerne', 'subject'),
+                               ('connait', 'subject'),
+                               ('evaluee', 'subject'),
+                               ('travaille', 'subject'),
+                               ('ecrit_par', 'object'),
+                               ('evaluee', 'object'),
+                               ('tags', 'object')])
+        self.assertListEquals(rbc(e.relations_by_category('generated')),
+                              [('created_by', 'subject'),
+                               ('creation_date', 'subject'),
+                               ('is', 'subject'),
+                               ('is_instance_of', 'subject'),
+                               ('modification_date', 'subject'),
+                               ('owned_by', 'subject')])
+        
+
+    def test_printable_value_string(self):
+        """printable_value() renders rest/html/plain/page-template content,
+        and converts between formats when an explicit target format is given"""
+        e = self.add_entity('Card', title=u'rest test', content=u'du :eid:`1:*ReST*`',
+                            content_format=u'text/rest')
+        self.assertEquals(e.printable_value('content'),
+                          '<p>du <a class="reference" href="http://testing.fr/cubicweb/egroup/managers">*ReST*</a></p>\n')
+        e['content'] = 'du <em>html</em> <ref rql="EUser X">users</ref>'
+        e['content_format'] = 'text/html'
+        self.assertEquals(e.printable_value('content'),
+                          'du <em>html</em> <a href="http://testing.fr/cubicweb/view?rql=EUser%20X">users</a>')
+        e['content'] = 'du *texte*'
+        e['content_format'] = 'text/plain'
+        self.assertEquals(e.printable_value('content'),
+                          '<p>\ndu *texte*\n</p>')
+        e['title'] = 'zou'
+        e['content'] = '<h1 tal:content="self/title">titre</h1>'
+        e['content_format'] = 'text/cubicweb-page-template'
+        self.assertEquals(e.printable_value('content'),
+                          '<h1>zou</h1>')
+        
+        #e = self.etype_instance('Task')
+        e['content'] = '''\
+a title
+=======
+du :eid:`1:*ReST*`'''
+        e['content_format'] = 'text/rest'
+        self.assertEquals(e.printable_value('content', format='text/plain'),
+                          e['content'])
+
+        e['content'] = u'<b>yo (zou éà ;)</b>'
+        e['content_format'] = 'text/html'
+        self.assertEquals(e.printable_value('content', format='text/plain').strip(),
+                          u'**yo (zou éà ;)**')
+
+    def test_printable_value_bytes(self):
+        e = self.add_entity('File', data=Binary('lambda x: 1'), data_format=u'text/x-python',
+                            data_encoding=u'ascii', name=u'toto.py')
+        from cubicweb.common import mttransforms
+        if mttransforms.HAS_PYGMENTS_TRANSFORMS:
+            self.assertEquals(e.printable_value('data'),
+                              '''<div class="highlight"><pre><span class="k">lambda</span> <span class="n">x</span><span class="p">:</span> <span class="mf">1</span>
+</pre></div>
+''')
+        else:
+            self.assertEquals(e.printable_value('data'),
+                              '''<pre class="python">
+<span style="color: #C00000;">lambda</span> <span style="color: #000000;">x</span><span style="color: #0000C0;">:</span> <span style="color: #0080C0;">1</span>
+</pre>
+''')
+        
+        e = self.add_entity('File', data=Binary('*héhéhé*'), data_format=u'text/rest',
+                            data_encoding=u'utf-8', name=u'toto.txt')
+        self.assertEquals(e.printable_value('data'),
+                          u'<p><em>héhéhé</em></p>\n')
+
+    def test_printable_value_bad_html(self):
+        """make sure we don't crash if we try to render invalid XHTML strings"""
+        e = self.add_entity('Card', title=u'bad html', content=u'<div>R&D<br>',
+                            content_format=u'text/html')
+        tidy = lambda x: x.replace('\n', '')
+        self.assertEquals(tidy(e.printable_value('content')),
+                          '<div>R&amp;D<br/></div>')
+        e['content'] = u'yo !! R&D <div> pas fermé'
+        self.assertEquals(tidy(e.printable_value('content')),
+                          u'yo !! R&amp;D <div> pas fermé</div>')
+        e['content'] = u'R&D'
+        self.assertEquals(tidy(e.printable_value('content')), u'R&amp;D')
+        e['content'] = u'R&D;'
+        self.assertEquals(tidy(e.printable_value('content')), u'R&amp;D;')
+        e['content'] = u'yo !! R&amp;D <div> pas fermé'
+        self.assertEquals(tidy(e.printable_value('content')),
+                          u'yo !! R&amp;D <div> pas fermé</div>')
+        e['content'] = u'été <div> été'
+        self.assertEquals(tidy(e.printable_value('content')),
+                          u'été <div> été</div>')
+        e['content'] = u'C&apos;est un exemple s&eacute;rieux'
+        self.assertEquals(tidy(e.printable_value('content')),
+                          u"C'est un exemple sérieux")
+        # make sure valid xhtml is left untouched
+        e['content'] = u'<div>R&amp;D<br/></div>'
+        self.assertEquals(e.printable_value('content'), e['content'])
+        e['content'] = u'<div>été</div>'
+        self.assertEquals(e.printable_value('content'), e['content'])
+        e['content'] = u'été'
+        self.assertEquals(e.printable_value('content'), e['content'])
+        
+
+    def test_entity_formatted_attrs(self):
+        e = self.etype_instance('Note')
+        self.assertEquals(e.formatted_attrs(), [])
+        e = self.etype_instance('File')
+        self.assertEquals(e.formatted_attrs(), ['description'])
+        e = self.etype_instance('AnotherNote')
+        self.assertEquals(e.formatted_attrs(), ['descr', 'descr2'])
+        
+        
+    def test_fulltextindex(self):
+        e = self.etype_instance('File')
+        e['name'] = 'an html file'
+        e['description'] = 'du <em>html</em>'
+        e['description_format'] = 'text/html'
+        e['data'] = Binary('some <em>data</em>')
+        e['data_format'] = 'text/html'
+        e['data_encoding'] = 'ascii'
+        self.assertEquals(set(e.get_words()), 
+                          set(['an', 'html', 'file', 'du', 'html', 'some', 'data']))
+
+        
+    def test_nonregr_relation_cache(self):
+        p1 = self.add_entity('Personne', nom=u'di mascio', prenom=u'adrien')
+        p2 = self.add_entity('Personne', nom=u'toto')
+        self.execute('SET X evaluee Y WHERE X nom "di mascio", Y nom "toto"')
+        self.assertEquals(p1.evaluee[0].nom, "toto")
+        self.failUnless(not p1.reverse_evaluee)
+        
+    def test_complete_relation(self):
+        self.execute('SET RT add_permission G WHERE RT name "wf_info_for", G name "managers"')
+        self.commit()
+        try:
+            eid = self.execute('INSERT TrInfo X: X comment "zou", X wf_info_for U,'
+                               'X from_state S1, X to_state S2 WHERE '
+                               'U login "admin", S1 name "activated", S2 name "deactivated"')[0][0]
+            trinfo = self.entity('Any X WHERE X eid %(x)s', {'x': eid}, 'x')
+            trinfo.complete()
+            self.failUnless(trinfo.relation_cached('from_state', 'subject'))
+            self.failUnless(trinfo.relation_cached('to_state', 'subject'))
+            self.failUnless(trinfo.relation_cached('wf_info_for', 'subject'))
+            # check with a missing relation
+            eid = self.execute('INSERT TrInfo X: X comment "zou", X wf_info_for U,'
+                               'X to_state S2 WHERE '
+                               'U login "admin", S2 name "activated"')[0][0]
+            trinfo = self.entity('Any X WHERE X eid %(x)s', {'x': eid}, 'x')
+            trinfo.complete()
+            self.failUnless(isinstance(trinfo.creation_date, DateTimeType))
+            self.failUnless(trinfo.relation_cached('from_state', 'subject'))
+            self.failUnless(trinfo.relation_cached('to_state', 'subject'))
+            self.failUnless(trinfo.relation_cached('wf_info_for', 'subject'))
+            self.assertEquals(trinfo.from_state, [])
+        finally:
+            self.rollback()
+            self.execute('DELETE RT add_permission G WHERE RT name "wf_info_for", G name "managers"')
+            self.commit()
+
+    def test_request_cache(self):
+        req = self.request()
+        user = self.entity('EUser X WHERE X login "admin"', req=req)
+        state = user.in_state[0]
+        samestate = self.entity('State X WHERE X name "activated"', req=req)
+        self.failUnless(state is samestate)
+
+    def test_rest_path(self):
+        note = self.add_entity('Note', type=u'z')
+        self.assertEquals(note.rest_path(), 'note/%s' % note.eid)
+        # unique attr
+        tag = self.add_entity('Tag', name=u'x')
+        self.assertEquals(tag.rest_path(), 'tag/x')
+        # test explicit rest_attr
+        person = self.add_entity('Personne', prenom=u'john', nom=u'doe')
+        self.assertEquals(person.rest_path(), 'personne/doe')
+        # ambiguity test
+        person2 = self.add_entity('Personne', prenom=u'remi', nom=u'doe')
+        self.assertEquals(person.rest_path(), 'personne/eid/%s' % person.eid)
+        self.assertEquals(person2.rest_path(), 'personne/eid/%s' % person2.eid)
+        # unique attr with None value (wikiid in this case)
+        card1 = self.add_entity('Card', title=u'hop')
+        self.assertEquals(card1.rest_path(), 'card/eid/%s' % card1.eid)
+        card2 = self.add_entity('Card', title=u'pod', wikiid=u'zob/i')
+        self.assertEquals(card2.rest_path(), 'card/zob%2Fi')
+
+    def test_set_attributes(self):
+        person = self.add_entity('Personne', nom=u'di mascio', prenom=u'adrien')
+        self.assertEquals(person.prenom, u'adrien')
+        self.assertEquals(person.nom, u'di mascio')
+        person.set_attributes(prenom=u'sylvain', nom=u'thénault')
+        person = self.entity('Personne P') # XXX retrieval needed ?
+        self.assertEquals(person.prenom, u'sylvain')
+        self.assertEquals(person.nom, u'thénault')
+
+    def test_metainformation(self):
+        note = self.add_entity('Note', type=u'z')
+        metainf = note.metainformation()
+        self.assertEquals(metainf, {'source': {'adapter': 'native', 'uri': 'system'}, 'type': u'Note', 'extid': None})
+        self.assertEquals(note.absolute_url(), 'http://testing.fr/cubicweb/note/%s' % note.eid)
+        metainf['source'] = metainf['source'].copy()
+        metainf['source']['base-url']  = 'http://cubicweb2.com/'
+        self.assertEquals(note.absolute_url(), 'http://cubicweb2.com/note/%s' % note.eid)
+        
+if __name__ == '__main__':
+    from logilab.common.testlib import unittest_main
+    unittest_main()
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/common/test/unittest_mail.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,122 @@
+# -*- coding: utf-8 -*-
+"""unit tests for module cubicweb.common.mail"""
+
+import os
+import pwd
+
+from logilab.common.testlib import unittest_main
+from logilab.common.umessage import message_from_string
+
+from cubicweb.devtools.apptest import EnvBasedTC
+from cubicweb.common.mail import format_mail
+
+
+def getlogin():
+    """avoid using os.getlogin() because of strange tty / stdin problems
+    (man 3 getlogin)
+    Another solution would be to use $LOGNAME, $USER or $USERNAME
+    """
+    return pwd.getpwuid(os.getuid())[0]
+    
+
+class EmailTC(EnvBasedTC):
+
+    def test_format_mail(self):
+        self.set_option('sender-addr', 'bim@boum.fr')
+        self.set_option('sender-name', 'BimBam')
+        
+        mail = format_mail({'name': 'oim', 'email': 'oim@logilab.fr'},
+                           ['test@logilab.fr'], u'un petit cöucou', u'bïjour',
+                           config=self.config)
+        self.assertLinesEquals(mail.as_string(), """\
+MIME-Version: 1.0
+Content-Type: text/plain; charset="utf-8"
+Content-Transfer-Encoding: base64
+Subject: =?utf-8?q?b=C3=AFjour?=
+From: =?utf-8?q?oim?= <oim@logilab.fr>
+Reply-to: =?utf-8?q?oim?= <oim@logilab.fr>, =?utf-8?q?BimBam?= <bim@boum.fr>
+X-CW: data
+To: test@logilab.fr
+
+dW4gcGV0aXQgY8O2dWNvdQ==
+""")
+        msg = message_from_string(mail.as_string())
+        self.assertEquals(msg.get('subject'), u'bïjour')
+        self.assertEquals(msg.get('from'), u'oim <oim@logilab.fr>')
+        self.assertEquals(msg.get('to'), u'test@logilab.fr')
+        self.assertEquals(msg.get('reply-to'), u'oim <oim@logilab.fr>, BimBam <bim@boum.fr>')
+        self.assertEquals(msg.get_payload(decode=True), u'un petit cöucou')
+
+        
+    def test_format_mail_euro(self):
+        mail = format_mail({'name': u'oîm', 'email': u'oim@logilab.fr'},
+                           ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €')
+        self.assertLinesEquals(mail.as_string(), """\
+MIME-Version: 1.0
+Content-Type: text/plain; charset="utf-8"
+Content-Transfer-Encoding: base64
+Subject: =?utf-8?b?YsOvam91ciDigqw=?=
+From: =?utf-8?q?o=C3=AEm?= <oim@logilab.fr>
+Reply-to: =?utf-8?q?o=C3=AEm?= <oim@logilab.fr>
+To: test@logilab.fr
+
+dW4gcGV0aXQgY8O2dWNvdSDigqw=
+""")
+        msg = message_from_string(mail.as_string())
+        self.assertEquals(msg.get('subject'), u'bïjour €')
+        self.assertEquals(msg.get('from'), u'oîm <oim@logilab.fr>')
+        self.assertEquals(msg.get('to'), u'test@logilab.fr')
+        self.assertEquals(msg.get('reply-to'), u'oîm <oim@logilab.fr>')
+        self.assertEquals(msg.get_payload(decode=True), u'un petit cöucou €')
+
+
+    def test_format_mail_from_reply_to(self):
+        # no sender-name, sender-addr in the configuration
+        self.set_option('sender-name', '')
+        self.set_option('sender-addr', '')
+        msg = format_mail({'name': u'', 'email': u''},
+                          ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €',
+                          config=self.config)
+        self.assertEquals(msg.get('from'), u'')
+        self.assertEquals(msg.get('reply-to'), None)
+        msg = format_mail({'name': u'tutu', 'email': u'tutu@logilab.fr'},
+                          ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €',
+                          config=self.config)
+        msg = message_from_string(msg.as_string())
+        self.assertEquals(msg.get('from'), u'tutu <tutu@logilab.fr>')
+        self.assertEquals(msg.get('reply-to'), u'tutu <tutu@logilab.fr>')
+        msg = format_mail({'name': u'tutu', 'email': u'tutu@logilab.fr'},
+                          ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €')
+        msg = message_from_string(msg.as_string())
+        self.assertEquals(msg.get('from'), u'tutu <tutu@logilab.fr>')
+        self.assertEquals(msg.get('reply-to'), u'tutu <tutu@logilab.fr>')
+        # set sender name and address as expected
+        self.set_option('sender-name', 'cubicweb-test')
+        self.set_option('sender-addr', 'cubicweb-test@logilab.fr') 
+        # anonymous notification: no name and no email specified
+        msg = format_mail({'name': u'', 'email': u''},
+                           ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €',
+                           config=self.config)
+        msg = message_from_string(msg.as_string())
+        self.assertEquals(msg.get('from'), u'cubicweb-test <cubicweb-test@logilab.fr>')
+        self.assertEquals(msg.get('reply-to'), u'cubicweb-test <cubicweb-test@logilab.fr>')
+        # anonymous notification: only email specified
+        msg = format_mail({'email': u'tutu@logilab.fr'},
+                           ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €',
+                           config=self.config)
+        msg = message_from_string(msg.as_string())
+        self.assertEquals(msg.get('from'), u'cubicweb-test <tutu@logilab.fr>')
+        self.assertEquals(msg.get('reply-to'), u'cubicweb-test <tutu@logilab.fr>, cubicweb-test <cubicweb-test@logilab.fr>')
+        # anonymous notification: only name specified
+        msg = format_mail({'name': u'tutu'},
+                          ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €',
+                          config=self.config)
+        msg = message_from_string(msg.as_string())
+        self.assertEquals(msg.get('from'), u'tutu <cubicweb-test@logilab.fr>')
+        self.assertEquals(msg.get('reply-to'), u'tutu <cubicweb-test@logilab.fr>')
+
+
+
+if __name__ == '__main__':
+    unittest_main()
+    
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/common/test/unittest_migration.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,126 @@
+"""cubicweb.common.migration unit tests"""
+
+from os.path import abspath
+from logilab.common.testlib import TestCase, unittest_main
+
+from cubicweb.devtools import TestServerConfiguration
+from cubicweb.devtools.apptest import TestEnvironment
+
+from cubicweb.cwconfig import CubicWebConfiguration
+from cubicweb.common.migration import migration_files, filter_scripts
+
+
+class Schema(dict):
+    def has_entity(self, e_type):
+        return self.has_key(e_type)
+
+SMIGRDIR = abspath('data/server_migration') + '/'
+TMIGRDIR = abspath('data/migration') + '/'
+
+class MigrTestConfig(TestServerConfiguration):
+    verbosity = 0
+    def migration_scripts_dir(cls):
+        return SMIGRDIR
+
+    def cube_migration_scripts_dir(cls, cube):
+        return TMIGRDIR
+    
+class MigrationToolsTC(TestCase):
+    def setUp(self):
+        self.config = MigrTestConfig('data')
+        from yams.schema import Schema
+        self.config.load_schema = lambda expand_cubes=False: Schema('test')
+        
+    def test_migration_files_base(self):
+        self.assertListEquals(migration_files(self.config, [('cubicweb', (2,3,0), (2,4,0)),
+                                                            ('TEMPLATE', (0,0,2), (0,0,3))]),
+                              [SMIGRDIR+'bootstrapmigration_repository.py',
+                               TMIGRDIR+'0.0.3_Any.py'])
+        self.assertListEquals(migration_files(self.config, [('cubicweb', (2,4,0), (2,5,0)),
+                                                            ('TEMPLATE', (0,0,2), (0,0,3))]),
+                              [SMIGRDIR+'bootstrapmigration_repository.py',
+                               SMIGRDIR+'2.5.0_Any.sql',
+                               TMIGRDIR+'0.0.3_Any.py'])
+        self.assertListEquals(migration_files(self.config, [('cubicweb', (2,5,0), (2,6,0)),
+                                                            ('TEMPLATE', (0,0,3), (0,0,4))]),
+                              [SMIGRDIR+'bootstrapmigration_repository.py',
+                               SMIGRDIR+'2.6.0_Any.sql',
+                               TMIGRDIR+'0.0.4_Any.py'])
+        
+##     def test_migration_files_overlap(self):
+##         self.assertListEquals(migration_files(self.config, (2,4,0), (2,10,2),
+##                                               (0,0,2), (0,1,2)),
+##                               [SMIGRDIR+'bootstrapmigration_repository.py',
+##                                TMIGRDIR+'0.0.3_Any.py',
+##                                TMIGRDIR+'0.0.4_Any.py',
+##                                SMIGRDIR+'2.4.0_2.5.0_Any.sql',
+##                                SMIGRDIR+'2.5.1_2.6.0_Any.sql',
+##                                TMIGRDIR+'0.1.0_Any.py',
+##                                TMIGRDIR+'0.1.0_common.py',
+##                                TMIGRDIR+'0.1.0_repository.py',
+##                                TMIGRDIR+'0.1.2_Any.py',
+##                                SMIGRDIR+'2.10.1_2.10.2_Any.sql'])
+        
+    def test_migration_files_for_mode(self):
+        from cubicweb.server.migractions import ServerMigrationHelper
+        self.assertIsInstance(self.config.migration_handler(), ServerMigrationHelper)
+        from cubicweb.common.migration import MigrationHelper
+        config = CubicWebConfiguration('data')
+        config.verbosity = 0
+        self.assert_(not isinstance(config.migration_handler(), ServerMigrationHelper))
+        self.assertIsInstance(config.migration_handler(), MigrationHelper)
+        config = self.config
+        config.__class__.name = 'twisted'
+        self.assertListEquals(migration_files(config, [('TEMPLATE', (0,0,4), (0,1,0))]),
+                              [TMIGRDIR+'0.1.0_common.py',
+                               TMIGRDIR+'0.1.0_web.py'])
+        config.__class__.name = 'repository'
+        self.assertListEquals(migration_files(config, [('TEMPLATE', (0,0,4), (0,1,0))]),
+                              [SMIGRDIR+'bootstrapmigration_repository.py',
+                               TMIGRDIR+'0.1.0_Any.py',
+                               TMIGRDIR+'0.1.0_common.py',
+                               TMIGRDIR+'0.1.0_repository.py'])
+        config.__class__.name = 'all-in-one'
+        self.assertListEquals(migration_files(config, [('TEMPLATE', (0,0,4), (0,1,0))]),
+                              [SMIGRDIR+'bootstrapmigration_repository.py',
+                               TMIGRDIR+'0.1.0_Any.py',
+                               TMIGRDIR+'0.1.0_common.py',
+                               TMIGRDIR+'0.1.0_repository.py',
+                               TMIGRDIR+'0.1.0_web.py'])
+        config.__class__.name = 'repository'
+
+    def test_filter_scripts(self):
+        self.assertListEquals(filter_scripts(self.config, SMIGRDIR, (2,4,0), (2,5,0)),
+                              [((2, 5, 0), SMIGRDIR+'2.5.0_Any.sql')])
+        self.assertListEquals(filter_scripts(self.config, SMIGRDIR, (2,4,0), (2,6,0)),
+                              [((2, 5, 0), SMIGRDIR+'2.5.0_Any.sql'),
+                               ((2, 6, 0), SMIGRDIR+'2.6.0_Any.sql')])
+        self.assertListEquals(filter_scripts(self.config, SMIGRDIR, (2,5,0), (2,5,1)),
+                              [])
+        self.assertListEquals(filter_scripts(self.config, SMIGRDIR, (2,5,0), (2,6,0)),
+                              [((2, 6, 0), SMIGRDIR+'2.6.0_Any.sql')])
+        self.assertListEquals(filter_scripts(self.config, SMIGRDIR, (2,5,0), (2,10,2)),
+                              [((2, 6, 0), SMIGRDIR+'2.6.0_Any.sql'),
+                               ((2, 10, 2), SMIGRDIR+'2.10.2_Any.sql')])
+        self.assertListEquals(filter_scripts(self.config, SMIGRDIR, (2,5,1), (2,6,0)),
+                              [((2, 6, 0), SMIGRDIR+'2.6.0_Any.sql')])
+        self.assertListEquals(filter_scripts(self.config, SMIGRDIR, (2,5,1), (2,10,2)),
+                              [((2, 6, 0), SMIGRDIR+'2.6.0_Any.sql'),
+                               ((2, 10, 2), SMIGRDIR+'2.10.2_Any.sql')])
+
+
+from cubicweb.devtools import ApptestConfiguration, init_test_database, cleanup_sqlite
+
+class BaseCreationTC(TestCase):
+
+    def test_db_creation(self):
+        """make sure database can be created"""
+        config = ApptestConfiguration('data')
+        source = config.sources()['system']
+        self.assertEquals(source['db-driver'], 'sqlite')
+        cleanup_sqlite(source['db-name'], removecube=True)
+        init_test_database(driver=source['db-driver'], config=config)
+
+
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/common/test/unittest_rest.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,49 @@
+from logilab.common.testlib import unittest_main
+from cubicweb.devtools.apptest import EnvBasedTC
+
+from cubicweb.common.rest import rest_publish
+        
+class RestTC(EnvBasedTC):
+    def context(self):
+        return self.execute('EUser X WHERE X login "admin"').get_entity(0, 0)
+    
+    def test_eid_role(self):
+        context = self.context()
+        self.assertEquals(rest_publish(context, ':eid:`%s`' % context.eid),
+                          '<p><a class="reference" href="http://testing.fr/cubicweb/euser/admin">#%s</a></p>\n' % context.eid)
+        self.assertEquals(rest_publish(context, ':eid:`%s:some text`' %  context.eid),
+                          '<p><a class="reference" href="http://testing.fr/cubicweb/euser/admin">some text</a></p>\n')
+        
+    def test_card_role_create(self):
+        self.assertEquals(rest_publish(self.context(), ':card:`index`'),
+                          '<p><a class="reference" href="http://testing.fr/cubicweb/view?etype=Card&amp;wikiid=index&amp;vid=creation">index</a></p>\n')
+
+    def test_card_role_link(self):
+        self.add_entity('Card', wikiid=u'index', title=u'Site index page', synopsis=u'yo')
+        self.assertEquals(rest_publish(self.context(), ':card:`index`'),
+                          '<p><a class="reference" href="http://testing.fr/cubicweb/card/index">index</a></p>\n')
+
+    def test_bad_rest_no_crash(self):
+        data = rest_publish(self.context(), '''
+| card | implication     |
+--------------------------
+| 1-1  | N1 = N2         |
+| 1-?  | N1 <= N2        |
+| 1-+  | N1 >= N2        |
+| 1-*  | N1>0 => N2>0    |       
+--------------------------
+| ?-?  | N1 # N2         |
+| ?-+  | N1 >= N2        |
+| ?-*  | N1 #  N2        |
+--------------------------
+| +-+  | N1>0 => N2>0 et |
+|      | N2>0 => N1>0    |
+| +-*  | N1>+ => N2>0    |
+--------------------------
+| *-*  | N1#N2           |
+--------------------------
+
+''')
+        
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/common/test/unittest_uilib.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,154 @@
+# -*- coding: utf-8 -*-
+"""unittests for cubicweb.common.uilib"""
+
+__docformat__ = "restructuredtext en"
+
+from logilab.common.testlib import TestCase, unittest_main
+from logilab.common.tree import Node
+
+from cubicweb.common import uilib
+
+class UILIBTC(TestCase):
+
+
+    def test_remove_tags(self):
+        """make sure remove_tags remove all tags"""
+        data = [
+            ('<h1>Hello</h1>', 'Hello'),
+            ('<h1>Hello <a href="foo/bar"><b>s</b>pam</a></h1>', 'Hello spam'),
+            ('<br>Hello<img src="doh.png"/>', 'Hello'),
+            ('<p></p>', ''),
+            ]
+        for text, expected in data:
+            got = uilib.remove_html_tags(text)
+            self.assertEquals(got, expected)
+
+    def test_safe_cut(self):
+        """tests uilib.safe_cut() behaviour"""
+        data = [
+            ('hello', 'hello'),
+            ('hello world', 'hello...'),
+            ("hell<b>O'</b> world", "hellO..."),
+            ('<h1>hello</h1>', '<h1>hello</h1>'),
+            ]
+        for text, expected in data:
+            got = uilib.safe_cut(text, 8)
+            self.assertEquals(got, expected)
+
+    def test_cut(self):
+        """tests uilib.cut() behaviour"""
+        data = [
+            ('hello', 'hello'),
+            ('hello world', 'hello...'),
+            ("hell<b>O'</b> world", "hell<..."),
+            ]
+        for text, expected in data:
+            got = uilib.cut(text, 8)
+            self.assertEquals(got, expected)
+
+    def test_text_cut_no_text(self):
+        """tests uilib.text_cut() behaviour with no text"""
+        data = [('','')]
+        for text, expected in data:
+            got = uilib.text_cut(text, 8)
+            self.assertEquals(got, expected)
+
+    def test_text_cut_long_text(self):
+        """tests uilib.text_cut() behaviour with long text"""
+        data = [("""Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod
+tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam,
+quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo
+consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse
+cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non
+proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
+Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod
+tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam,
+quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo
+consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse
+cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non
+proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
+""","""Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod
+tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam,
+quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo
+consequat.""")]
+        for text, expected in data:
+            got = uilib.text_cut(text, 30)
+            self.assertEquals(got, expected)
+
+    def  test_text_cut_no_point(self):
+        """tests uilib.text_cut() behaviour with no point"""
+        data = [("""Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod
+tempor incididunt ut labore et dolore magna aliqua Ut enim ad minim veniam,
+quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo
+consequat Duis aute irure dolor in reprehenderit in voluptate velit esse
+cillum dolore eu fugiat nulla pariatur Excepteur sint occaecat cupidatat non
+proident, sunt in culpa qui officia deserunt mollit anim id est laborum
+Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod
+tempor incididunt ut labore et dolore magna aliqua Ut enim ad minim veniam,
+quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo
+consequat Duis aute irure dolor in reprehenderit in voluptate velit esse
+cillum dolore eu fugiat nulla pariatur Excepteur sint occaecat cupidatat non
+proident, sunt in culpa qui officia deserunt mollit anim id est laborum
+""","""Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod
+tempor incididunt ut labore et dolore magna aliqua Ut enim ad minim veniam,
+quis nostrud exercitation ullamco laboris nisi""")]
+        for text, expected in data:
+            got = uilib.text_cut(text, 30)
+            self.assertEquals(got, expected)
+
+    def test_ajax_replace_url(self):
+        # NOTE: for the simplest use cases, we could use doctest
+        arurl = uilib.ajax_replace_url
+        self.assertEquals(arurl('foo', 'Person P'),
+                          "javascript: replacePageChunk('foo', 'Person%20P');")
+        self.assertEquals(arurl('foo', 'Person P', 'oneline'),
+                          "javascript: replacePageChunk('foo', 'Person%20P', 'oneline');")
+        self.assertEquals(arurl('foo', 'Person P', 'oneline', name='bar', age=12),
+                          'javascript: replacePageChunk(\'foo\', \'Person%20P\', \'oneline\', {"age": 12, "name": "bar"});')
+        self.assertEquals(arurl('foo', 'Person P', name='bar', age=12),
+                          'javascript: replacePageChunk(\'foo\', \'Person%20P\', \'null\', {"age": 12, "name": "bar"});')
+
+tree = ('root', (
+    ('child_1_1', (
+    ('child_2_1', ()), ('child_2_2', (
+    ('child_3_1', ()),
+    ('child_3_2', ()),
+    ('child_3_3', ()),
+    )))),
+    ('child_1_2', (('child_2_3', ()),))))
+
+generated_html = """\
+<table class="tree">
+<tr><td class="tree_cell" rowspan="2"><div class="tree_cell">root</div></td><td class="tree_cell_1_1">&nbsp;</td><td class="tree_cell_1_2">&nbsp;</td><td class="tree_cell" rowspan="2"><div class="tree_cell">child_1_1</div></td><td class="tree_cell_1_1">&nbsp;</td><td class="tree_cell_1_2">&nbsp;</td><td class="tree_cell" rowspan="2"><div class="tree_cell">child_2_1</div></td><td class="tree_cell_0_1">&nbsp;</td><td class="tree_cell_0_2">&nbsp;</td><td rowspan="2">&nbsp;</td></tr>
+<tr><td class="tree_cell_1_3">&nbsp;</td><td class="tree_cell_1_4">&nbsp;</td><td class="tree_cell_1_3">&nbsp;</td><td class="tree_cell_1_4">&nbsp;</td><td class="tree_cell_0_3">&nbsp;</td><td class="tree_cell_0_4">&nbsp;</td></tr>
+<tr><td rowspan="2">&nbsp;</td><td class="tree_cell_2_1">&nbsp;</td><td class="tree_cell_2_2">&nbsp;</td><td rowspan="2">&nbsp;</td><td class="tree_cell_4_1">&nbsp;</td><td class="tree_cell_4_2">&nbsp;</td><td class="tree_cell" rowspan="2"><div id="selected" class="tree_cell">child_2_2</div></td><td class="tree_cell_1_1">&nbsp;</td><td class="tree_cell_1_2">&nbsp;</td><td class="tree_cell" rowspan="2"><div class="tree_cell">child_3_1</div></td></tr>
+<tr><td class="tree_cell_2_3">&nbsp;</td><td class="tree_cell_2_4">&nbsp;</td><td class="tree_cell_4_3">&nbsp;</td><td class="tree_cell_4_4">&nbsp;</td><td class="tree_cell_1_3">&nbsp;</td><td class="tree_cell_1_4">&nbsp;</td></tr>
+<tr><td rowspan="2">&nbsp;</td><td class="tree_cell_2_1">&nbsp;</td><td class="tree_cell_2_2">&nbsp;</td><td rowspan="2">&nbsp;</td><td class="tree_cell_0_1">&nbsp;</td><td class="tree_cell_0_2">&nbsp;</td><td rowspan="2">&nbsp;</td><td class="tree_cell_3_1">&nbsp;</td><td class="tree_cell_3_2">&nbsp;</td><td class="tree_cell" rowspan="2"><div class="tree_cell">child_3_2</div></td></tr>
+<tr><td class="tree_cell_2_3">&nbsp;</td><td class="tree_cell_2_4">&nbsp;</td><td class="tree_cell_0_3">&nbsp;</td><td class="tree_cell_0_4">&nbsp;</td><td class="tree_cell_3_3">&nbsp;</td><td class="tree_cell_3_4">&nbsp;</td></tr>
+<tr><td rowspan="2">&nbsp;</td><td class="tree_cell_2_1">&nbsp;</td><td class="tree_cell_2_2">&nbsp;</td><td rowspan="2">&nbsp;</td><td class="tree_cell_0_1">&nbsp;</td><td class="tree_cell_0_2">&nbsp;</td><td rowspan="2">&nbsp;</td><td class="tree_cell_4_1">&nbsp;</td><td class="tree_cell_4_2">&nbsp;</td><td class="tree_cell" rowspan="2"><div class="tree_cell">child_3_3</div></td></tr>
+<tr><td class="tree_cell_2_3">&nbsp;</td><td class="tree_cell_2_4">&nbsp;</td><td class="tree_cell_0_3">&nbsp;</td><td class="tree_cell_0_4">&nbsp;</td><td class="tree_cell_4_3">&nbsp;</td><td class="tree_cell_4_4">&nbsp;</td></tr>
+<tr><td rowspan="2">&nbsp;</td><td class="tree_cell_4_1">&nbsp;</td><td class="tree_cell_4_2">&nbsp;</td><td class="tree_cell" rowspan="2"><div class="tree_cell">child_1_2</div></td><td class="tree_cell_5_1">&nbsp;</td><td class="tree_cell_5_2">&nbsp;</td><td class="tree_cell" rowspan="2"><div class="tree_cell">child_2_3</div></td><td class="tree_cell_0_1">&nbsp;</td><td class="tree_cell_0_2">&nbsp;</td><td rowspan="2">&nbsp;</td></tr>
+<tr><td class="tree_cell_4_3">&nbsp;</td><td class="tree_cell_4_4">&nbsp;</td><td class="tree_cell_5_3">&nbsp;</td><td class="tree_cell_5_4">&nbsp;</td><td class="tree_cell_0_3">&nbsp;</td><td class="tree_cell_0_4">&nbsp;</td></tr>
+</table>\
+"""
+
+def make_tree(tuple):
+    n = Node(tuple[0])
+    for child in tuple[1]:
+        n.append(make_tree(child))
+    return n
+    
+class UIlibHTMLGenerationTC(TestCase):
+    """ a basic tree node, characterised by an id"""
+    def setUp(self):
+        """ called before each test from this class """        
+        self.o = make_tree(tree)
+
+    def test_generated_html(self):
+        s = uilib.render_HTML_tree(self.o, selected_node="child_2_2")
+        self.assertTextEqual(s, generated_html)
+    
+    
+if __name__ == '__main__':
+    unittest_main()
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/common/test/unittest_utils.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,47 @@
+"""unit tests for module cubicweb.common.utils"""
+
+from logilab.common.testlib import TestCase, unittest_main
+
+from cubicweb.common.utils import make_uid, UStringIO, SizeConstrainedList
+
+
+class MakeUidTC(TestCase):
+    def test_1(self):
+        self.assertNotEquals(make_uid('xyz'), make_uid('abcd'))
+        self.assertNotEquals(make_uid('xyz'), make_uid('xyz'))
+        
+    def test_2(self):
+        d = {}
+        while len(d)<10000:
+            uid = make_uid('xyz')
+            if d.has_key(uid):
+                self.fail(len(d))
+            d[uid] = 1
+
+        
+class UStringIOTC(TestCase):
+    """tests for the UStringIO unicode stream class"""
+    def test_boolean_value(self):
+        # an empty UStringIO must still be truthy (unlike an empty list)
+        self.assert_(UStringIO())
+
+
+class SizeConstrainedListTC(TestCase):
+    """tests for SizeConstrainedList's size-capping behaviour"""
+
+    def test_append(self):
+        # appending beyond maxsize drops the oldest elements
+        l = SizeConstrainedList(10)
+        for i in xrange(12):
+            l.append(i)
+        self.assertEquals(l, range(2, 12))
+    
+    def test_extend(self):
+        # generative test: yields one check per (extension, expected) pair
+        testdata = [(range(5), range(5)),
+                    (range(10), range(10)),
+                    (range(12), range(2, 12)),
+                    ]
+        for extension, expected in testdata:
+            l = SizeConstrainedList(10)
+            l.extend(extension)
+            yield self.assertEquals, l, expected
+
+   
+if __name__ == '__main__':
+    # run this module's tests when executed directly
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/common/uilib.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,473 @@
+# -*- coding: utf-8 -*-
+"""user interface libraries
+
+contains some functions designed to help implementation of cubicweb user interface
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+import csv
+import decimal
+import locale
+import re
+from urllib import quote as urlquote
+from cStringIO import StringIO
+from xml.parsers.expat import ExpatError
+
+import simplejson
+
+from mx.DateTime import DateTimeType, DateTimeDeltaType
+
+from logilab.common.textutils import unormalize
+
+def ustrftime(date, fmt='%Y-%m-%d'):
+    """like strftime, but returns a unicode string instead of an encoded
+    string which may be problematic with localized date.
+    
+    encoding is guessed by locale.getpreferredencoding()
+    """
+    # date format may depend on the locale: decode strftime's byte string
+    # with the locale's preferred encoding (falling back to UTF-8)
+    encoding = locale.getpreferredencoding(do_setlocale=False) or 'UTF-8'
+    return unicode(date.strftime(fmt), encoding)
+
+
+def rql_for_eid(eid):
+    """return the rql query necessary to fetch entity with the given eid.  This
+    function should only be used to generate link with rql inside, not to give
+    to cursor.execute (in which case you won't benefit from rql cache).
+
+    :Parameters:
+      - `eid`: the eid of the entity we should search
+    :rtype: str
+    :return: the rql query
+    """
+    return 'Any X WHERE X eid %s' % eid
+
+
+def printable_value(req, attrtype, value, props=None, displaytime=True):
+    """return a displayable value (i.e. unicode string) for `value` of
+    attribute type `attrtype`, using the request's translator and ui.*
+    format properties
+
+    :param req: the request object (used for i18n and ui properties)
+    :param attrtype: the attribute type name (e.g. 'String', 'Date')
+    :param value: the raw attribute value, possibly None
+    :param props: optional attribute properties dict, used to detect
+                  internationalizable strings
+    :param displaytime: for 'Datetime' values, include the time part
+    """
+    if value is None or attrtype == 'Bytes':
+        return u''
+    if attrtype == 'String':
+        # don't translate empty value if you don't want strange results
+        if props is not None and value and props.get('internationalizable'):
+            return req._(value)
+        
+        return value
+    if attrtype == 'Date':
+        return ustrftime(value, req.property_value('ui.date-format'))
+    if attrtype == 'Time':
+        return ustrftime(value, req.property_value('ui.time-format'))
+    if attrtype == 'Datetime':
+        if not displaytime:
+            return ustrftime(value, req.property_value('ui.date-format'))
+        return ustrftime(value, req.property_value('ui.datetime-format'))
+    if attrtype == 'Boolean':
+        if value:
+            return req._('yes')
+        return req._('no')
+    if attrtype == 'Float':
+        value = req.property_value('ui.float-format') % value
+    # any other type: fall back to its unicode representation
+    return unicode(value)
+
+
+# text publishing #############################################################
+
+# use the docutils-based ReST publisher when available, otherwise degrade
+# to returning the input data unchanged
+try:
+    from cubicweb.common.rest import rest_publish # pylint: disable-msg=W0611
+except ImportError:
+    def rest_publish(entity, data):
+        """default behaviour if docutils was not found"""
+        return data
+    
+# non-greedy match of anything between '<' and '>' (opening or closing tag)
+TAG_PROG = re.compile(r'</?.*?>', re.U)
+def remove_html_tags(text):
+    """Removes HTML tags from text
+
+    >>> remove_html_tags('<td>hi <a href="http://www.google.fr">world</a></td>')
+    'hi world'
+    >>>
+    """
+    return TAG_PROG.sub('', text)
+
+
+REF_PROG = re.compile(r"<ref\s+rql=([\'\"])([^\1]*?)\1\s*>([^<]*)</ref>", re.U)
+def _subst_rql(view, obj):
+    delim, rql, descr = obj.groups()
+    return u'<a href="%s">%s</a>' % (view.build_url(rql=rql), descr)
+
+def html_publish(view, text):
+    """replace <ref rql=''> links by <a href="...">"""
+    if not text:
+        return u''
+    return REF_PROG.sub(lambda obj, view=view:_subst_rql(view, obj), text)
+
+try:
+    from lxml import etree
+except ImportError:
+    # gae environment: lxml not available
+    
+    def soup2xhtml(data, encoding):
+        """identity fallback used when lxml is missing: return data as is"""
+        return data
+    
+else:
+
+    def soup2xhtml(data, encoding):
+        """tidy (at least try) html soup and return the result
+        Note: the function considers a string with no surrounding tag as valid
+              if <div>`data`</div> can be parsed by an XML parser
+        """
+        xmltree = etree.HTML('<div>%s</div>' % data)
+        # NOTE: lxml 1.1 (etch platforms) doesn't recognize
+        #       the encoding=unicode parameter (lxml 2.0 does), this is
+        #       why we specify an encoding and re-decode to unicode later
+        body = etree.tostring(xmltree[0], encoding=encoding)
+        # strip the leading '<body><div>' (11 chars) and the trailing
+        # '</div></body>' (13 chars) added by the wrapping, then decode
+        return body[11:-13].decode(encoding)
+
+    
+# HTML generation helper functions ############################################
+
+from logilab.mtconverter import html_escape
+
+def tooltipize(text, tooltip, url=None):
+    """make an HTML tooltip: wrap `text` in a link whose title attribute
+    holds `tooltip`
+
+    NOTE(review): neither text nor tooltip is html-escaped here, callers
+    must pass trusted or pre-escaped content
+    """
+    url = url or '#'
+    return u'<a href="%s" title="%s">%s</a>' % (url, tooltip, text)
+
+def toggle_action(nodeid):
+    """builds the javascript: url calling the js toggleVisibility function
+    on `nodeid` (suitable as a link's href attribute)
+    """
+    return u"javascript: toggleVisibility('%s')" % nodeid
+
+def toggle_link(nodeid, label):
+    """builds a HTML link that uses the js toggleVisibility function"""
+    # label is not escaped: callers may pass markup on purpose
+    return u'<a href="%s">%s</a>' % (toggle_action(nodeid), label)
+
+def ajax_replace_url(nodeid, rql, vid=None, swap=False, **extraparams):
+    """builds a replacePageChunk-like url
+    >>> ajax_replace_url('foo', 'Person P')
+    "javascript: replacePageChunk('foo', 'Person%20P');"
+    >>> ajax_replace_url('foo', 'Person P', 'oneline')
+    "javascript: replacePageChunk('foo', 'Person%20P', 'oneline');"
+    >>> ajax_replace_url('foo', 'Person P', 'oneline', name='bar', age=12)
+    "javascript: replacePageChunk('foo', 'Person%20P', 'oneline', {'age':12, 'name':'bar'});"
+    >>> ajax_replace_url('foo', 'Person P', name='bar', age=12)
+    "javascript: replacePageChunk('foo', 'Person%20P', 'null', {'age':12, 'name':'bar'});"    
+    """
+    params = [repr(nodeid), repr(urlquote(rql))]
+    # the 'null' placeholder keeps the positional arguments aligned when
+    # extra parameters are given without an explicit view id
+    if extraparams and not vid:
+        params.append("'null'")
+    elif vid:
+        params.append(repr(vid))
+    if extraparams:
+        params.append(simplejson.dumps(extraparams))
+    if swap:
+        params.append('true')
+    return "javascript: replacePageChunk(%s);" % ', '.join(params)
+
+def safe_cut(text, length):
+    """returns a string of at most <length> characters based on <text>,
+    removing any html tags from given text if cut is necessary.
+    """
+    if text is None:
+        return u''
+    text_nohtml = remove_html_tags(text)
+    # try to keep html tags if text is short enough
+    if len(text_nohtml) <= length:
+        return text
+    # else if un-tagged text is too long, cut it (3 chars for the ellipsis)
+    return text_nohtml[:length-3] + u'...'
+
+def text_cut(text, nbwords=30):
+    """cut `text` after around `nbwords` words, extending the cut up to
+    the end of the sentence (next '.') when one is found
+    """
+    if text is None:
+        return u''
+    # length of the first `nbwords` whitespace-separated words
+    minlength = len(' '.join(text.split()[:nbwords]))
+    textlength = text.find('.', minlength) + 1
+    if textlength == 0: # no point found
+        textlength = minlength 
+    return text[:textlength]
+
+
+def cut(text, length):
+    """returns a string of length <length> based on <text>
+    post:
+      len(__return__) <= length
+    """
+    if text is None:
+        return u''
+    if len(text) <= length:
+        return text
+    # else if un-tagged text is too long, cut it
+    return text[:length-3] + u'...'
+
+
+from StringIO import StringIO
+
+def ureport_as_html(layout):
+    """render a logilab.common.ureports layout as a unicode HTML string"""
+    from logilab.common.ureports import HTMLWriter
+    formater = HTMLWriter(True)
+    stream = StringIO() #UStringIO() don't want unicode assertion
+    formater.format(layout, stream)
+    res = stream.getvalue()
+    # the writer may produce an encoded str: normalize to unicode
+    if isinstance(res, str):
+        res = unicode(res, 'UTF8')
+    return res
+
+def render_HTML_tree(tree, selected_node=None, render_node=None, caption=None):
+    """
+    Generate a pure HTML representation of a tree given as an instance
+    of a logilab.common.tree.Node
+
+    selected_node is the currently selected node (if any) which will
+    have its surrounding <div> have id="selected" (which default
+    to a bold border line with the default CSS).
+
+    render_node is a function that should take a Node content (Node.id)
+    as parameter and should return a string (what will be displayed
+    in the cell).
+
+    caption, if given, is rendered as the table's <caption>.
+
+    Warning: proper rendering of the generated html code depends on html_tree.css
+    """
+    tree_depth = tree.depth_down()
+    if render_node is None:
+        render_node = str
+
+    # helper function that build a matrix from the tree, like:
+    # +------+-----------+-----------+
+    # | root | child_1_1 | child_2_1 |
+    # | root | child_1_1 | child_2_2 |
+    # | root | child_1_2 |           |
+    # | root | child_1_3 | child_2_3 |
+    # | root | child_1_3 | child_2_4 |
+    # +------+-----------+-----------+
+    # from:
+    # root -+- child_1_1 -+- child_2_1
+    #       |             |
+    #       |             +- child_2_2
+    #       +- child_1_2
+    #       |
+    #       +- child1_3 -+- child_2_3
+    #                    |
+    #                    +- child_2_2
+    def build_matrix(path, matrix):
+        if path[-1].is_leaf():
+            matrix.append(path[:])
+        else:
+            for child in path[-1].children:
+                build_matrix(path[:] + [child], matrix)
+        
+    matrix = []
+    build_matrix([tree], matrix)
+
+    # make all lines in the matrix have the same number of columns
+    for line in matrix:
+        line.extend([None]*(tree_depth-len(line)))
+    # clear repeated ancestors so each node appears only once per column
+    # (bottom-up so comparisons are always against the original line above)
+    for i in range(len(matrix)-1, 0, -1):
+        prev_line, line = matrix[i-1:i+1]
+        for j in range(len(line)):
+            if line[j] == prev_line[j]:
+                line[j] = None
+
+    # We build the matrix of link types (between 2 cells on a line of the matrix)
+    # link types are :
+    link_types = {(True,  True,  True ): 1, # T
+                  (False, False, True ): 2, # |
+                  (False, True,  True ): 3, # + (actually, vert. bar with horiz. bar on the right)
+                  (False, True,  False): 4, # L
+                  (True,  True,  False): 5, # -
+                  }
+    links = []
+    for i, line in enumerate(matrix):
+        links.append([])
+        for j in range(tree_depth-1):
+            cell_11 = line[j] is not None
+            cell_12 = line[j+1] is not None
+            cell_21 = line[j+1] is not None and line[j+1].next_sibling() is not None
+            link_type = link_types.get((cell_11, cell_12, cell_21), 0)
+            # continue a vertical bar coming from the line above
+            if link_type == 0 and i > 0 and links[i-1][j] in (1,2,3):
+                link_type = 2
+            links[-1].append(link_type)
+    
+
+    # We can now generate the HTML code for the <table> 
+    s = u'<table class="tree">\n'
+    if caption:
+        s += '<caption>%s</caption>\n' % caption
+
+    for i, link_line in enumerate(links):
+        line = matrix[i]
+
+        # first <tr>: node cells (rowspan=2) and the top half of each link
+        s += '<tr>'
+        for j, link_cell in enumerate(link_line):
+            cell = line[j]
+            if cell:
+                if cell.id == selected_node:
+                    s += '<td class="tree_cell" rowspan="2"><div id="selected" class="tree_cell">%s</div></td>' % (render_node(cell.id))
+                else:
+                    s += '<td class="tree_cell" rowspan="2"><div class="tree_cell">%s</div></td>' % (render_node(cell.id))
+            else:
+                s += '<td rowspan="2">&nbsp;</td>'
+            s += '<td class="tree_cell_%d_1">&nbsp;</td>' % link_cell
+            s += '<td class="tree_cell_%d_2">&nbsp;</td>' % link_cell
+                
+        # last column has no link cell on its right
+        cell = line[-1]
+        if cell:
+            if cell.id == selected_node:
+                s += '<td class="tree_cell" rowspan="2"><div id="selected" class="tree_cell">%s</div></td>' % (render_node(cell.id))
+            else:
+                s += '<td class="tree_cell" rowspan="2"><div class="tree_cell">%s</div></td>' % (render_node(cell.id))
+        else:
+            s += '<td rowspan="2">&nbsp;</td>'
+
+        s += '</tr>\n'
+        # second <tr>: bottom half of each link (nodes span both rows)
+        if link_line:
+            s += '<tr>'
+            for j, link_cell in enumerate(link_line):
+                s += '<td class="tree_cell_%d_3">&nbsp;</td>' % link_cell
+                s += '<td class="tree_cell_%d_4">&nbsp;</td>' % link_cell
+            s += '</tr>\n'
+
+    s += '</table>'
+    return s
+
+
+
+# traceback formatting ########################################################
+
+import traceback
+
+def rest_traceback(info, exception):
+    """return a ReST formated traceback"""
+    res = [u'Traceback\n---------\n::\n']
+    for stackentry in traceback.extract_tb(info[2]):
+        res.append(u'\tFile %s, line %s, function %s' % tuple(stackentry[:3]))
+        if stackentry[3]:
+            res.append(u'\t  %s' % stackentry[3].decode('utf-8', 'replace'))
+    res.append(u'\n')
+    try:
+        res.append(u'\t Error: %s\n' % exception)
+    except:
+        pass
+    return u'\n'.join(res)
+
+
+def html_traceback(info, exception, title='',
+                   encoding='ISO-8859-1', body=''):
+    """ return an html formatted traceback from python exception infos.
+
+    :param info: an (exc_type, exc_value, traceback) tuple, as returned
+                 by sys.exc_info()
+    :param exception: the exception value displayed before the traceback
+    :param title: optional <h1> heading
+    :param body: optional raw html prepended to the output
+    """
+    tcbk = info[2]
+    stacktb = traceback.extract_tb(tcbk)
+    strings = []
+    if body:
+        strings.append(u'<div class="error_body">')
+        # FIXME
+        strings.append(body)
+        strings.append(u'</div>')
+    if title:
+        strings.append(u'<h1 class="error">%s</h1>'% html_escape(title))
+    try:
+        strings.append(u'<p class="error">%s</p>' % html_escape(str(exception)).replace("\n","<br />"))
+    except UnicodeError:
+        pass
+    strings.append(u'<div class="error_traceback">')
+    for index, stackentry in enumerate(stacktb):
+        strings.append(u'<b>File</b> <b class="file">%s</b>, <b>line</b> '
+                       u'<b class="line">%s</b>, <b>function</b> '
+                       u'<b class="function">%s</b>:<br/>'%(
+            html_escape(stackentry[0]), stackentry[1], html_escape(stackentry[2])))
+        if stackentry[3]:
+            string = html_escape(stackentry[3]).decode('utf-8', 'replace')
+            strings.append(u'&nbsp;&nbsp;%s<br/>\n' % (string))
+        # add locals info for each entry
+        try:
+            local_context = tcbk.tb_frame.f_locals
+            html_info = []
+            chars = 0
+            for name, value in local_context.iteritems():
+                value = html_escape(repr(value))
+                # NOTE: `info` here shadows the function parameter; harmless
+                # since the parameter was already consumed into `tcbk` above
+                info = u'<span class="name">%s</span>=%s, ' % (name, value)
+                line_length = len(name) + len(value)
+                chars += line_length
+                # 150 is the result of *years* of research ;-) (CSS might be helpful here)
+                if chars > 150:
+                    info = u'<br/>' + info
+                    chars = line_length
+                html_info.append(info)
+            boxid = 'ctxlevel%d' % index
+            strings.append(u'[%s]' % toggle_link(boxid, '+'))
+            strings.append(u'<div id="%s" class="pycontext hidden">%s</div>' %
+                           (boxid, ''.join(html_info)))
+            tcbk = tcbk.tb_next
+        except Exception:
+            pass # doesn't really matter if we have no context info    
+    strings.append(u'</div>')
+    return '\n'.join(strings)
+
+# csv files / unicode support #################################################
+
+class UnicodeCSVWriter:
+    """proxies calls to csv.writer.writerow to be able to deal with unicode
+
+    rows are encoded to `encoding` before being handed to the underlying
+    csv writer, and the encoded output is forwarded to the `wfunc` callback
+    """
+    
+    def __init__(self, wfunc, encoding, **kwargs):
+        # the csv writer writes through self (see write() below)
+        self.writer = csv.writer(self, **kwargs)
+        self.wfunc = wfunc
+        self.encoding = encoding
+
+    def write(self, data):
+        # file-like hook called by the underlying csv.writer
+        self.wfunc(data)
+
+    def writerow(self, row):
+        # encode unicode cells, stringify everything else
+        csvrow = []
+        for elt in row:
+            if isinstance(elt, unicode):
+                csvrow.append(elt.encode(self.encoding))
+            else:
+                csvrow.append(str(elt))
+        self.writer.writerow(csvrow)
+
+    def writerows(self, rows):
+        for row in rows:
+            self.writerow(row)
+
+
+# some decorators #############################################################
+
+class limitsize(object):
+    """decorator class truncating string results of the decorated function
+    to at most `maxsize` characters; non-string results pass through
+    unchanged
+    """
+    def __init__(self, maxsize):
+        self.maxsize = maxsize
+
+    def __call__(self, function):
+        def newfunc(*args, **kwargs):
+            ret = function(*args, **kwargs)
+            if isinstance(ret, basestring):
+                return ret[:self.maxsize]
+            return ret
+        return newfunc
+
+
+def jsonize(function):
+    import simplejson
+    def newfunc(*args, **kwargs):
+        ret = function(*args, **kwargs)
+        if isinstance(ret, decimal.Decimal):
+            ret = float(ret)
+        elif isinstance(ret, DateTimeType):
+            ret = ret.strftime('%Y-%m-%d %H:%M')
+        elif isinstance(ret, DateTimeDeltaType):
+            ret = ret.seconds
+        try:
+            return simplejson.dumps(ret)
+        except TypeError:
+            return simplejson.dumps(repr(ret))
+    return newfunc
+
+
+def htmlescape(function):
+    """decorator html-escaping the (string) result of the decorated function"""
+    def newfunc(*args, **kwargs):
+        ret = function(*args, **kwargs)
+        assert isinstance(ret, basestring)
+        return html_escape(ret)
+    return newfunc
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/common/utils.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,257 @@
+"""Some utilities for CubicWeb server/clients.
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from md5 import md5
+from time import time
+from random import randint, seed
+
+# initialize random seed from current time
+seed()
+
+def make_uid(key):
+    """forge a unique identifier from `key`, the current time and a
+    random number
+
+    NOTE(review): built on md5 + the random module, so uniqueness is
+    probabilistic and the value is not suitable as a security token
+    """
+    msg = str(key) + "%.10f"%time() + str(randint(0, 1000000))
+    return md5(msg).hexdigest()
+
+def working_hours(mxdate):
+    """
+    Predicate returning True is the date's hour is in working hours (8h->20h)
+    """
+    if mxdate.hour > 7 and mxdate.hour < 21:
+        return True
+    return False
+    
+def date_range(begin, end, incr=1, include=None):
+    """yields each date between begin and end (`end` itself is yielded
+    when the stepping lands on it exactly)
+    :param begin: the start date
+    :param end: the end date
+    :param incr: the step to use to iterate over dates. Default is
+                 one day.                 
+    :param include: None (means no exclusion) or a function taking a
+                    date as parameter, and returning True if the date
+                    should be included.
+    """
+    date = begin
+    while date <= end:
+        if include is None or include(date): 
+            yield date
+        date += incr
+
+
+def dump_class(cls, clsname):
+    """create a copy of a class by creating an empty class inheriting
+    from the given cls.
+
+    These classes are used as place holders for attribute and relation
+    descriptions
+    """
+    # type doesn't accept unicode name
+    # return type.__new__(type, str(clsname), (cls,), {})
+    # __autogenerated__ attribute is just a marker
+    return type(str(clsname), (cls,), {'__autogenerated__': True})
+
+
+def merge_dicts(dict1, dict2):
+    """update a copy of `dict1` with `dict2`"""
+    dict1 = dict(dict1)
+    dict1.update(dict2)
+    return dict1
+                
+
+class SizeConstrainedList(list):
+    """simple list that makes sure the list does not get bigger
+    than a given size.
+
+    when the list is full and a new element is added, the first
+    element of the list is removed before appending the new one
+
+    >>> l = SizeConstrainedList(2)
+    >>> l.append(1)
+    >>> l.append(2)
+    >>> l
+    [1, 2]
+    >>> l.append(3)
+    [2, 3]
+    """
+    def __init__(self, maxsize):
+        self.maxsize = maxsize
+
+    def append(self, element):
+        if len(self) == self.maxsize:
+            del self[0]
+        super(SizeConstrainedList, self).append(element)
+
+    def extend(self, sequence):
+        super(SizeConstrainedList, self).extend(sequence)
+        keepafter = len(self) - self.maxsize
+        if keepafter > 0:
+            del self[:keepafter]
+
+    __iadd__ = extend
+
+
+class UStringIO(list):
+    """a file-like object accumulating unicode strings: write() appends a
+    chunk and getvalue() joins the chunks back into one unicode string
+    """
+
+    def __nonzero__(self):
+        # an empty stream is still a valid (true) stream, unlike an
+        # empty list
+        return True
+    
+    def write(self, value):
+        assert isinstance(value, unicode), u"unicode required not %s : %s"\
+                                     % (type(value).__name__, repr(value))
+        self.append(value)
+        
+    def getvalue(self):
+        return u''.join(self)
+
+    def __repr__(self):
+        return '<%s at %#x>' % (self.__class__.__name__, id(self))
+
+
+class HTMLHead(UStringIO):
+    """wraps HTML header's stream
+
+    Request objects use a HTMLHead instance to ease adding of
+    javascripts and stylesheets
+    """
+    # javascript snippet cleaning server-side page data on page unload
+    js_unload_code = u'jQuery(window).unload(unloadPageData);'
+
+    def __init__(self):
+        super(HTMLHead, self).__init__()
+        self.jsvars = []               # (name, value) javascript variables
+        self.jsfiles = []              # javascript file urls
+        self.cssfiles = []             # (url, media) stylesheets
+        self.ie_cssfiles = []          # (url, media) IE-only stylesheets
+        self.post_inlined_scripts = [] # scripts run after external js files
+        self.pagedata_unload = False   # unload callback already registered?
+
+
+    def add_raw(self, rawheader):
+        """appends arbitrary raw markup to the head"""
+        self.write(rawheader)
+
+    def define_var(self, var, value):
+        """registers a javascript variable declaration"""
+        self.jsvars.append( (var, value) )
+
+    def add_post_inline_script(self, content):
+        """registers an inline script run after external js files"""
+        self.post_inlined_scripts.append(content)
+    
+    def add_js(self, jsfile):
+        """adds `jsfile` to the list of javascripts used in the webpage
+
+        This function checks if the file has already been added
+        :param jsfile: the script's URL
+        """
+        if jsfile not in self.jsfiles:
+            self.jsfiles.append(jsfile)
+
+    def add_css(self, cssfile, media):
+        """adds `cssfile` to the list of stylesheets used in the webpage
+
+        This function checks if the file has already been added
+        :param cssfile: the stylesheet's URL
+        """
+        if (cssfile, media) not in self.cssfiles:
+            self.cssfiles.append( (cssfile, media) )
+
+    def add_ie_css(self, cssfile, media='all'):
+        """registers some IE specific CSS"""
+        if (cssfile, media) not in self.ie_cssfiles:
+            self.ie_cssfiles.append( (cssfile, media) )
+
+    def add_unload_pagedata(self):
+        """registers onunload callback to clean page data on server"""
+        if not self.pagedata_unload:
+            self.post_inlined_scripts.append(self.js_unload_code)
+            self.pagedata_unload = True
+
+    def getvalue(self):
+        """reimplement getvalue to provide a consistent (and somewhat browser
+        optimzed cf. http://stevesouders.com/cuzillion) order in external
+        resources declaration
+        """
+        w = self.write
+        # 1/ variable declaration if any
+        if self.jsvars:
+            from simplejson import dumps
+            w(u'<script type="text/javascript">\n')
+            for var, value in self.jsvars:
+                w(u'%s = %s;\n' % (var, dumps(value)))
+            w(u'</script>\n')
+        # 2/ css files
+        for cssfile, media in self.cssfiles:
+            w(u'<link rel="stylesheet" type="text/css" media="%s" href="%s"/>\n' %
+              (media, cssfile))
+        # 3/ ie css if necessary
+        if self.ie_cssfiles:
+            w(u'<!--[if lt IE 8]>\n')
+            for cssfile, media in self.ie_cssfiles:
+                w(u'<link rel="stylesheet" type="text/css" media="%s" href="%s"/>\n' %
+                  (media, cssfile))
+            w(u'<![endif]--> \n')
+        # 4/ js files
+        for jsfile in self.jsfiles:
+            w(u'<script type="text/javascript" src="%s"></script>\n' % jsfile)
+        # 5/ post inlined scripts (i.e. scripts depending on other JS files)
+        if self.post_inlined_scripts:
+            w(u'<script type="text/javascript">\n')
+            w(u'\n\n'.join(self.post_inlined_scripts))
+            w(u'\n</script>\n')
+        return u'<head>\n%s</head>\n' % super(HTMLHead, self).getvalue()
+        
+
+class HTMLStream(object):
+    """represents a HTML page.
+
+    This is used by main templates so that HTML headers can be added
+    at any time during the page generation.
+    
+    HTMLStream uses the (U)StringIO interface to be compliant with
+    existing code.
+    """
+    
+    def __init__(self, req):
+        # stream for <head>
+        self.head = req.html_headers
+        # main stream
+        self.body = UStringIO()
+        self.doctype = u''
+        # xmldecl and html opening tag
+        self.xmldecl = u'<?xml version="1.0" encoding="%s"?>\n' % req.encoding
+        self.htmltag = u'<html xmlns="http://www.w3.org/1999/xhtml" ' \
+                       'xmlns:cubicweb="http://www.logilab.org/2008/cubicweb" ' \
+                       'xml:lang="%s" lang="%s">' % (req.lang, req.lang)
+
+
+    def write(self, data):
+        """StringIO interface: this method will be assigned to self.w
+        """
+        self.body.write(data)
+
+    def getvalue(self):
+        """writes HTML headers, closes </head> tag and writes HTML body"""
+        return u'%s\n%s\n%s\n%s\n%s\n</html>' % (self.xmldecl, self.doctype,
+                                                 self.htmltag,
+                                                 self.head.getvalue(),
+                                                 self.body.getvalue())
+
+
+class AcceptMixIn(object):
+    """Mixin class for vobjects defining the 'accepts' attribute describing
+    a set of supported entity types (Any by default).
+    """
+    # XXX deprecated, no more necessary
+
+
+# backward compatibility aliases: these names now live in
+# cubicweb.common.uilib / the cubicweb package
+from logilab.common.deprecation import moved, class_moved
+rql_for_eid = moved('cubicweb.common.uilib', 'rql_for_eid')
+ajax_replace_url = moved('cubicweb.common.uilib', 'ajax_replace_url')
+
+import cubicweb
+Binary = class_moved(cubicweb.Binary)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/common/view.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,462 @@
+"""abstract views and templates classes for CubicWeb web client
+
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from cStringIO import StringIO
+
+from logilab.mtconverter import html_escape
+
+from cubicweb import NotAnEntity, NoSelectableObject
+from cubicweb.common.registerers import accepts_registerer, priority_registerer
+from cubicweb.common.selectors import (in_group_selector, anyrset_selector, 
+                                    emptyrset_selector, accept_selector,
+                                    norset_selector, chainfirst)
+from cubicweb.common.appobject import AppRsetObject, ComponentMixIn
+from cubicweb.common.utils import UStringIO, HTMLStream
+
+# i18n marker: mark strings for extraction without translating at import time
+_ = unicode
+
+# robots control
+NOINDEX = u'<meta name="ROBOTS" content="NOINDEX" />'
+NOFOLLOW = u'<meta name="ROBOTS" content="NOFOLLOW" />'
+
+# DTD internal subset declaring the cubicweb: attribute extensions, so that
+# pages using them remain valid for a strict XHTML parser; interpolated into
+# the doctype strings below for XHTML-capable browsers
+CW_XHTML_EXTENSIONS = '''[
+  <!ATTLIST html xmlns:cubicweb CDATA  #FIXED \'http://www.logilab.org/2008/cubicweb\'  >
+
+<!ENTITY % coreattrs
+ "id          ID             #IMPLIED
+  class       CDATA          #IMPLIED
+  style       CDATA   #IMPLIED
+  title       CDATA         #IMPLIED
+
+ cubicweb:sortvalue         CDATA   #IMPLIED
+ cubicweb:target            CDATA   #IMPLIED
+ cubicweb:limit             CDATA   #IMPLIED
+ cubicweb:type              CDATA   #IMPLIED
+ cubicweb:loadtype          CDATA   #IMPLIED
+ cubicweb:wdgtype           CDATA   #IMPLIED
+ cubicweb:initfunc          CDATA   #IMPLIED
+ cubicweb:inputid           CDATA   #IMPLIED
+ cubicweb:tindex            CDATA   #IMPLIED
+ cubicweb:inputname         CDATA   #IMPLIED
+ cubicweb:value             CDATA   #IMPLIED
+ cubicweb:required          CDATA   #IMPLIED
+ cubicweb:accesskey         CDATA   #IMPLIED
+ cubicweb:maxlength         CDATA   #IMPLIED
+ cubicweb:variables         CDATA   #IMPLIED
+ cubicweb:displayactions    CDATA   #IMPLIED
+ cubicweb:fallbackvid       CDATA   #IMPLIED
+ cubicweb:vid               CDATA   #IMPLIED
+ cubicweb:rql               CDATA   #IMPLIED
+ cubicweb:actualrql         CDATA   #IMPLIED
+ cubicweb:rooteid           CDATA   #IMPLIED   
+ cubicweb:dataurl           CDATA   #IMPLIED
+ cubicweb:size              CDATA   #IMPLIED   
+ cubicweb:tlunit            CDATA   #IMPLIED
+ cubicweb:loadurl           CDATA   #IMPLIED
+ cubicweb:uselabel          CDATA   #IMPLIED
+  "> ] '''
+
+# the %s placeholder receives CW_XHTML_EXTENSIONS or '' (see MainTemplate)
+TRANSITIONAL_DOCTYPE = u'<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd" %s>\n'
+
+STRICT_DOCTYPE = u'<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd" %s>\n'
+
+class View(AppRsetObject):
+    """abstract view class, used as base for every renderable object such
+    as views, templates, some components...
+
+    A view is instantiated to render a [part of a] result set. View
+    subclasses may be parametrized using the following class attributes:
+    
+    * `templatable` indicates if the view may be embedded in a main
+      template or if it has to be rendered standalone (i.e. XML for
+      instance)
+    * if the view is not templatable, it should set the `content_type` class
+      attribute to the correct MIME type (text/xhtml by default)
+    * the `category` attribute may be used in the interface to regroup related
+      objects together
+
+    At instantiation time, the standard `req`, `rset`, and `cursor`
+    attributes are added and the `w` attribute will be set at rendering
+    time to a write function to use.
+    """
+    __registry__ = 'views'
+    
+    templatable = True
+    need_navigation = True
+    # content_type = 'application/xhtml+xml' # text/xhtml'
+    binary = False
+    add_to_breadcrumbs = True
+    category = 'view'
+    
+    def __init__(self, req, rset):
+        super(View, self).__init__(req, rset)
+        # write callable; set lazily by set_stream() / dispatch()
+        self.w = None
+
+    @property
+    def content_type(self):
+        # serve the XHTML mime type only to browsers advertising support
+        if self.req.xhtml_browser():
+            return 'application/xhtml+xml'
+        return 'text/html'
+    
+    def set_stream(self, w=None):
+        """initialize self.w; when no write callable is given, create an
+        internal stream (bytes for binary views, unicode otherwise) and
+        return it so dispatch() can fetch the generated content
+        """
+        if self.w is not None:
+            return
+        if w is None:
+            if self.binary:
+                self._stream = stream = StringIO()
+            else:
+                self._stream = stream = UStringIO()
+            w = stream.write
+        else:
+            stream = None
+        self.w = w
+        return stream
+
+    # main view interface #####################################################
+            
+    def dispatch(self, w=None, **context):
+        """called to render a view object for a result set.
+
+        This method is dispatched to an actual method selected
+        according to optional row and col parameters, which are locating
+        a particular row or cell in the result set:
+        
+        * if row [and col] are specified, `cell_call` is called
+        * if none of them is supplied, the view is considered to apply on
+          the whole result set (which may be None in this case), `call` is
+          called
+
+        returns the generated content when the stream was created here
+        (i.e. no `w` supplied and self.w was unset), None otherwise
+        """
+        # col is extracted for symmetry only; it reaches cell_call via
+        # **context (defaulted to 0 below when row is given)
+        row, col = context.get('row'), context.get('col')
+        if row is not None:
+            context.setdefault('col', 0)
+            view_func = self.cell_call
+        else:
+            view_func = self.call
+        stream = self.set_stream(w)
+        # stream = self.set_stream(context)
+        view_func(**context)
+        # return stream content if we have created it
+        if stream is not None:
+            return self._stream.getvalue()
+
+    # should default .call() method add a <div class="section"> around each
+    # rset item
+    add_div_section = True
+    
+    def call(self, **kwargs):
+        """the view is called for an entire result set, by default loop
+        other rows of the result set and call the same view on the
+        particular row
+
+        Views applicable on None result sets have to override this method
+        """
+        rset = self.rset
+        if rset is None:
+            raise NotImplementedError, self
+        wrap = self.templatable and len(rset) > 1 and self.add_div_section
+        for i in xrange(len(rset)):
+            if wrap:
+                self.w(u'<div class="section">')
+            self.wview(self.id, rset, row=i, **kwargs)
+            if wrap:
+                self.w(u"</div>")
+
+    def cell_call(self, row, col, **kwargs):
+        """the view is called for a particular result set cell"""
+        raise NotImplementedError, self
+        
+    def linkable(self):
+        """return True if the view may be linked in a menu
+        
+        by default views without title are not meant to be displayed
+        """
+        if not getattr(self, 'title', None):
+            return False
+        return True
+
+    def is_primary(self):
+        return self.id == 'primary'
+    
+    def url(self):
+        """return the url associated with this view. Should not be
+        necessary for non linkable views, but a default implementation
+        is provided anyway.
+        """
+        # keep the current rql in the url when available so the view
+        # re-applies to the same result set
+        try:
+            return self.build_url(vid=self.id, rql=self.req.form['rql'])
+        except KeyError:
+            return self.build_url(vid=self.id)
+
+    def set_request_content_type(self):
+        """set the content type returned by this view"""
+        self.req.set_content_type(self.content_type)
+
+    # view utilities ##########################################################
+    
+    def view(self, __vid, rset, __fallback_vid=None, **kwargs):
+        """shortcut to self.vreg.render method avoiding to pass self.req
+
+        when `__fallback_vid` is given, it is tried if no view with id
+        `__vid` is selectable for the result set
+        """
+        try:
+            view = self.vreg.select_view(__vid, self.req, rset, **kwargs)
+        except NoSelectableObject:
+            if __fallback_vid is None:
+                raise
+            view = self.vreg.select_view(__fallback_vid, self.req, rset, **kwargs)
+        return view.dispatch(**kwargs)
+    
+    def wview(self, __vid, rset, __fallback_vid=None, **kwargs):
+        """shortcut to self.view method automatically passing self.w as argument
+        """
+        self.view(__vid, rset, __fallback_vid, w=self.w, **kwargs)
+
+    def whead(self, data):
+        # write into the <head> section instead of the body
+        self.req.html_headers.write(data)
+
+    def wdata(self, data):
+        """simple helper that escapes `data` and writes into `self.w`"""
+        self.w(html_escape(data))
+
+    def action(self, actionid, row=0):
+        """shortcut to get action object with id `actionid`"""
+        return self.vreg.select_action(actionid, self.req, self.rset,
+                                       row=row)
+
+    def action_url(self, actionid, label=None, row=0):
+        """simple method to be able to display `actionid` as a link anywhere
+        """
+        action = self.vreg.select_action(actionid, self.req, self.rset,
+                                         row=row)
+        if action:
+            label = label or self.req._(action.title)
+            return u'<a href="%s">%s</a>' % (html_escape(action.url()), label)
+        return u''
+    
+    def html_headers(self):
+        """return a list of html headers (eg something to be inserted between
+        <head> and </head> of the returned page
+
+        by default return a meta tag to disable robot indexation of the page
+        """
+        return [NOINDEX]
+    
+    def page_title(self):
+        """returns a title according to the result set - used for the
+        title in the HTML header
+        """
+        vtitle = self.req.form.get('vtitle')
+        if vtitle:
+            return self.req._(vtitle)
+        # class defined title will only be used if the resulting title doesn't
+        # seem clear enough
+        vtitle = getattr(self, 'title', None) or u''
+        if vtitle:
+            vtitle = self.req._(vtitle)
+        rset = self.rset
+        if rset and rset.rowcount:
+            if rset.rowcount == 1:
+                try:
+                    entity = self.complete_entity(0)
+                    # use long_title to get context information if any
+                    clabel = entity.dc_long_title()
+                except NotAnEntity:
+                    # NOTE(review): display_name is not imported in this
+                    # module -- presumably installed as a builtin at startup;
+                    # confirm, otherwise this branch raises NameError
+                    clabel = display_name(self.req, rset.description[0][0])
+                    clabel = u'%s (%s)' % (clabel, vtitle)
+            else :
+                etypes = rset.column_types(0)
+                if len(etypes) == 1:
+                    etype = iter(etypes).next()
+                    clabel = display_name(self.req, etype, 'plural')
+                else :
+                    clabel = u'#[*] (%s)' % vtitle
+        else:
+            clabel = vtitle
+        return u'%s (%s)' % (clabel, self.req.property_value('ui.site-title'))
+
+    def output_url_builder( self, name, url, args ):
+        """write a javascript function named `name` taking `args` and
+        interpolating them into `url` (split on its python %s placeholders)
+        """
+        self.w(u'<script language="JavaScript"><!--\n' \
+               u'function %s( %s ) {\n' % (name, ','.join(args) ) )
+        url_parts = url.split("%s")
+        self.w(u' url="%s"' % url_parts[0] )
+        for arg, part in zip(args, url_parts[1:]):
+            # NOTE(review): the generated code uses str() and assigns to
+            # document.window.href, neither of which is standard JavaScript
+            # (String() / window.location.href expected) -- confirm intent
+            self.w(u'+str(%s)' % arg )
+            if part:
+                self.w(u'+"%s"' % part)
+        self.w('\n document.window.href=url;\n')
+        self.w('}\n-->\n</script>\n')
+
+    def create_url(self, etype, **kwargs):
+        """ return the url of the entity creation form for a given entity type"""
+        return self.req.build_url('add/%s'%etype, **kwargs)
+
+        
+# concrete views base classes #################################################
+
+class EntityView(View):
+    """base class for views applying on an entity (i.e. uniform result set)
+    """
+    __registerer__ = accepts_registerer
+    __selectors__ = (accept_selector,)
+    category = 'entityview'
+    
+    def field(self, label, value, row=True, show_label=True, w=None, tr=True):
+        """ read-only field: write a label/value pair, optionally wrapped in
+        a <div class="row">; `tr` controls label translation
+        """
+        if w is None:
+            w = self.w
+        if row:
+            w(u'<div class="row">')
+        if show_label:
+            if tr:
+                # NOTE(review): display_name is not imported in this module;
+                # presumably installed as a builtin at startup -- confirm
+                label = display_name(self.req, label)
+            w(u'<span class="label">%s</span>' % label)
+        w(u'<div class="field">%s</div>' % value)
+        if row:
+            w(u'</div>')
+
+        
+class StartupView(View):
+    """base class for views which don't need a particular result set
+    to be displayed (so they can always be displayed !)
+    """
+    __registerer__ = priority_registerer
+    __selectors__ = (in_group_selector, norset_selector)
+    # groups the user must belong to (empty tuple: no restriction)
+    require_groups = ()
+    category = 'startupview'
+    
+    def url(self):
+        """return the url associated with this view. We can omit rql here"""
+        return self.build_url('view', vid=self.id)
+
+    def html_headers(self):
+        """return a list of html headers (eg something to be inserted between
+        <head> and </head> of the returned page
+
+        by default startup views are indexed
+        """
+        return []
+
+
+class EntityStartupView(EntityView):
+    """base class for entity views which may also be applied to None
+    result set (usually a default rql is provided by the view class)
+    """
+    __registerer__ = accepts_registerer
+    __selectors__ = (chainfirst(norset_selector, accept_selector),)
+    
+    # rql executed by call() when the view is rendered without a result set
+    default_rql = None
+    
+    def __init__(self, req, rset):
+        super(EntityStartupView, self).__init__(req, rset)
+        if rset is None:
+            # this instance is not in the "entityview" category
+            self.category = 'startupview'
+
+    def startup_rql(self):
+        """return some rql to be executed if the result set is None"""
+        return self.default_rql
+    
+    def call(self, **kwargs):
+        """override call to execute rql returned by the .startup_rql
+        method if necessary
+        """
+        if self.rset is None:
+            self.rset = self.req.execute(self.startup_rql())
+        rset = self.rset
+        for i in xrange(len(rset)):
+            self.wview(self.id, rset, row=i, **kwargs)
+
+    def url(self):
+        """return the url associated with this view. We can omit rql if we
+        are on a result set on which we do not apply.
+        """
+        if not self.__select__(self.req, self.rset):
+            return self.build_url(vid=self.id)
+        return super(EntityStartupView, self).url()
+
+    
+class AnyRsetView(View):
+    """base class for views applying on any non empty result sets"""
+    __registerer__ = priority_registerer
+    __selectors__ = (anyrset_selector,)
+    
+    category = 'anyrsetview'
+    
+
+class EmptyRsetView(View):
+    """base class for views applying on any empty result sets"""
+    __registerer__ = priority_registerer
+    __selectors__ = (emptyrset_selector,)
+
+
+# concrete template base classes ##############################################
+
+class Template(View):
+    """a template is almost like a view, except that by default a template
+    is only used globally (i.e. no result set adaptation)
+    """
+    __registry__ = 'templates'
+    __registerer__ = priority_registerer
+    __selectors__ = (in_group_selector,)
+
+    require_groups = ()
+    
+    def template(self, oid, **kwargs):
+        """shortcut to self.registry.render method on the templates registry"""
+        # honour an explicit 'w' from kwargs, defaulting to our own stream
+        w = kwargs.pop('w', self.w)
+        self.vreg.render('templates', oid, self.req, w=w, **kwargs)
+
+
+class MainTemplate(Template):
+    """main template are primary access point to render a full HTML page.
+    There is usually at least a regular main template and a simple fallback
+    one to display error if the first one failed
+    """
+
+    base_doctype = STRICT_DOCTYPE
+
+    @property
+    def doctype(self):
+        # include the cubicweb: attributes DTD subset only for browsers
+        # accepting application/xhtml+xml
+        if self.req.xhtml_browser():
+            return self.base_doctype % CW_XHTML_EXTENSIONS
+        return self.base_doctype % ''
+
+    def set_stream(self, w=None, templatable=True):
+        """same contract as View.set_stream, but use an HTMLStream for the
+        templatable html case so headers can be added during generation
+        """
+        if templatable and self.w is not None:
+            return
+
+        if w is None:
+            if self.binary:
+                self._stream = stream = StringIO()
+            elif not templatable:
+                # not templatable means we're using a non-html view, we don't
+                # want the HTMLStream stuff to interfere during data generation
+                self._stream = stream = UStringIO()
+            else:
+                self._stream = stream = HTMLStream(self.req)
+            w = stream.write
+        else:
+            stream = None
+        self.w = w
+        return stream
+
+    def write_doctype(self, xmldecl=True):
+        """set the doctype on the underlying HTMLStream; only valid after
+        set_stream() created an HTMLStream (templatable, non-binary page)
+        """
+        assert isinstance(self._stream, HTMLStream)
+        self._stream.doctype = self.doctype
+        if not xmldecl:
+            self._stream.xmldecl = u''
+
+# viewable components base classes ############################################
+
+class VComponent(ComponentMixIn, View):
+    """base class for displayable components"""
+    # user-settable properties for this component
+    property_defs = {
+        'visible':  dict(type='Boolean', default=True,
+                         help=_('display the component or not')),}
+
+class SingletonVComponent(VComponent):
+    """base class for displayable unique components"""
+    __registerer__ = priority_registerer
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cwconfig.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,856 @@
+"""common configuration utilities for cubicweb
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+import sys
+import os
+import logging
+from os.path import exists, join, expanduser, abspath, basename
+
+from logilab.common.decorators import cached
+from logilab.common.configuration import (Configuration, Method,
+                                          ConfigurationMixIn, merge_options)
+
+from cubicweb import CW_SOFTWARE_ROOT, CW_MIGRATION_MAP, ConfigurationError
+from cubicweb.toolsutils import env_path, read_config, create_dir
+
+# configuration classes registered through the metaconfiguration metaclass
+CONFIGURATIONS = []
+
+# i18n marker: mark strings for extraction without translating at import time
+_ = unicode
+
+class metaconfiguration(type):
+    """metaclass to automatically register configuration"""
+    def __new__(mcs, name, bases, classdict):
+        cls = super(metaconfiguration, mcs).__new__(mcs, name, bases, classdict)
+        # only register concrete configurations, i.e. those defining a name
+        if classdict.get('name'):
+            CONFIGURATIONS.append(cls)
+        return cls
+
+def configuration_cls(name):
+    """return the configuration class registered with the given name
+
+    raise ConfigurationError if no registered configuration matches
+    """
+    try:
+        return [c for c in CONFIGURATIONS if c.name == name][0]
+    except IndexError:
+        raise ConfigurationError('no such config %r (check it exists with "cubicweb-ctl list")' % name)
+
+def possible_configurations(directory):
+    """return a list of installed configurations in a directory
+    according to *-ctl files
+    """
+    # a configuration is considered installed if its .conf file exists
+    return [name for name in ('repository', 'twisted', 'all-in-one')
+            if exists(join(directory, '%s.conf' % name))]
+
+def guess_configuration(directory):
+    """try to guess the configuration to use for a directory. If multiple
+    configurations are found, ConfigurationError is raised
+    """
+    modes = possible_configurations(directory)
+    # exactly one candidate required: zero or several is ambiguous
+    if len(modes) != 1:
+        raise ConfigurationError('unable to guess configuration from %r %s'
+                                 % (directory, modes))
+    return modes[0]
+
+# XXX generate this according to the configuration (repository/all-in-one/web)
+# one 'disable-<registry>' csv option per vobject registry
+VREGOPTIONS = []
+for registry in ('etypes', 'hooks', 'controllers', 'actions', 'components',
+                 'views', 'templates', 'boxes', 'contentnavigation', 'urlrewriting',
+                 'facets'):
+    VREGOPTIONS.append(('disable-%s'%registry,
+                        {'type' : 'csv', 'default': (),
+                         'help': 'list of identifier of application objects from the %s registry to disable'%registry,
+                         'group': 'appobjects', 'inputlevel': 2,
+                         }))
+VREGOPTIONS = tuple(VREGOPTIONS)
+
+# persistent options definition: options stored in the database rather than
+# in configuration files; cubes may extend this through
+# register_persistent_options() below
+PERSISTENT_OPTIONS = (
+    ('encoding',
+     {'type' : 'string',
+      'default': 'UTF-8',
+      'help': _('user interface encoding'),
+      'group': 'ui', 'sitewide': True,
+      }),    
+    ('language',
+     {'type' : 'string',
+      'default': 'en',
+      'vocabulary': Method('available_languages'),
+      'help': _('language of the user interface'),
+      'group': 'ui', 
+      }),
+    ('date-format',
+     {'type' : 'string',
+      'default': '%Y/%m/%d',
+      'help': _('how to format date in the ui ("man strftime" for format description)'),
+      'group': 'ui', 
+      }),
+    ('datetime-format',
+     {'type' : 'string',
+      'default': '%Y/%m/%d %H:%M',
+      'help': _('how to format date and time in the ui ("man strftime" for format description)'),
+      'group': 'ui', 
+      }),
+    ('time-format',
+     {'type' : 'string',
+      'default': '%H:%M',
+      'help': _('how to format time in the ui ("man strftime" for format description)'),
+      'group': 'ui', 
+      }),
+    ('float-format',
+     {'type' : 'string',
+      'default': '%.3f',
+      'help': _('how to format float numbers in the ui'),
+      'group': 'ui', 
+      }),
+    ('default-text-format',
+     {'type' : 'choice',
+      'choices': ('text/plain', 'text/rest', 'text/html'),
+      'default': 'text/html', # use fckeditor in the web ui
+      'help': _('default text format for rich text fields.'),
+      'group': 'ui', 
+      }),
+    ('short-line-size',
+     {'type' : 'int',
+      'default': 40,
+      'help': _('maximum number of characters in short description'),
+      'group': 'navigation',
+      }),
+    )
+
+def register_persistent_options(options):
+    """merge `options` into the persistent options definition (used by cubes
+    to register their own persistent options)
+    """
+    global PERSISTENT_OPTIONS
+    PERSISTENT_OPTIONS = merge_options(PERSISTENT_OPTIONS + options)
+                
+# map configuration option types to cubicweb entity attribute types
+CFGTYPE2ETYPE_MAP = {
+    'string': 'String',
+    'choice': 'String',
+    'yn':     'Boolean',
+    'int':    'Int',
+    'float' : 'Float',
+    }
+    
+class CubicWebNoAppConfiguration(ConfigurationMixIn):
+    """base class for cubicweb configuration without a specific instance directory
+    """
+    __metaclass__ = metaconfiguration
+    # to set in concrete configuration
+    name = None
+    # log messages format (see logging module documentation for available keys)
+    log_format = '%(asctime)s - (%(name)s) %(levelname)s: %(message)s'
+    # whether to remove vobjects based on unused interfaces (name suggests;
+    # confirm against registry cleanup code)
+    cleanup_interface_sobjects = True
+
+    # deployment mode and cubes location, guessed at import time:
+    # apycot CI environment, development checkout (.hg present) or
+    # system-wide installation
+    if os.environ.get('APYCOT_ROOT'):
+        mode = 'test'
+        CUBES_DIR = '%(APYCOT_ROOT)s/local/share/cubicweb/cubes/' % os.environ
+    elif exists(join(CW_SOFTWARE_ROOT, '.hg')):
+        mode = 'dev'
+        CUBES_DIR = join(CW_SOFTWARE_ROOT, '../cubes')
+    else:
+        mode = 'installed'
+        CUBES_DIR = '/usr/share/cubicweb/cubes/'
+
+    options = VREGOPTIONS + (
+       ('log-threshold',
+         {'type' : 'string', # XXX use a dedicated type?
+          'default': 'ERROR',
+          'help': 'server\'s log level',
+          'group': 'main', 'inputlevel': 1,
+          }),
+        # pyro name server
+        ('pyro-ns-host',
+         {'type' : 'string',
+          'default': '',
+          'help': 'Pyro name server\'s host. If not set, will be detected by a \
+broadcast query',
+          'group': 'pyro-name-server', 'inputlevel': 1,
+          }),
+        ('pyro-ns-port',
+         {'type' : 'int',
+          'default': None,
+          'help': 'Pyro name server\'s listening port. If not set, default \
+port will be used.',
+          'group': 'pyro-name-server', 'inputlevel': 1,
+          }),
+        ('pyro-ns-group',
+         {'type' : 'string',
+          'default': 'cubicweb',
+          'help': 'Pyro name server\'s group where the repository will be \
+registered.',
+          'group': 'pyro-name-server', 'inputlevel': 1,
+          }),
+        # common configuration options which are potentially required as soon as
+        # you're using "base" application objects (ie to really server/web
+        # specific)
+        ('base-url',
+         {'type' : 'string',
+          'default': None,
+          'help': 'web server root url',
+          'group': 'main', 'inputlevel': 1,
+          }),
+        ('mangle-emails',
+         {'type' : 'yn',
+          'default': False,
+          'help': "don't display actual email addresses but mangle them if \
+this option is set to yes",
+          'group': 'email', 'inputlevel': 2,
+          }),
+        )
+    # static and class methods used to get application independent resources ##
+        
+    @staticmethod
+    def cubicweb_version():
+        """return installed cubicweb version as a Version object"""
+        from logilab.common.changelog import Version
+        from cubicweb import __pkginfo__
+        version = __pkginfo__.numversion
+        # versions are expected as (major, minor, patch) triples
+        assert len(version) == 3, version
+        return Version(version)
+    
+    @staticmethod
+    def persistent_options_configuration():
+        """return a Configuration built from the persistent options"""
+        return Configuration(options=PERSISTENT_OPTIONS)
+
+    @classmethod
+    def shared_dir(cls):
+        """return the shared data directory (i.e. directory where standard
+        library views and data may be found)
+        """
+        # in a development checkout, shared data lives in the source tree
+        if cls.mode in ('dev', 'test') and not os.environ.get('APYCOT_ROOT'):
+            return join(CW_SOFTWARE_ROOT, 'web')
+        return join(cls.cubes_dir(), 'shared')
+        
+    @classmethod
+    def i18n_lib_dir(cls):
+        """return application's i18n directory"""
+        if cls.mode in ('dev', 'test') and not os.environ.get('APYCOT_ROOT'):
+            return join(CW_SOFTWARE_ROOT, 'i18n')
+        return join(cls.shared_dir(), 'i18n')
+
+    @classmethod
+    def available_cubes(cls):
+        """return a sorted list of cube names found in the cubes directory,
+        ignoring VCS bookkeeping directories and the 'shared' data dir
+        """
+        cubes_dir = cls.cubes_dir()
+        return sorted(cube for cube in os.listdir(cubes_dir)
+                      if os.path.isdir(os.path.join(cubes_dir, cube))
+                      and not cube in ('CVS', '.svn', 'shared', '.hg'))
+    
+    @classmethod
+    def cubes_dir(cls):
+        """return the application cubes directory (overridable through the
+        CW_CUBES environment variable)
+        """
+        return env_path('CW_CUBES', cls.CUBES_DIR, 'cubes')
+    
+    @classmethod
+    def cube_dir(cls, cube):
+        """return the cube directory for the given cube id,
+        raise ConfigurationError if it doesn't exist
+        """
+        cube_dir = join(cls.cubes_dir(), cube)
+        if not exists(cube_dir):
+            raise ConfigurationError('no cube %s in %s' % (
+                cube, cls.cubes_dir()))
+        return cube_dir
+
+    @classmethod
+    def cube_migration_scripts_dir(cls, cube):
+        """cube migration scripts directory"""
+        return join(cls.cube_dir(cube), 'migration')
+    
+    @classmethod
+    def cube_pkginfo(cls, cube):
+        """return the __pkginfo__ module for the given cube"""
+        # translate legacy cube names to their current name first
+        cube = CW_MIGRATION_MAP.get(cube, cube)
+        try:
+            return getattr(__import__('cubes.%s.__pkginfo__' % cube), cube).__pkginfo__
+        except ImportError:
+            raise ConfigurationError('unable to find packaging information for '
+                                     'cube %s' % cube)
+
+    @classmethod
+    def cube_version(cls, cube):
+        """return the version of the cube located in the given directory        
+        """
+        from logilab.common.changelog import Version
+        version = cls.cube_pkginfo(cube).numversion
+        assert len(version) == 3, version
+        return Version(version)
+
+    @classmethod
+    def cube_dependencies(cls, cube):
+        """return cubicweb cubes used by the given cube (__use__ in pkginfo)"""
+        return getattr(cls.cube_pkginfo(cube), '__use__', ())
+
+    @classmethod
+    def cube_recommends(cls, cube):
+        """return cubicweb cubes recommended by the given cube (__recommend__)"""
+        return getattr(cls.cube_pkginfo(cube), '__recommend__', ())
+
+    @classmethod
+    def expand_cubes(cls, cubes):
+        """expand the given list of top level cubes used by adding recursively
+        each cube dependencies
+        """
+        cubes = list(cubes)
+        # breadth-first walk over the dependency graph, appending unseen cubes
+        todo = cubes[:]
+        while todo:
+            cube = todo.pop(0)
+            for depcube in cls.cube_dependencies(cube):
+                if depcube not in cubes:
+                    depcube = CW_MIGRATION_MAP.get(depcube, depcube)
+                    cubes.append(depcube)
+                    todo.append(depcube)
+        return cubes
+
+    @classmethod
+    def reorder_cubes(cls, cubes):
+        """reorder cubes from the top level cubes to inner dependencies
+        cubes
+        """
+        from logilab.common.graph import get_cycles
+        # build the dependency graph restricted to the given cubes
+        graph = {}
+        for cube in cubes:
+            cube = CW_MIGRATION_MAP.get(cube, cube)
+            deps = cls.cube_dependencies(cube) + \
+                   cls.cube_recommends(cube)
+            graph[cube] = set(dep for dep in deps if dep in cubes)
+        cycles = get_cycles(graph)
+        if cycles:
+            cycles = '\n'.join(' -> '.join(cycle) for cycle in cycles)
+            raise ConfigurationError('cycles in cubes dependencies: %s'
+                                     % cycles)
+        cubes = []
+        # topological sort: repeatedly remove cubes with no remaining
+        # dependency; termination is guaranteed by the cycle check above
+        while graph:
+            # sorted to get predictable results
+            for cube, deps in sorted(graph.items()):
+                if not deps:
+                    cubes.append(cube)
+                    del graph[cube]
+                    # NOTE(review): this inner loop rebinds 'deps', shadowing
+                    # the outer for-loop variable; harmless here but fragile
+                    for deps in graph.itervalues():
+                        try:
+                            deps.remove(cube)
+                        except KeyError:
+                            continue
+        return tuple(reversed(cubes))
+    
+    @classmethod
+    def cls_adjust_sys_path(cls):
+        """update python path if necessary, so 'cubes.<name>' packages become
+        importable
+        """
+        try:
+            templdir = abspath(join(cls.cubes_dir(), '..'))
+            if not templdir in sys.path:
+                sys.path.insert(0, templdir)
+        except ConfigurationError:
+            return # cube dir doesn't exists
+
+    @classmethod
+    def load_cwctl_plugins(cls):
+        """load known cubicweb-ctl plugin modules from the cubicweb source
+        tree, then per-cube plugins ('ecplugin.py') or cube __init__ modules
+        """
+        from logilab.common.modutils import load_module_from_file
+        cls.cls_adjust_sys_path()
+        for ctlfile in ('web/webctl.py',  'etwist/twctl.py',
+                        'server/serverctl.py', 'hercule.py',
+                        'devtools/devctl.py', 'goa/goactl.py'):
+            if exists(join(CW_SOFTWARE_ROOT, ctlfile)):
+                load_module_from_file(join(CW_SOFTWARE_ROOT, ctlfile))
+                cls.info('loaded cubicweb-ctl plugin %s', ctlfile)
+        templdir = cls.cubes_dir()
+        for cube in cls.available_cubes():
+            pluginfile = join(templdir, cube, 'ecplugin.py')
+            initfile = join(templdir, cube, '__init__.py')
+            if exists(pluginfile):
+                try:
+                    __import__('cubes.%s.ecplugin' % cube)
+                    cls.info('loaded cubicweb-ctl plugin from %s', cube)
+                # NOTE(review): bare except (also below) swallows everything,
+                # including KeyboardInterrupt/SystemExit; deliberate
+                # best-effort loading, but logging makes failures visible
+                except:
+                    cls.exception('while loading plugin %s', pluginfile)
+            elif exists(initfile):
+                try:
+                    __import__('cubes.%s' % cube)
+                except:
+                    cls.exception('while loading cube %s', cube)
+            else:
+                cls.warning('no __init__ file in cube %s', cube) 
+
+    @classmethod
+    def init_available_cubes(cls):
+        """cubes may register some sources (svnfile for instance) in their
+        __init__ file, so they should be loaded early in the startup process
+        """
+        for cube in cls.available_cubes():
+            try:
+                __import__('cubes.%s' % cube)
+            except Exception, ex:
+                cls.warning("can't init cube %s: %s", cube, ex)
+        
+    # sub-directories of resp. the cubicweb library and each cube that the
+    # vobjects registry should load (see build_vregistry_path below)
+    cubicweb_vobject_path = set(['entities'])
+    cube_vobject_path = set(['entities'])
+
+    @classmethod
+    def build_vregistry_path(cls, templpath, evobjpath=None, tvobjpath=None):
+        """given a list of directories, return a list of sub files and
+        directories that should be loaded by the application objects registry.
+
+        :param evobjpath:
+          optional list of sub-directories (or files without the .py ext) of
+          the cubicweb library that should be tested and added to the output list
+          if they exists. If not given, default to `cubicweb_vobject_path` class
+          attribute.
+        :param tvobjpath:
+          optional list of sub-directories (or files without the .py ext) of
+          directories given in `templpath` that should be tested and added to
+          the output list if they exists. If not given, default to
+          `cube_vobject_path` class attribute.
+        """
+        vregpath = cls.build_vregistry_cubicweb_path(evobjpath)
+        vregpath += cls.build_vregistry_cube_path(templpath, tvobjpath)
+        return vregpath
+
+    @classmethod
+    def build_vregistry_cubicweb_path(cls, evobjpath=None):
+        """collect existing vobject sub-directories of the cubicweb library"""
+        vregpath = []
+        if evobjpath is None:
+            evobjpath = cls.cubicweb_vobject_path
+        for subdir in evobjpath:
+            path = join(CW_SOFTWARE_ROOT, subdir)
+            if exists(path):
+                vregpath.append(path)
+        return vregpath
+
+    @classmethod
+    def build_vregistry_cube_path(cls, templpath, tvobjpath=None):
+        """collect existing vobject sub-directories or modules of each cube"""
+        vregpath = []
+        if tvobjpath is None:
+            tvobjpath = cls.cube_vobject_path
+        for directory in templpath:
+            for subdir in tvobjpath:
+                path = join(directory, subdir)
+                if exists(path):
+                    vregpath.append(path)
+                # accept a single module (<subdir>.py) as well as a package
+                elif exists(path + '.py'):
+                    vregpath.append(path + '.py')
+        return vregpath
+        
    def __init__(self):
        ConfigurationMixIn.__init__(self)
        self.adjust_sys_path()
        self.load_defaults()
        # gettext translation callables, filled per language by subclasses
        self.translations = {}

    def adjust_sys_path(self):
        # instance-level hook; the class method does the actual work
        self.cls_adjust_sys_path()

    def init_log(self, logthreshold=None, debug=False,
                 logfile=None, syslog=False):
        """init the log service

        :param logthreshold: explicit logging level; when None, taken from the
          'log-threshold' option (or DEBUG when `debug` is set)
        :param debug: log to stdout/stderr at debug level, colored on a tty
        :param logfile: file to log to; falls back to a stream handler if it
          can't be opened
        :param syslog: when no logfile is given, log to syslog instead of the
          standard error stream
        """
        if os.environ.get('APYCOT_ROOT'):
            logthreshold = logging.CRITICAL
            # redirect logs to stdout to avoid apycot output parsing failure
            handler = logging.StreamHandler(sys.stdout)
        else:
            if debug:
                if logthreshold is None:
                    logthreshold = logging.DEBUG # LLDEBUG
                handler = logging.StreamHandler()
            elif logfile is None:
                if syslog:
                    from logging import handlers
                    handler = handlers.SysLogHandler()
                else:
                    handler = logging.StreamHandler()
            else:
                try:
                    handler = logging.FileHandler(logfile)
                except IOError:
                    # unwritable log file: degrade to stderr rather than fail
                    handler = logging.StreamHandler()
            if logthreshold is None:
                thresholdname = self['log-threshold']
                # translate logilab logger level names to stdlib logging names
                logthreshold = getattr(logging, THRESHOLD_MAP.get(thresholdname,
                                                                  thresholdname))
        # configure the root logger
        logger = logging.getLogger()
        logger.setLevel(logthreshold)
        # only addHandler and removeHandler method while I would like a
        # setHandler method, so do it this way :$
        logger.handlers = [handler]
        isatty = hasattr(sys.__stdout__, 'isatty') and sys.__stdout__.isatty()
        if debug and isatty:
            from logilab.common.logging_ext import ColorFormatter
            fmt = ColorFormatter(self.log_format, '%Y-%m-%d %H:%M:%S')
            def col_fact(record):
                # highlight XXX markers and 'kick' messages in debug output
                if 'XXX' in record.message:
                    return 'cyan'
                if 'kick' in record.message:
                    return 'red'
            fmt.colorfilters.append(col_fact)
        else:
            fmt = logging.Formatter(self.log_format, '%Y-%m-%d %H:%M:%S')
        logger.handlers[0].setFormatter(fmt)
        # configure simpleTal logger
        logging.getLogger('simpleTAL').setLevel(logging.ERROR)

    def vregistry_path(self):
        """return a list of files or directories where the registry will look
        for application objects. By default return nothing in NoApp config.
        """
        return []
+    
+    def eproperty_definitions(self):
+        cfg = self.persistent_options_configuration()
+        for section, options in cfg.options_by_section():
+            section = section.lower()
+            for optname, optdict, value in options:
+                key = '%s.%s' % (section, optname)
+                type, vocab = self.map_option(optdict)
+                default = cfg.option_default(optname, optdict)
+                pdef = {'type': type, 'vocabulary': vocab, 'default': default,
+                        'help': optdict['help'],
+                        'sitewide': optdict.get('sitewide', False)}
+                yield key, pdef
+                
    def map_option(self, optdict):
        """return (entity type, vocabulary) for an option definition dict"""
        try:
            vocab = optdict['choices']
        except KeyError:
            vocab = optdict.get('vocabulary')
            if isinstance(vocab, Method):
                # NOTE(review): this fetches the bound method object but never
                # calls it, so `vocab` ends up being a callable rather than the
                # vocabulary values -- confirm against callers whether they
                # expect a callable here
                vocab = getattr(self, vocab.method, ())
        return CFGTYPE2ETYPE_MAP[optdict['type']], vocab
+
+    
class CubicWebConfiguration(CubicWebNoAppConfiguration):
    """base class for cubicweb server and web configurations"""

    # filesystem layout according to the detected run mode:
    # - test: everything under $APYCOT_ROOT
    # - dev: per-user registry, /tmp for run-time files
    # - installed: FHS-like system paths
    if CubicWebNoAppConfiguration.mode == 'test':
        # NOTE(review): assumes APYCOT_ROOT is always set when mode == 'test',
        # otherwise this raises KeyError at import time -- confirm against the
        # mode detection code
        root = os.environ['APYCOT_ROOT']
        REGISTRY_DIR = '%s/etc/cubicweb.d/' % root
        INSTANCE_DATA_DIR = REGISTRY_DIR
        RUNTIME_DIR = '/tmp/'
        MIGRATION_DIR = '%s/local/share/cubicweb/migration/' % root
        if not exists(REGISTRY_DIR):
            os.makedirs(REGISTRY_DIR)
    elif CubicWebNoAppConfiguration.mode == 'dev':
        REGISTRY_DIR = expanduser('~/etc/cubicweb.d/')
        INSTANCE_DATA_DIR = REGISTRY_DIR
        RUNTIME_DIR = '/tmp/'
        MIGRATION_DIR = join(CW_SOFTWARE_ROOT, 'misc', 'migration')
    else: #mode = 'installed'
        REGISTRY_DIR = '/etc/cubicweb.d/'
        INSTANCE_DATA_DIR = '/var/lib/cubicweb/instances/'
        RUNTIME_DIR = '/var/run/cubicweb/'
        MIGRATION_DIR = '/usr/share/cubicweb/migration/'

    # for some commands (creation...) we don't want to initialize gettext
    set_language = True
    # set this to true to avoid false error message while creating an application
    creating = False

    options = CubicWebNoAppConfiguration.options + (
        ('log-file',
         {'type' : 'string',
          'default': Method('default_log_file'),
          'help': 'file where output logs should be written',
          'group': 'main', 'inputlevel': 2,
          }),
        # email configuration
        ('smtp-host',
         {'type' : 'string',
          'default': 'mail',
          'help': 'hostname of the SMTP mail server',
          'group': 'email', 'inputlevel': 1,
          }),
        ('smtp-port',
         {'type' : 'int',
          'default': 25,
          'help': 'listening port of the SMTP mail server',
          'group': 'email', 'inputlevel': 1,
          }),
        ('sender-name',
         {'type' : 'string',
          'default': Method('default_application_id'),
          'help': 'name used as HELO name for outgoing emails from the \
repository.',
          'group': 'email', 'inputlevel': 2,
          }),
        ('sender-addr',
         {'type' : 'string',
          'default': 'devel@logilab.fr',
          'help': 'email address used as HELO address for outgoing emails from \
the repository',
          'group': 'email', 'inputlevel': 1,
          }),
        )
+
    @classmethod
    def runtime_dir(cls):
        """run time directory for pid file... ($CW_RUNTIME overrides)"""
        return env_path('CW_RUNTIME', cls.RUNTIME_DIR, 'run time')

    @classmethod
    def registry_dir(cls):
        """return the control directory ($CW_REGISTRY overrides)"""
        return env_path('CW_REGISTRY', cls.REGISTRY_DIR, 'registry')

    @classmethod
    def instance_data_dir(cls):
        """return the instance data directory ($CW_INSTANCE_DATA overrides)"""
        return env_path('CW_INSTANCE_DATA', cls.INSTANCE_DATA_DIR,
                        'additional data')

    @classmethod
    def migration_scripts_dir(cls):
        """cubicweb migration scripts directory ($CW_MIGRATION overrides)"""
        return env_path('CW_MIGRATION', cls.MIGRATION_DIR, 'migration')

    @classmethod
    def config_for(cls, appid, config=None):
        """return a configuration instance for the given application identifier

        :param config: optional configuration name; when not given it is
          guessed from the application home directory content
        """
        config = config or guess_configuration(cls.application_home(appid))
        configcls = configuration_cls(config)
        return configcls(appid)

    @classmethod
    def possible_configurations(cls, appid):
        """return the name of possible configurations for the given
        application id
        """
        home = cls.application_home(appid)
        return possible_configurations(home)

    @classmethod
    def application_home(cls, appid):
        """return the home directory of the application with the given
        application id

        :raise ConfigurationError: if no such application is registered
        """
        home = join(cls.registry_dir(), appid)
        if not exists(home):
            raise ConfigurationError('no such application %s (check it exists with "cubicweb-ctl list")' % appid)
        return home

    # migration script modes, and which of them each configuration supports
    MODES = ('common', 'repository', 'Any', 'web')
    MCOMPAT = {'all-in-one': MODES,
               'repository': ('common', 'repository', 'Any'),
               'twisted'   : ('common', 'web'),}
    @classmethod
    def accept_mode(cls, mode):
        """tell whether migration scripts of the given mode apply to this
        configuration
        """
        #assert mode in cls.MODES, mode
        return mode in cls.MCOMPAT[cls.name]
+            
+    # default configuration methods ###########################################
+    
+    def default_application_id(self):
+        """return the application identifier, useful for option which need this
+        as default value
+        """
+        return self.appid
+
+    def default_log_file(self):
+        """return default path to the log file of the application'server"""
+        if self.mode == 'dev':
+            basepath = '/tmp/%s-%s' % (basename(self.appid), self.name)
+            path = basepath + '.log'
+            i = 1
+            while exists(path) and i < 100: # arbitrary limit to avoid infinite loop
+                try:
+                    file(path, 'a')
+                    break
+                except IOError:
+                    path = '%s-%s.log' % (basepath, i)
+                    i += 1
+            return path
+        return '/var/log/cubicweb/%s-%s.log' % (self.appid, self.name)
+    
+    def default_pid_file(self):
+        """return default path to the pid file of the application'server"""
+        return join(self.runtime_dir(), '%s-%s.pid' % (self.appid, self.name))
+    
    # instance methods used to get application specific resources #############

    def __init__(self, appid):
        self.appid = appid
        CubicWebNoAppConfiguration.__init__(self)
        # cubes used by the instance, set later by init_cubes()
        self._cubes = None
        # site_cubicweb files already loaded, to avoid double loading
        self._site_loaded = set()
        self.load_file_configuration(self.main_config_file())

    def adjust_sys_path(self):
        CubicWebNoAppConfiguration.adjust_sys_path(self)
        # adding apphome to python path is not usually necessary in production
        # environments, but necessary for tests
        if self.apphome and not self.apphome in sys.path:
            sys.path.insert(0, self.apphome)

    @property
    def apphome(self):
        # home directory of the application (configuration files...)
        return join(self.registry_dir(), self.appid)

    @property
    def appdatahome(self):
        # directory holding the application's variable data
        return join(self.instance_data_dir(), self.appid)

    def init_cubes(self, cubes):
        """declare the cubes used by this instance (ordered by dependency)
        and trigger configuration initialization
        """
        assert self._cubes is None
        self._cubes = self.reorder_cubes(cubes)
        # load cubes'__init__.py file first
        for cube in cubes:
            __import__('cubes.%s' % cube)
        self.load_site_cubicweb()
        # reload config file in cases options are defined in cubes __init__
        # or site_cubicweb files
        self.load_file_configuration(self.main_config_file())
        # configuration initialization hook
        self.load_configuration()

    def cubes(self):
        """return the list of cubes used by this instance

        result is ordered from the top level cubes to inner dependencies
        cubes
        """
        assert self._cubes is not None
        return self._cubes

    def cubes_path(self):
        """return the list of path to cubes used by this instance, from outer
        most to inner most cubes
        """
        return [self.cube_dir(p) for p in self.cubes()]

    def add_cubes(self, cubes):
        """add given cubes to the list of used cubes"""
        if not isinstance(cubes, list):
            cubes = list(cubes)
        self._cubes = self.reorder_cubes(list(self._cubes) + cubes)

    def main_config_file(self):
        """return application's control configuration file"""
        return join(self.apphome, '%s.conf' % self.name)

    def save(self):
        """write down current configuration"""
        self.generate_config(open(self.main_config_file(), 'w'))
+
+    @cached
+    def instance_md5_version(self):
+        import md5
+        infos = []
+        for pkg in self.cubes():
+            version = self.cube_version(pkg)
+            infos.append('%s-%s' % (pkg, version))
+        return md5.new(';'.join(infos)).hexdigest()
+                
    def load_site_cubicweb(self):
        """load (web?) application's specific site_cubicweb file"""
        for path in reversed([self.apphome] + self.cubes_path()):
            sitefile = join(path, 'site_cubicweb.py')
            if exists(sitefile) and not sitefile in self._site_loaded:
                self._load_site_cubicweb(sitefile)
                self._site_loaded.add(sitefile)
            else:
                # backward compat: fall back to the pre-rename file name
                sitefile = join(path, 'site_erudi.py')
                if exists(sitefile) and not sitefile in self._site_loaded:
                    self._load_site_cubicweb(sitefile)
                    self._site_loaded.add(sitefile)
                    self.warning('site_erudi.py is deprecated, should be renamed to site_cubicweb.py')

    def _load_site_cubicweb(self, sitefile):
        """execute the given site_cubicweb file and register any options it
        defines
        """
        context = {}
        execfile(sitefile, context, context)
        self.info('%s loaded', sitefile)
        # cube specific options
        if context.get('options'):
            self.register_options(context['options'])
            self.load_defaults()

    def load_configuration(self):
        """load application's configuration files"""
        super(CubicWebConfiguration, self).load_configuration()
        if self.apphome and self.set_language:
            # init gettext
            self._set_language()
+            
+    def init_log(self, logthreshold=None, debug=False, force=False):
+        """init the log service"""
+        if not force and hasattr(self, '_logging_initialized'):
+            return
+        self._logging_initialized = True
+        CubicWebNoAppConfiguration.init_log(self, logthreshold, debug,
+                                         logfile=self.get('log-file'))
+        # read a config file if it exists
+        logconfig = join(self.apphome, 'logging.conf')
+        if exists(logconfig):
+            logging.fileConfig(logconfig)
+
    def available_languages(self, *args):
        """return available translation for an application, by looking for
        compiled catalog

        take *args to be usable as a vocabulary method
        """
        from glob import glob
        yield 'en' # ensure 'en' is yielded even if no .mo found
        for path in glob(join(self.apphome, 'i18n',
                              '*', 'LC_MESSAGES', 'cubicweb.mo')):
            # the language code is the directory above LC_MESSAGES
            lang = path.split(os.sep)[-3]
            if lang != 'en':
                yield lang

    def _set_language(self):
        """set language for gettext"""
        from gettext import translation
        path = join(self.apphome, 'i18n')
        for language in self.available_languages():
            self.info("loading language %s", language)
            try:
                tr = translation('cubicweb', path, languages=[language])
                self.translations[language] = tr.ugettext
            except (ImportError, AttributeError, IOError):
                # missing/broken catalog: log it and keep the other languages
                self.exception('localisation support error for language %s',
                               language)

    def vregistry_path(self):
        """return a list of files or directories where the registry will look
        for application objects
        """
        templpath = list(reversed(self.cubes_path()))
        if self.apphome: # may be unset in tests
            templpath.append(self.apphome)
        return self.build_vregistry_path(templpath)

    def set_sources_mode(self, sources):
        # overridden in repository configurations, where source selection
        # actually makes sense
        if not 'all' in sources:
            print 'warning: ignoring specified sources, requires a repository '\
                  'configuration'

    def migration_handler(self):
        """return a migration handler instance"""
        from cubicweb.common.migration import MigrationHelper
        return MigrationHelper(self, verbosity=self.verbosity)

    def i18ncompile(self, langs=None):
        """(re)compile i18n catalogs for this instance, merging the catalogs
        of the cubicweb library and of the used cubes; return the list of
        compilation errors
        """
        from cubicweb.common import i18n
        if langs is None:
            langs = self.available_languages()
        i18ndir = join(self.apphome, 'i18n')
        if not exists(i18ndir):
            create_dir(i18ndir)
        sourcedirs = [join(path, 'i18n') for path in self.cubes_path()]
        sourcedirs.append(self.i18n_lib_dir())
        return i18n.compile_i18n_catalogs(sourcedirs, i18ndir, langs)
+
+        
# alias to get a configuration instance from an application id
application_configuration = CubicWebConfiguration.config_for

# map logilab.common.logger thresholds to logging thresholds
THRESHOLD_MAP = {'LOG_DEBUG':  'DEBUG',
                 'LOG_INFO':   'INFO',
                 'LOG_NOTICE': 'INFO',
                 'LOG_WARN':   'WARNING',
                 'LOG_ERR':    'ERROR',
                 'LOG_CRIT':   'CRITICAL',
                 }

# plug debug/info/warning/error/exception methods on the configuration class
from cubicweb import set_log_methods
set_log_methods(CubicWebConfiguration, logging.getLogger('cubicweb.configuration'))
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cwctl.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,809 @@
+"""%%prog %s [options] %s
+
+CubicWeb main applications controller. 
+%s"""
+
+import sys
+from os import remove, listdir, system, kill, getpgid
+from os.path import exists, join, isfile, isdir
+
+from cubicweb import ConfigurationError, ExecutionError, BadCommandUsage
+from cubicweb.cwconfig import CubicWebConfiguration, CONFIGURATIONS
+from cubicweb.toolsutils import (Command, register_commands, main_run, 
+                              rm, create_dir, pop_arg, confirm)
+    
def wait_process_end(pid, maxtry=10, waittime=1):
    """wait for a process to actually die

    poll up to `maxtry` times, sleeping `waittime` seconds between polls,
    and raise ExecutionError if the process is still alive afterwards.

    NOTE(review): the liveness probe sends SIGUSR1 instead of the
    conventional signal 0, so the target process actually receives a signal
    on each poll -- confirm this is intended
    """
    import signal
    from time import sleep
    nbtry = 0
    while nbtry < maxtry:
        try:
            # OSError here means the process is gone
            kill(pid, signal.SIGUSR1)
        except OSError:
            break
        nbtry += 1
        sleep(waittime)
    else:
        raise ExecutionError('can\'t kill process %s' % pid)
+
def list_instances(regdir):
    """return the alphabetically sorted list of instance directories found
    under `regdir` (plain files are ignored)
    """
    instances = [entry for entry in listdir(regdir)
                 if isdir(join(regdir, entry))]
    instances.sort()
    return instances
+
def detect_available_modes(templdir):
    """return the modes ('repository' and/or 'web ui') provided by the cube
    located at `templdir`, guessed from the files it contains
    """
    modes = []
    # a schema (directory or module) means the repository side is present
    if any(exists(join(templdir, fname)) for fname in ('schema', 'schema.py')):
        modes.append('repository')
    # data / views mean the web ui side is present
    if any(exists(join(templdir, fname))
           for fname in ('data', 'views', 'views.py')):
        modes.append('web ui')
    return modes
+    
+    
class ApplicationCommand(Command):
    """base class for command taking 0 to n application id as arguments
    (0 meaning all registered applications)
    """
    arguments = '[<application>...]'
    options = (
        ("force",
         {'short': 'f', 'action' : 'store_true',
          'default': False,
          'help': 'force command without asking confirmation',
          }
         ),
        )
    # past participle used in error messages ('started', 'deleted'...)
    actionverb = None

    def ordered_instances(self):
        """return instances in the order in which they should be started,
        considering $REGISTRY_DIR/startorder file if it exists (useful when
        some instances depends on another as external source
        """
        regdir = CubicWebConfiguration.registry_dir()
        _allinstances = list_instances(regdir)
        if isfile(join(regdir, 'startorder')):
            # instances listed in startorder come first, in file order;
            # the remaining ones keep their alphabetical order
            allinstances = []
            for line in file(join(regdir, 'startorder')):
                line = line.strip()
                if line and not line.startswith('#'):
                    try:
                        _allinstances.remove(line)
                        allinstances.append(line)
                    except ValueError:
                        print 'ERROR: startorder file contains unexistant instance %s' % line
            allinstances += _allinstances
        else:
            allinstances = _allinstances
        return allinstances

    def run(self, args):
        """run the <command>_method on each argument (a list of application
        identifiers)
        """
        if not args:
            # no explicit target: apply to all instances, asking confirmation
            # for each unless --force was given
            args = self.ordered_instances()
            try:
                askconfirm = not self.config.force
            except AttributeError:
                # no force option
                askconfirm = False
        else:
            askconfirm = False
        self.run_args(args, askconfirm)

    def run_args(self, args, askconfirm):
        # apply the command to each application, optionally confirming first
        for appid in args:
            if askconfirm:
                print '*'*72
                if not confirm('%s application %r ?' % (self.name, appid)):
                    continue
            self.run_arg(appid)

    def run_arg(self, appid):
        """run the command's action for a single application id, turning
        exceptions into error messages / exit codes
        """
        cmdmeth = getattr(self, '%s_application' % self.name)
        try:
            cmdmeth(appid)
        except (KeyboardInterrupt, SystemExit):
            print >> sys.stderr, '%s aborted' % self.name
            sys.exit(2) # specific error code
        except (ExecutionError, ConfigurationError), ex:
            print >> sys.stderr, 'application %s not %s: %s' % (
                appid, self.actionverb, ex)
        except Exception, ex:
            # unexpected error: print the traceback for debugging but keep
            # processing the other applications
            import traceback
            traceback.print_exc()
            print >> sys.stderr, 'application %s not %s: %s' % (
                appid, self.actionverb, ex)
+
+
class ApplicationCommandFork(ApplicationCommand):
    """Same as `ApplicationCommand`, but command is forked in a new environment
    for each argument
    """

    def run_args(self, args, askconfirm):
        if len(args) > 1:
            # rebuild the command line without the application ids, so it can
            # be re-run once per application in a subprocess
            forkcmd = ' '.join(w for w in sys.argv if not w in args)
        else:
            forkcmd = None
        for appid in args:
            if askconfirm:
                print '*'*72
                if not confirm('%s application %r ?' % (self.name, appid)):
                    continue
            if forkcmd:
                status = system('%s %s' % (forkcmd, appid))
                if status:
                    # propagate the first failing subcommand's exit status
                    sys.exit(status)
            else:
                self.run_arg(appid)
+    
+# base commands ###############################################################
+
class ListCommand(Command):
    """List configurations, componants and applications.

    list available configurations, installed web and server componants, and
    registered applications
    """
    name = 'list'
    options = (
        ('verbose',
         {'short': 'v', 'action' : 'store_true',
          'help': "display more information."}),
        )

    def run(self, args):
        """run the command with its specific arguments"""
        if args:
            raise BadCommandUsage('Too much arguments')
        print 'CubicWeb version:', CubicWebConfiguration.cubicweb_version()
        print 'Detected mode:', CubicWebConfiguration.mode
        print
        print 'Available configurations:'
        for config in CONFIGURATIONS:
            print '*', config.name
            for line in config.__doc__.splitlines():
                line = line.strip()
                if not line:
                    continue
                print '   ', line
        print
        try:
            cubesdir = CubicWebConfiguration.cubes_dir()
            # ValueError below when there is no cube at all (max of empty seq)
            namesize = max(len(x) for x in CubicWebConfiguration.available_cubes())
        except ConfigurationError, ex:
            print 'No cubes available:', ex
        except ValueError:
            print 'No cubes available in %s' % cubesdir
        else:
            print 'Available cubes (%s):' % cubesdir
            for cube in CubicWebConfiguration.available_cubes():
                # skip version control and shared directories
                if cube in ('CVS', '.svn', 'shared', '.hg'):
                    continue
                templdir = join(cubesdir, cube)
                try:
                    tinfo = CubicWebConfiguration.cube_pkginfo(cube)
                    tversion = tinfo.version
                except ConfigurationError:
                    tinfo = None
                    tversion = '[missing cube information]'
                print '* %s %s' % (cube.ljust(namesize), tversion)
                if self.config.verbose:
                    shortdesc = tinfo and (getattr(tinfo, 'short_desc', '')
                                           or tinfo.__doc__)
                    if shortdesc:
                        print '    '+ '    \n'.join(shortdesc.splitlines())
                    modes = detect_available_modes(templdir)
                    print '    available modes: %s' % ', '.join(modes)
        print
        try:
            regdir = CubicWebConfiguration.registry_dir()
        except ConfigurationError, ex:
            print 'No application available:', ex
            print
            return
        instances = list_instances(regdir)
        if instances:
            print 'Available applications (%s):' % regdir
            for appid in instances:
                modes = CubicWebConfiguration.possible_configurations(appid)
                if not modes:
                    print '* %s (BROKEN application, no configuration found)' % appid
                    continue
                print '* %s (%s)' % (appid, ', '.join(modes))
                try:
                    # check the application's configuration can be loaded
                    config = CubicWebConfiguration.config_for(appid, modes[0])
                except Exception, exc:
                    print '    (BROKEN application, %s)' % exc
                    continue
        else:
            print 'No application available in %s' % regdir
        print
+
+
class CreateApplicationCommand(Command):
    """Create an application from a cube. This is an unified
    command which can handle web / server / all-in-one installation
    according to available parts of the software library and of the
    desired cube.

    <cube>
      the name of cube to use (list available cube names using
      the "list" command). You can use several cubes by separating
      them using comma (e.g. 'jpl,eemail')
    <application>
      an identifier for the application to create
    """
    name = 'create'
    arguments = '<cube> <application>'
    options = (
        ("config-level",
         {'short': 'l', 'type' : 'int', 'metavar': '<level>',
          'default': 0,
          'help': 'configuration level (0..2): 0 will ask for essential \
configuration parameters only while 2 will ask for all parameters',
          }
         ),
        ("config",
         {'short': 'c', 'type' : 'choice', 'metavar': '<install type>',
          'choices': ('all-in-one', 'repository', 'twisted'),
          'default': 'all-in-one',
          'help': 'installation type, telling which part of an application \
should be installed. You can list available configurations using the "list" \
command. Default to "all-in-one", e.g. an installation embedding both the RQL \
repository and the web server.',
          }
         ),
        )

    def run(self, args):
        """run the command with its specific arguments"""
        from logilab.common.textutils import get_csv
        configname = self.config.config
        cubes = get_csv(pop_arg(args, 1))
        appid = pop_arg(args)
        # get the configuration and helper
        CubicWebConfiguration.creating = True
        config = CubicWebConfiguration.config_for(appid, configname)
        # don't initialize gettext, catalogs aren't compiled yet
        config.set_language = False
        config.init_cubes(config.expand_cubes(cubes))
        helper = self.config_helper(config)
        # check the cube exists
        try:
            templdirs = [CubicWebConfiguration.cube_dir(cube)
                         for cube in cubes]
        except ConfigurationError, ex:
            print ex
            print '\navailable cubes:',
            print ', '.join(CubicWebConfiguration.available_cubes())
            return
        # create the registry directory for this application
        create_dir(config.apphome)
        # load site_cubicweb from the cubes dir (if any)
        config.load_site_cubicweb()
        # cubicweb-ctl configuration
        print '** application\'s %s configuration' % configname
        print '-' * 72
        config.input_config('main', self.config.config_level)
        # configuration'specific stuff
        print
        helper.bootstrap(cubes, self.config.config_level)
        # write down configuration
        config.save()
        # handle i18n files structure
        # XXX currently available languages are guessed from translations found
        # in the first cube given
        from cubicweb.common import i18n
        langs = [lang for lang, _ in i18n.available_catalogs(join(templdirs[0], 'i18n'))]
        errors = config.i18ncompile(langs)
        if errors:
            print '\n'.join(errors)
            if not confirm('error while compiling message catalogs, '
                           'continue anyway ?'):
                print 'creation not completed'
                return
        # create the additional data directory for this application
        if config.appdatahome != config.apphome: # true in dev mode
            create_dir(config.appdatahome)
        if config['uid']:
            from logilab.common.shellutils import chown
            # this directory should be owned by the uid of the server process
            print 'set %s as owner of the data directory' % config['uid']
            chown(config.appdatahome, config['uid'])
        print
        print
        print '*' * 72
        print 'application %s (%s) created in %r' % (appid, configname,
                                                     config.apphome)
        print
        helper.postcreate()
+
+    
class DeleteApplicationCommand(Command):
    """Delete an application. Will remove application's files and
    unregister it.
    """
    name = 'delete'
    arguments = '<application>'

    options = ()

    def run(self, args):
        """run the command with its specific arguments"""
        appid = pop_arg(args, msg="No application specified !")
        # let each possible configuration's helper clean up its own stuff
        configs = [CubicWebConfiguration.config_for(appid, configname)
                   for configname in CubicWebConfiguration.possible_configurations(appid)]
        if not configs:
            raise ExecutionError('unable to guess configuration for %s' % appid)
        for config in configs:
            helper = self.config_helper(config, required=False)
            if helper:
                helper.cleanup()
        # remove home
        rm(config.apphome)
        # remove instance data directory
        try:
            rm(config.appdatahome)
        except OSError, ex:
            import errno
            # an already-missing data dir is fine (it may equal apphome)
            if ex.errno != errno.ENOENT:
                raise
        confignames = ', '.join([config.name for config in configs])
        print 'application %s (%s) deleted' % (appid, confignames)
+
+
+# application commands ########################################################
+
+class StartApplicationCommand(ApplicationCommand):
+    """Start the given applications. If no application is given, start them all.
+    
+    <application>...
+      identifiers of the applications to start. If no application is
+      given, start them all.
+    """
+    name = 'start'
+    actionverb = 'started'
+    options = (
+        ("debug",
+         {'short': 'D', 'action' : 'store_true',
+          'help': 'start server in debug mode.'}),
+        ("force",
+         {'short': 'f', 'action' : 'store_true',
+          'default': False,
+          'help': 'start the application even if it seems to be already \
+running.'}),
+        ('profile',
+         {'short': 'P', 'type' : 'string', 'metavar': '<stat file>',
+          'default': None,
+          'help': 'profile code and use the specified file to store stats',
+          }),
+        )
+
+    def start_application(self, appid):
+        """start the application's server"""
+        # use get() since start may be used from other commands (eg upgrade)
+        # without all options defined
+        debug = self.get('debug')
+        force = self.get('force')
+        config = CubicWebConfiguration.config_for(appid)
+        if self.get('profile'):
+            # NOTE(review): reads self.config.profile directly while the guard
+            # above uses self.get('profile') -- presumably equivalent here,
+            # but inconsistent with the comment about undefined options; verify
+            config.global_set_option('profile', self.config.profile)
+        helper = self.config_helper(config, cmdname='start')
+        pidf = config['pid-file']
+        # refuse to start when a pid file already exists, unless --force
+        if exists(pidf) and not force:
+            msg = "%s seems to be running. Remove %s by hand if necessary or use \
+the --force option."
+            raise ExecutionError(msg % (appid, pidf))
+        command = helper.start_command(config, debug)
+        if debug:
+            print "starting server with command :"
+            print command
+        # system() returns a non-zero status on failure
+        if system(command):
+            print 'an error occured while starting the application, not started'
+            print
+            return False
+        if not debug:
+            print 'application %s started' % appid
+        return True
+
+
+class StopApplicationCommand(ApplicationCommand):
+    """Stop the given applications.
+    
+    <application>...
+      identifiers of the applications to stop. If no application is
+      given, stop them all.
+    """
+    name = 'stop'
+    actionverb = 'stopped'
+    
+    def ordered_instances(self):
+        instances = super(StopApplicationCommand, self).ordered_instances()
+        instances.reverse()
+        return instances
+    
+    def stop_application(self, appid):
+        """stop the application's server"""
+        config = CubicWebConfiguration.config_for(appid)
+        helper = self.config_helper(config, cmdname='stop')
+        helper.poststop() # do this anyway
+        pidf = config['pid-file']
+        if not exists(pidf):
+            print >> sys.stderr, "%s doesn't exist." % pidf
+            return
+        import signal
+        pid = int(open(pidf).read().strip())
+        try:
+            kill(pid, signal.SIGTERM)
+        except:
+            print >> sys.stderr, "process %s seems already dead." % pid
+        else:
+            try:
+                wait_process_end(pid)
+            except ExecutionError, ex:
+                print >> sys.stderr, ex
+                print >> sys.stderr, 'trying SIGKILL'
+                try:
+                    kill(pid, signal.SIGKILL)
+                except:
+                    # probably dead now
+                    pass
+                wait_process_end(pid)
+        try:
+            remove(pidf)
+        except OSError:
+            # already removed by twistd
+            pass
+        print 'application %s stopped' % appid
+    
+
+class RestartApplicationCommand(StartApplicationCommand,
+                                StopApplicationCommand):
+    """Restart the given applications.
+    
+    <application>...
+      identifiers of the applications to restart. If no application is
+      given, restart them all.
+    """
+    name = 'restart'
+    actionverb = 'restarted'
+
+    def run_args(self, args, askconfirm):
+        regdir = CubicWebConfiguration.registry_dir()
+        if not isfile(join(regdir, 'startorder')) or len(args) <= 1:
+            # no specific startorder
+            super(RestartApplicationCommand, self).run_args(args, askconfirm)
+            return
+        print ('some specific start order is specified, will first stop all '
+               'applications then restart them.')
+        # get instances in startorder
+        stopped = []
+        for appid in args:
+            if askconfirm:
+                print '*'*72
+                if not confirm('%s application %r ?' % (self.name, appid)):
+                    continue
+            self.stop_application(appid)
+            stopped.append(appid)
+        # rebuild the command line with 'start' substituted for 'restart' and
+        # fork one 'start' process per application, in reverse argument order
+        forkcmd = [w for w in sys.argv if not w in args]
+        forkcmd[1] = 'start'
+        forkcmd = ' '.join(forkcmd)
+        for appid in reversed(args):
+            status = system('%s %s' % (forkcmd, appid))
+            if status:
+                sys.exit(status)
+    
+    def restart_application(self, appid):
+        # only report success when the start actually succeeded
+        self.stop_application(appid)
+        if self.start_application(appid):
+            print 'application %s %s' % (appid, self.actionverb)
+
+        
+class ReloadConfigurationCommand(RestartApplicationCommand):
+    """Reload the given applications. This command is equivalent to a
+    restart for now.
+    
+    <application>...
+      identifiers of the applications to reload. If no application is
+      given, reload them all.
+    """
+    name = 'reload'
+    
+    def reload_application(self, appid):
+        # simple alias: reload is currently implemented as a full restart
+        self.restart_application(appid)
+    
+
+class StatusCommand(ApplicationCommand):
+    """Display status information about the given applications.
+    
+    <application>...
+      identifiers of the applications to status. If no application is
+      given, get status information about all registered applications.
+    """
+    name = 'status'
+    options = ()
+
+    def status_application(self, appid):
+        """print running status information for an application"""
+        for mode in CubicWebConfiguration.possible_configurations(appid):
+            config = CubicWebConfiguration.config_for(appid, mode)
+            print '[%s-%s]' % (appid, mode),
+            try:
+                pidf = config['pid-file']
+            except KeyError:
+                print 'buggy application, pid file not specified'
+                continue
+            if not exists(pidf):
+                print "doesn't seem to be running"
+                continue
+            pid = int(open(pidf).read().strip())
+            # trick to guess whether or not the process is running:
+            # getpgid raises OSError if no process has that pid
+            try:
+                getpgid(pid)
+            except OSError:
+                print "should be running with pid %s but the process can not be found" % pid
+                continue
+            print "running with pid %s" % (pid)
+
+
+class UpgradeApplicationCommand(ApplicationCommandFork,
+                                StartApplicationCommand,
+                                StopApplicationCommand):
+    """Upgrade an application after cubicweb and/or component(s) upgrade.
+
+    For repository update, you will be prompted for a login / password to use
+    to connect to the system database.  For some upgrades, the given user
+    should have create or alter table permissions.
+
+    <application>...
+      identifiers of the applications to upgrade. If no application is
+      given, upgrade them all.
+    """
+    name = 'upgrade'
+    actionverb = 'upgraded'
+    options = ApplicationCommand.options + (
+        ('force-componant-version',
+         {'short': 't', 'type' : 'csv', 'metavar': 'cube1=X.Y.Z,cube2=X.Y.Z',
+          'default': None,
+          'help': 'force migration from the indicated  version for the specified cube.'}),
+        ('force-cubicweb-version',
+         {'short': 'e', 'type' : 'string', 'metavar': 'X.Y.Z',
+          'default': None,
+          'help': 'force migration from the indicated cubicweb version.'}),
+        
+        ('fs-only',
+         {'short': 's', 'action' : 'store_true',
+          'default': False,
+          'help': 'only upgrade files on the file system, not the database.'}),
+
+        ('nostartstop',
+         {'short': 'n', 'action' : 'store_true',
+          'default': False,
+          'help': 'don\'t try to stop application before migration and to restart it after.'}),
+        
+        ('verbosity',
+         {'short': 'v', 'type' : 'int', 'metavar': '<0..2>',
+          'default': 1,
+          'help': "0: no confirmation, 1: only main commands confirmed, 2 ask \
+for everything."}),
+        
+        ('backup-db',
+         {'short': 'b', 'type' : 'yn', 'metavar': '<y or n>',
+          'default': None,
+          'help': "Backup the application database before upgrade.\n"\
+          "If the option is ommitted, confirmation will be ask.",
+          }),
+
+        ('ext-sources',
+         {'short': 'E', 'type' : 'csv', 'metavar': '<sources>',
+          'default': None,
+          'help': "For multisources instances, specify to which sources the \
+repository should connect to for upgrading. When unspecified or 'migration' is \
+given, appropriate sources for migration will be automatically selected \
+(recommended). If 'all' is given, will connect to all defined sources.",
+          }),
+        )
+
+    def ordered_instances(self):
+        # need this since mro return StopApplicationCommand implementation
+        return ApplicationCommand.ordered_instances(self)
+    
+    def upgrade_application(self, appid):
+        """upgrade a single application: stop it, compare installed vs
+        database-recorded versions, run migration scripts for each outdated
+        cube (and cubicweb itself), recompile i18n catalogs, then restart
+        """
+        from logilab.common.changelog import Version
+        if not (CubicWebConfiguration.mode == 'dev' or self.config.nostartstop):
+            self.stop_application(appid)
+        config = CubicWebConfiguration.config_for(appid)
+        config.creating = True # notice we're not starting the server
+        config.verbosity = self.config.verbosity
+        config.set_sources_mode(self.config.ext_sources or ('migration',))
+        # get application and installed versions for the server and the componants
+        print 'getting versions configuration from the repository...'
+        mih = config.migration_handler()
+        repo = mih.repo_connect()
+        vcconf = repo.get_versions()
+        print 'done'
+        # --force-componant-version overrides versions recorded in the db
+        if self.config.force_componant_version:
+            packversions = {}
+            for vdef in self.config.force_componant_version:
+                componant, version = vdef.split('=')
+                packversions[componant] = Version(version)
+            vcconf.update(packversions)
+        toupgrade = []
+        for cube in config.cubes():
+            installedversion = config.cube_version(cube)
+            try:
+                applversion = vcconf[cube]
+            except KeyError:
+                config.error('no version information for %s' % cube)
+                continue
+            # migration needed when code on disk is newer than the db record
+            if installedversion > applversion:
+                toupgrade.append( (cube, applversion, installedversion) )
+        cubicwebversion = config.cubicweb_version()
+        if self.config.force_cubicweb_version:
+            applcubicwebversion = Version(self.config.force_cubicweb_version)
+            vcconf['cubicweb'] = applcubicwebversion
+        else:
+            applcubicwebversion = vcconf.get('cubicweb')
+        if cubicwebversion > applcubicwebversion:
+            toupgrade.append( ('cubicweb', applcubicwebversion, cubicwebversion) )
+        if not self.config.fs_only and not toupgrade:
+            print 'no software migration needed for application %s' % appid
+            return
+        for cube, fromversion, toversion in toupgrade:
+            print '**** %s migration %s -> %s' % (cube, fromversion, toversion)
+        # run cubicweb/componants migration scripts
+        mih.migrate(vcconf, reversed(toupgrade), self.config)
+        # rewrite main configuration file
+        mih.rewrite_configuration()
+        # handle i18n upgrade:
+        # * install new languages
+        # * recompile catalogs
+        # XXX currently available languages are guessed from translations found
+        # in the first componant given
+        from cubicweb.common import i18n
+        templdir = CubicWebConfiguration.cube_dir(config.cubes()[0])
+        langs = [lang for lang, _ in i18n.available_catalogs(join(templdir, 'i18n'))]
+        errors = config.i18ncompile(langs)
+        if errors:
+            print '\n'.join(errors)
+            if not confirm('error while compiling message catalogs, '
+                           'continue anyway ?'):
+                print 'migration not completed'
+                return
+        # record the new versions in the database and shut the handler down
+        mih.rewrite_vcconfiguration()
+        mih.shutdown()
+        print
+        print 'application migrated'
+        if not (CubicWebConfiguration.mode == 'dev' or self.config.nostartstop):
+            self.start_application(appid)
+        print
+
+
+class ShellCommand(Command):
+    """Run an interactive migration shell. This is a python shell with
+    enhanced migration commands predefined in the namespace. An additional
+    argument may be given corresponding to a file containing commands to
+    execute in batch mode.
+
+    <application>
+      the identifier of the application to connect.
+    """
+    name = 'shell'
+    arguments = '<application> [batch command file]'
+    options = (
+        ('system-only',
+         {'short': 'S', 'action' : 'store_true',
+          'default': False,
+          'help': 'only connect to the system source when the instance is '
+          'using multiple sources. You can\'t use this option and the '
+          '--ext-sources option at the same time.'}),
+        
+        ('ext-sources',
+         {'short': 'E', 'type' : 'csv', 'metavar': '<sources>',
+          'default': None,
+          'help': "For multisources instances, specify to which sources the \
+repository should connect to for upgrading. When unspecified or 'all' given, \
+will connect to all defined sources. If 'migration' is given, appropriate \
+sources for migration will be automatically selected.",
+          }),
+        
+        )
+    def run(self, args):
+        """run the command with its specific arguments"""
+        appid = pop_arg(args, 99, msg="No application specified !")
+        config = CubicWebConfiguration.config_for(appid)
+        # --ext-sources and --system-only are mutually exclusive
+        if self.config.ext_sources:
+            assert not self.config.system_only
+            sources = self.config.ext_sources
+        elif self.config.system_only:
+            sources = ('system',)
+        else:
+            sources = ('all',)
+        config.set_sources_mode(sources)
+        mih = config.migration_handler()
+        # remaining args are batch script files; without any, go interactive
+        if args:
+            mih.scripts_session(args)
+        else:
+            mih.interactive_shell()
+        mih.shutdown()
+
+
+class RecompileApplicationCatalogsCommand(ApplicationCommand):
+    """Recompile i18n catalogs for applications.
+    
+    <application>...
+      identifiers of the applications to consider. If no application is
+      given, recompile for all registered applications.
+    """
+    name = 'i18ncompile'
+    
+    def i18ncompile_application(self, appid):
+        """recompile application's messages catalogs"""
+        config = CubicWebConfiguration.config_for(appid)
+        try:
+            config.bootstrap_cubes()
+        except IOError, ex:
+            import errno
+            if ex.errno != errno.ENOENT:
+                raise
+            # bootstrap_cubes files doesn't exist
+            # set creating to notify this is not a regular start
+            config.creating = True
+            # create an in-memory repository, will call config.init_cubes()
+            config.repository()
+        except AttributeError:
+            # web only config
+            config.init_cubes(config.repository().get_cubes())
+        errors = config.i18ncompile()
+        if errors:
+            print '\n'.join(errors)
+
+
+class ListInstancesCommand(Command):
+    """list available instances, useful for bash completion."""
+    name = 'listinstances'
+    # hidden from the user-facing command listing
+    hidden = True
+    
+    def run(self, args):
+        """run the command with its specific arguments"""
+        regdir = CubicWebConfiguration.registry_dir()
+        for appid in sorted(listdir(regdir)):
+            print appid
+
+
+class ListCubesCommand(Command):
+    """list available componants, useful for bash completion."""
+    name = 'listcubes'
+    # hidden from the user-facing command listing
+    hidden = True
+    
+    def run(self, args):
+        """run the command with its specific arguments"""
+        for cube in CubicWebConfiguration.available_cubes():
+            print cube
+
+# register all command classes with the command-line dispatcher
+register_commands((ListCommand,
+                   CreateApplicationCommand,
+                   DeleteApplicationCommand,
+                   StartApplicationCommand,
+                   StopApplicationCommand,
+                   RestartApplicationCommand,
+                   ReloadConfigurationCommand,
+                   StatusCommand,
+                   UpgradeApplicationCommand,
+                   ShellCommand,
+                   RecompileApplicationCatalogsCommand,
+                   ListInstancesCommand, ListCubesCommand,
+                   ))
+
+                
+def run(args):
+    """command line tool"""
+    # load additional cwctl commands contributed by installed cubes
+    CubicWebConfiguration.load_cwctl_plugins()
+    main_run(args, __doc__)
+
+if __name__ == '__main__':
+    run(sys.argv[1:])
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cwvreg.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,386 @@
+"""extend the generic VRegistry with some cubicweb specific stuff
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from warnings import warn
+
+from logilab.common.decorators import cached, clear_cache
+
+from rql import RQLHelper
+
+from cubicweb import Binary, UnknownProperty
+from cubicweb.vregistry import VRegistry, ObjectNotFound, NoSelectableObject
+
+_ = unicode
+
+class DummyCursorError(Exception): pass
+# cursor stub whose execute() always raises, used where a real cursor
+# must not be hit
+class RaiseCursor:
+    @classmethod
+    def execute(cls, rql, args=None, eid_key=None):
+        raise DummyCursorError()
+
+
+class CubicWebRegistry(VRegistry):
+    """extend the generic VRegistry with some cubicweb specific stuff"""
+    
+    def __init__(self, config, debug=None):
+        # first init log service
+        config.init_log(debug=debug)
+        super(CubicWebRegistry, self).__init__(config)
+        # schema is set later through set_schema()
+        self.schema = None
+        self.reset()
+        # turned True by init_properties()
+        self.initialized = False
+        
+    def items(self):
+        # hide the two special property registries from generic iteration
+        return [item for item in self._registries.items()
+                if not item[0] in ('propertydefs', 'propertyvalues')]
+
+    def values(self):
+        # hide the two special property registries from generic iteration
+        return [value for key,value in self._registries.items()
+                if not key in ('propertydefs', 'propertyvalues')]
+    
+    def reset(self):
+        self._registries = {}
+        self._lastmodifs = {}
+        # two special registries, propertydefs which care all the property definitions, and
+        # propertyvals which contains values for those properties
+        self._registries['propertydefs'] = {}
+        self._registries['propertyvalues'] = self.eprop_values = {}
+        for key, propdef in self.config.eproperty_definitions():
+            self.register_property(key, **propdef)
+            
+    def set_schema(self, schema):
+        """set application'schema and load application objects"""
+        self.schema = schema
+        # rqlhelper is cached on the schema, invalidate it
+        clear_cache(self, 'rqlhelper')
+        # now we can load application's web objects
+        self.register_objects(self.config.vregistry_path())
+        
+    def update_schema(self, schema):
+        """update .schema attribute on registered objects, necessary for some
+        tests
+        """
+        self.schema = schema
+        for registry, regcontent in self._registries.items():
+            # skip the property registries, they hold dicts, not vobjects
+            if registry in ('propertydefs', 'propertyvalues'):
+                continue
+            for objects in regcontent.values():
+                for obj in objects:
+                    obj.schema = schema
+        
+    def register_objects(self, path, force_reload=None):
+        """overriden to handle type class cache issue"""
+        if  super(CubicWebRegistry, self).register_objects(path, force_reload):
+            # clear etype cache if you don't want to run into deep weirdness
+            clear_cache(self, 'etype_class')
+            # remove vobjects that don't support any available interface
+            interfaces = set()
+            # collect every interface implemented by a registered entity class
+            for classes in self.get('etypes', {}).values():
+                for cls in classes:
+                    interfaces.update(cls.__implements__)
+            if not self.config.cleanup_interface_sobjects:
+                return
+            for registry, regcontent in self._registries.items():
+                if registry in ('propertydefs', 'propertyvalues', 'etypes'):
+                    continue
+                for oid, objects in regcontent.items():
+                    # iterate over a copy since objects may be removed below
+                    for obj in reversed(objects[:]):
+                        if not obj in objects:
+                            continue # obj has been kicked by a previous one
+                        accepted = set(getattr(obj, 'accepts_interfaces', ()))
+                        if accepted:
+                            for accepted_iface in accepted:
+                                for found_iface in interfaces:
+                                    if issubclass(found_iface, accepted_iface):
+                                        # consider priority if necessary
+                                        if hasattr(obj.__registerer__, 'remove_all_equivalents'):
+                                            registerer = obj.__registerer__(self, obj)
+                                            registerer.remove_all_equivalents(objects)
+                                        break
+                                else:
+                                    # for/else: no implemented interface matched
+                                    self.debug('kicking vobject %s (unsupported interface)', obj)
+                                    objects.remove(obj)
+                    # if objects is empty, remove oid from registry
+                    if not objects:
+                        del regcontent[oid]
+
+    def eid_rset(self, cursor, eid, etype=None):
+        """return a result set for the given eid without doing actual query
+        (we have the eid, we can suppose it exists and user has access to the
+        entity)
+        """
+        msg = '.eid_rset is deprecated, use req.eid_rset'
+        warn(msg, DeprecationWarning, stacklevel=2)
+        try:
+            return cursor.req.eid_rset(eid, etype)
+        except AttributeError:
+            # cursor is a session
+            return cursor.eid_rset(eid, etype)
+    
+    @cached
+    def etype_class(self, etype):
+        """return an entity class for the given entity type.
+        Try to find out a specific class for this kind of entity or
+        default to a dump of the class registered for 'Any'
+        """
+        etype = str(etype)
+        eschema = self.schema.eschema(etype)
+        baseschemas = [eschema] + eschema.ancestors()
+        # browse ancestors from most specific to most generic and
+        # try to find an associated custom entity class
+        for baseschema in baseschemas:
+            btype = str(baseschema)
+            try:
+                return self.select(self.registry_objects('etypes', btype), etype)
+            except ObjectNotFound:
+                pass
+        # no entity class for any of the ancestors, fallback to the default one
+        return self.select(self.registry_objects('etypes', 'Any'), etype)
+
+    def render(self, registry, oid, req, **context):
+        """select an object in a given registry and render it
+
+        - registry: the registry's name
+        - oid : the view to call
+        - req : the HTTP request         
+        """
+        objclss = self.registry_objects(registry, oid)
+        # the result set is optional; pop it so it isn't passed twice below
+        try:
+            rset = context.pop('rset')
+        except KeyError:
+            rset = None
+        selected = self.select(objclss, req, rset, **context)
+        return selected.dispatch(**context)
+        
+    def main_template(self, req, oid='main', **context):
+        """display query by calling the given template (default to main),
+        and returning the output as a string instead of requiring the [w]rite
+        method as argument
+        """
+        res = self.render('templates', oid, req, **context)
+        # normalize to an encoded byte string using the request's encoding
+        if isinstance(res, unicode):
+            return res.encode(req.encoding)
+        assert isinstance(res, str)
+        return res
+
+    def possible_vobjects(self, registry, *args, **kwargs):
+        """return an ordered list of possible app objects in a given registry,
+        supposing they support the 'visible' and 'order' properties (as most
+        visualizable objects)
+        """
+        return [x for x in sorted(self.possible_objects(registry, *args, **kwargs),
+                                  key=lambda x: x.propval('order'))
+                if x.propval('visible')]    
+        
+    def possible_actions(self, req, rset, **kwargs):
+        """return a dictionary mapping action category to the list of
+        applicable actions for the given result set
+        """
+        if rset is None:
+            actions = self.possible_vobjects('actions', req, rset)
+        else:
+            actions = rset.possible_actions() # cached implementation
+        result = {}
+        for action in actions:
+            result.setdefault(action.category, []).append(action)
+        return result
+        
+    def possible_views(self, req, rset, **kwargs):
+        """return an iterator on possible views for this result set
+
+        views returned are classes, not instances
+        """
+        for vid, views in self.registry('views').items():
+            # view ids starting with '_' are private, skip them
+            if vid[0] == '_':
+                continue
+            try:
+                view = self.select(views, req, rset, **kwargs)
+                if view.linkable():
+                    yield view
+            except NoSelectableObject:
+                continue
+            
+    def select_box(self, oid, *args, **kwargs):
+        """return the most specific box according to the result set,
+        or None if none applies
+        """
+        try:
+            return self.select_object('boxes', oid, *args, **kwargs)
+        except NoSelectableObject:
+            return
+
+    def select_action(self, oid, *args, **kwargs):
+        """return the most specific action according to the result set,
+        or None if none applies
+        """
+        try:
+            return self.select_object('actions', oid, *args, **kwargs)
+        except NoSelectableObject:
+            return
+    
+    def select_component(self, cid, *args, **kwargs):
+        """return the most specific component according to the result set,
+        or None if none applies or the component is unknown
+        """
+        try:
+            return self.select_object('components', cid, *args, **kwargs)
+        except (NoSelectableObject, ObjectNotFound):
+            return
+
+    def select_view(self, __vid, req, rset, **kwargs):
+        """return the most specific view according to the result set"""
+        views = self.registry_objects('views', __vid)
+        return self.select(views, req, rset, **kwargs)
+
+    
+    # properties handling #####################################################
+
+    def user_property_keys(self, withsitewide=False):
+        # by default only return per-user (non sitewide) property keys
+        if withsitewide:
+            return sorted(self['propertydefs'])
+        return sorted(k for k, kd in self['propertydefs'].iteritems()
+                      if not kd['sitewide'])
+
+    def register_property(self, key, type, help, default=None, vocabulary=None,
+                          sitewide=False):
+        """register a given property"""
+        properties = self._registries['propertydefs']
+        # type must be a known yams type (see YAMS_TO_PY at end of module)
+        assert type in YAMS_TO_PY
+        properties[key] = {'type': type, 'vocabulary': vocabulary, 
+                           'default': default, 'help': help,
+                           'sitewide': sitewide}
+
+    def property_info(self, key):
+        """return dictionary containing description associated to the given
+        property key (including type, defaut value, help and a site wide
+        boolean)
+        """
+        try:
+            return self._registries['propertydefs'][key]
+        except KeyError:
+            # synthesize a definition for implicit version properties
+            if key.startswith('system.version.'):
+                soft = key.split('.')[-1]
+                return {'type': 'String', 'sitewide': True,
+                        'default': None, 'vocabulary': None,
+                        'help': _('%s software version of the database') % soft}
+            raise UnknownProperty('unregistered property %r' % key)
+            
+    def property_value(self, key):
+        # fall back to the property definition's default value
+        try:
+            return self._registries['propertyvalues'][key]
+        except KeyError:
+            return self._registries['propertydefs'][key]['default']
+
+    def typed_value(self, key, value):
+        """value is an unicode string, return it correctly typed. Let potential
+        type error propagates.
+        """
+        pdef = self.property_info(key)
+        try:
+            # convert using the python type associated to the yams type
+            value = YAMS_TO_PY[pdef['type']](value)
+        except (TypeError, ValueError):
+            raise ValueError(_('bad value'))
+        vocab = pdef['vocabulary']
+        if vocab is not None:
+            if callable(vocab):
+                vocab = vocab(key, None) # XXX need a req object
+            if not value in vocab:
+                raise ValueError(_('unauthorized value'))
+        return value
+    
+    def init_properties(self, propvalues):
+        """init the property values registry using the given set of couple (key, value)
+        """
+        self.initialized = True
+        values = self._registries['propertyvalues']
+        for key, val in propvalues:
+            try:
+                values[key] = self.typed_value(key, val)
+            except ValueError:
+                self.warning('%s (you should probably delete that property '
+                             'from the database)', ex)
+            except UnknownProperty, ex:
+                self.warning('%s (you should probably delete that property '
+                             'from the database)', ex)
+
+
+    def property_value_widget(self, propkey, req=None, **attrs):
+        """return widget according to key's type / vocab"""
+        from cubicweb.web.widgets import StaticComboBoxWidget, widget_factory
+        if req is None:
+            tr = unicode
+        else:
+            tr = req._
+        try:
+            pdef = self.property_info(propkey)
+        except UnknownProperty, ex:
+            self.warning('%s (you should probably delete that property '
+                         'from the database)', ex)
+            return widget_factory(self, 'EProperty', self.schema['value'], 'String',
+                                  description=u'', **attrs)
+        # NOTE(review): this dereferences req.form even when req is None,
+        # which would raise AttributeError -- presumably req is always given
+        # in practice; confirm with callers
+        req.form['value'] = pdef['default'] # XXX hack to pass the default value
+        vocab = pdef['vocabulary']
+        if vocab is not None:
+            if callable(vocab):
+                # list() just in case its a generator function
+                vocabfunc = lambda e: list(vocab(propkey, req))
+            else:
+                vocabfunc = lambda e: vocab
+            w = StaticComboBoxWidget(self, 'EProperty', self.schema['value'], 'String',
+                                     vocabfunc=vocabfunc, description=tr(pdef['help']),
+                                     **attrs)
+        else:
+            w = widget_factory(self, 'EProperty', self.schema['value'], pdef['type'],
+                               description=tr(pdef['help']), **attrs)
+        return w
+
+    def parse(self, session, rql, args=None):
+        """parse the rql string and compute solutions, resolving eids to
+        entity types through the session
+        """
+        rqlst = self.rqlhelper.parse(rql)
+        def type_from_eid(eid, session=session):
+            return session.describe(eid)[0]
+        self.rqlhelper.compute_solutions(rqlst, {'eid': type_from_eid}, args)
+        return rqlst
+
+    @property
+    @cached
+    def rqlhelper(self):
+        # cached per schema; invalidated by set_schema()
+        return RQLHelper(self.schema,
+                         special_relations={'eid': 'uid', 'has_text': 'fti'})
+
+class MulCnxCubicWebRegistry(CubicWebRegistry):
+    """special registry to be used when an application has to deal with
+    connections to differents repository. This class add some additional wrapper
+    trying to hide buggy class attributes since classes are not designed to be
+    shared.
+    """
+    def etype_class(self, etype):
+        """return an entity class for the given entity type.
+        Try to find out a specific class for this kind of entity or
+        default to a dump of the class registered for 'Any'
+        """
+        usercls = super(MulCnxCubicWebRegistry, self).etype_class(etype)
+        # reset e_schema on each call since the class may be shared between
+        # registries bound to different schemas
+        usercls.e_schema = self.schema.eschema(etype)
+        return usercls
+
+    def select(self, vobjects, *args, **kwargs):
+        """return an instance of the most specific object according
+        to parameters
+
+        raise NoSelectableObject if not object apply
+        """
+        # rebind class attributes to this registry before selection
+        for vobject in vobjects:
+            vobject.vreg = self
+            vobject.schema = self.schema
+            vobject.config = self.config
+        return super(MulCnxCubicWebRegistry, self).select(vobjects, *args, **kwargs)
+    
+# imported at module bottom (before YAMS_TO_PY, which needs these names);
+# only referenced at runtime by the methods above
+from mx.DateTime import DateTime, Time, DateTimeDelta
+
+# map yams base type names to the python callable used to coerce a value
+YAMS_TO_PY = {
+    'Boolean':  bool,
+    'String' :  unicode,
+    'Password': str,
+    'Bytes':    Binary,
+    'Int':      int,
+    'Float':    float,
+    'Date':     DateTime,
+    'Datetime': DateTime,
+    'Time':     Time,
+    'Interval': DateTimeDelta,
+    }
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dbapi.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,711 @@
+"""DB-API 2.0 compliant module
+
+Take a look at http://www.python.org/peps/pep-0249.html
+
+(most parts of this document are reported here in docstrings)
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from logging import getLogger, StreamHandler
+from time import time, clock
+
+from cubicweb import ConnectionError, RequestSessionMixIn, set_log_methods
+from cubicweb.cwvreg import CubicWebRegistry, MulCnxCubicWebRegistry
+from cubicweb.cwconfig import CubicWebNoAppConfiguration
+        
# sentinel distinguishing "argument not passed" from an explicit None
_MARKER = object()
+
class ConnectionProperties(object):
    """Bag of options describing how a connection should behave.

    :param cnxtype: connection method, 'pyro' (default) or 'inmemory'
    :param lang: preferred language for the connection, if any
    :param close: close the repository connection when the Connection
                  object is garbage collected
    :param log: record executed queries on the connection
    """
    def __init__(self, cnxtype=None, lang=None, close=True, log=False):
        self.cnxtype = cnxtype or 'pyro'
        self.close_on_del = close
        self.log_queries = log
        self.lang = lang
+
+
def get_repository(method, database=None, config=None, vreg=None):
    """Return a proxy object to the CubicWeb repository, using a specific RPC
    method.

    :param method: access method, 'inmemory' (a Repository created in this
      process) or 'pyro' (a remote proxy resolved through a Pyro name server)
    :param database: name under which the repository is registered in the
      Pyro name server (only meaningful with the 'pyro' method)
    :param config: configuration providing the pyro-ns-* options; at least
      one of `config` / `vreg` must be given (config defaults to vreg.config)
    :param vreg: optional registry handed to an in-memory Repository
    :raise ConnectionError: when the Pyro name server is unreachable or the
      repository is not registered in it
    """
    assert method in ('pyro', 'inmemory')
    assert vreg or config
    if vreg and not config:
        config = vreg.config
    if method == 'inmemory':
        # get local access to the repository
        from cubicweb.server.repository import Repository
        return Repository(config, vreg=vreg)
    else: # method == 'pyro'
        from Pyro import core, naming, config as pyroconfig
        from Pyro.errors import NamingError, ProtocolError
        core.initClient(banner=0)
        # NOTE(review): this mutates Pyro's process-wide configuration —
        # presumably acceptable because a process targets a single ns group
        pyroconfig.PYRO_NS_DEFAULTGROUP = ':' + config['pyro-ns-group']
        locator = naming.NameServerLocator()
        # resolve the Pyro object
        try:
            nshost, nsport = config['pyro-ns-host'], config['pyro-ns-port']
            uri = locator.getNS(nshost, nsport).resolve(database)
        except ProtocolError:
            # the name server itself could not be reached
            raise ConnectionError('Could not connect to the Pyro name server '
                                  '(host: %s:%i)' % (nshost, nsport))
        except NamingError:
            # name server reached, but nothing registered under `database`
            raise ConnectionError('Could not get repository for %s '
                                  '(not registered in Pyro),'
                                  'you may have to restart your server-side '
                                  'application' % database)
        return core.getProxyForURI(uri)
+        
def repo_connect(repo, user, password, cnxprops=None):
    """Open a new connection on `repo` for `user` / `password`.

    Returns a Connection instance. For in-memory connections, the
    repository's registry is attached to the connection so that requests
    built from it are fully functional.
    """
    cnxprops = cnxprops or ConnectionProperties('inmemory')
    sessionid = repo.connect(unicode(user), password, cnxprops=cnxprops)
    connection = Connection(repo, sessionid, cnxprops)
    if cnxprops.cnxtype == 'inmemory':
        connection.vreg = repo.vreg
    return connection
+    
def connect(database=None, user=None, password=None, host=None,
            group=None, cnxprops=None, port=None, setvreg=True, mulcnx=True):
    """Open a connection to a CubicWeb repository and return a Connection
    object.

    With the 'pyro' method and `setvreg` true, a MulCnxCubicWebRegistry is
    used by default: it is designed to cope with connections to different
    instances living in the same process. Pass ``mulcnx=False`` to get a
    plain CubicWebRegistry instead.
    """
    config = CubicWebNoAppConfiguration()
    # push explicit pyro name server settings into the configuration
    for option, value in (('pyro-ns-host', host),
                          ('pyro-ns-port', port),
                          ('pyro-ns-group', group)):
        if value:
            config.global_set_option(option, value)
    cnxprops = cnxprops or ConnectionProperties()
    method = cnxprops.cnxtype
    repo = get_repository(method, database, config=config)
    if method == 'inmemory':
        # the local repository already carries its registry
        vreg = repo.vreg
    elif not setvreg:
        vreg = None
    else:
        if mulcnx:
            vreg = MulCnxCubicWebRegistry(config)
        else:
            vreg = CubicWebRegistry(config)
        vreg.set_schema(repo.get_schema())
    cnx = repo_connect(repo, user, password, cnxprops)
    cnx.vreg = vreg
    return cnx
+
def in_memory_cnx(config, user, password):
    """Handy helper for tests and scripts: return a (repository, connection)
    pair where the repository runs inside the current process.

    `config` may be either a configuration object or an already initialized
    CubicWebRegistry.
    """
    vreg = None
    if isinstance(config, CubicWebRegistry):
        config, vreg = None, config
    # local, in-process access to the repository
    repo = get_repository('inmemory', config=config, vreg=vreg)
    # open a connection on it
    cnx = repo_connect(repo, user, password,
                       cnxprops=ConnectionProperties('inmemory'))
    return repo, cnx
+
+
class DBAPIRequest(RequestSessionMixIn):
    """Request object backed by a DB-API Connection.

    It provides the request interface expected by application objects
    (translation, entity caching, session/shared data access...) while
    delegating the actual work to the underlying Connection/Cursor pair.
    """

    def __init__(self, vreg, cnx=None):
        """
        :param vreg: the vobject registry; its config may not handle
          translations, in which case no catalog is available
        :param cnx: optional Connection; when given, a cursor is created
          immediately through set_connection()
        """
        super(DBAPIRequest, self).__init__(vreg)
        try:
            self.translations = vreg.config.translations
        except AttributeError:
            # no vreg or config which doesn't handle translations
            self.translations = {}
        self.set_default_language(vreg)
        # cache entities built during the request
        self._eid_cache = {}
        # these attributes are initialized once a connection is established
        self.cnx = None   # connection associated to the request
        self._user = None # request's user, set at authentication
        if cnx is not None:
            self.set_connection(cnx)

    def base_url(self):
        """return the root url of the application"""
        return self.vreg.config['base-url']

    def from_controller(self):
        """return the id of the controller issuing the request (always the
        'view' controller for DB-API requests)"""
        return 'view'

    def set_connection(self, cnx, user=None):
        """method called by the session handler when the user is authenticated
        or an anonymous connection is open
        """
        self.cnx = cnx
        self.cursor = cnx.cursor(self)
        self.set_user(user)

    def set_default_language(self, vreg):
        """initialize the request's language from the 'ui.language' property,
        falling back to english when it is not available"""
        try:
            self.lang = vreg.property_value('ui.language')
        except Exception:
            # property may not be registered (narrowed from a bare except)
            self.lang = 'en'
        # use req.__ to translate a message without registering it to the catalog
        try:
            self._ = self.__ = self.translations[self.lang]
        except KeyError:
            # this occurs usually during test execution
            self._ = self.__ = unicode
        self.debug('request language: %s', self.lang)

    def decorate_rset(self, rset):
        """attach this request and its registry to a freshly received
        result set, then return it"""
        rset.vreg = self.vreg
        rset.req = self
        return rset

    def describe(self, eid):
        """return a tuple (type, sourceuri, extid) for the entity with id <eid>"""
        return self.cnx.describe(eid)

    def source_defs(self):
        """return the definition of sources used by the repository."""
        return self.cnx.source_defs()

    # entities cache management ###############################################

    def entity_cache(self, eid):
        """return the cached entity for `eid` (KeyError when not cached)"""
        return self._eid_cache[eid]

    def set_entity_cache(self, entity):
        """add `entity` to the request's entity cache"""
        self._eid_cache[entity.eid] = entity

    def cached_entities(self):
        """return all entities cached by this request"""
        return self._eid_cache.values()

    def drop_entity_cache(self, eid=None):
        """drop the entity with `eid` from the cache, or the whole cache when
        no eid is given"""
        if eid is None:
            self._eid_cache = {}
        else:
            del self._eid_cache[eid]

    # low level session data management #######################################

    def session_data(self):
        """return a dictionary containing session data"""
        return self.cnx.session_data()

    def get_session_data(self, key, default=None, pop=False):
        """return value associated to `key` in session data"""
        return self.cnx.get_session_data(key, default, pop)

    def set_session_data(self, key, value):
        """set value associated to `key` in session data"""
        return self.cnx.set_session_data(key, value)

    def del_session_data(self, key):
        """remove value associated to `key` in session data"""
        return self.cnx.del_session_data(key)

    def get_shared_data(self, key, default=None, pop=False):
        """return value associated to `key` in shared data"""
        return self.cnx.get_shared_data(key, default, pop)

    def set_shared_data(self, key, value, querydata=False):
        """set value associated to `key` in shared data

        if `querydata` is true, the value will be added to the repository
        session's query data which are cleared on commit/rollback of the current
        transaction, and won't be available through the connexion, only on the
        repository side.
        """
        return self.cnx.set_shared_data(key, value, querydata)

    # server session compat layer #############################################

    @property
    def user(self):
        # fetch the user lazily from the connection the first time it is needed
        if self._user is None and self.cnx:
            self.set_user(self.cnx.user(self))
        return self._user

    def set_user(self, user):
        """set the request's user and cache it as an entity"""
        self._user = user
        if user:
            self.set_entity_cache(user)

    def execute(self, *args, **kwargs):
        """Session interface compatibility"""
        return self.cursor.execute(*args, **kwargs)
+
+set_log_methods(DBAPIRequest, getLogger('cubicweb.dbapi'))
+        
+        
+# exceptions ##################################################################
+
class ProgrammingError(Exception): # XXX should derive from DatabaseError (PEP 249)
    """DB-API error raised for failures related to the database's operation
    that are not necessarily under the programmer's control: an unexpected
    disconnect, a data source name that is not found, a transaction that
    could not be processed, a memory allocation error during processing,
    and the like.
    """
+
+# module level objects ########################################################
+
+
# PEP 249 required module-level attribute: supported DB-API level.
apilevel = '2.0'

# The string literal below documents the possible `threadsafety` values; it is
# a no-op expression statement kept as in-file reference documentation.
"""Integer constant stating the level of thread safety the interface supports.
Possible values are:

                0     Threads may not share the module.
                1     Threads may share the module, but not connections.
                2     Threads may share the module and connections.
                3     Threads may share the module, connections and
                      cursors.

Sharing in the above context means that two threads may use a resource without
wrapping it using a mutex semaphore to implement resource locking. Note that
you cannot always make external resources thread safe by managing access using
a mutex: the resource may rely on global variables or other external sources
that are beyond your control.
"""
# PEP 249 required module-level attribute: threads may share the module,
# but not connections.
threadsafety = 1

# The string literal below documents the possible `paramstyle` values; it is
# a no-op expression statement kept as in-file reference documentation.
"""String constant stating the type of parameter marker formatting expected by
the interface. Possible values are :

                'qmark'         Question mark style, 
                                e.g. '...WHERE name=?'
                'numeric'       Numeric, positional style, 
                                e.g. '...WHERE name=:1'
                'named'         Named style, 
                                e.g. '...WHERE name=:name'
                'format'        ANSI C printf format codes, 
                                e.g. '...WHERE name=%s'
                'pyformat'      Python extended format codes, 
                                e.g. '...WHERE name=%(name)s'
"""
# PEP 249 required module-level attribute: python extended format codes.
paramstyle = 'pyformat'
+
+
+# connection object ###########################################################
+
class Connection(object):
    """DB-API 2.0 compatible Connection object for CubicWeb.

    Instances are normally obtained through `connect` / `repo_connect`
    rather than built directly.
    """
    # make exceptions available through the connection object
    ProgrammingError = ProgrammingError

    def __init__(self, repo, cnxid, cnxprops=None):
        """
        :param repo: repository (proxy) object this connection talks to
        :param cnxid: session identifier returned by repo.connect()
        :param cnxprops: optional ConnectionProperties instance
        """
        self._repo = repo
        self.sessionid = cnxid
        self._close_on_del = getattr(cnxprops, 'close_on_del', True)
        self._cnxtype = getattr(cnxprops, 'cnxtype', 'pyro')
        self._closed = None  # None while open, 1 once close() has been called
        if cnxprops and cnxprops.log_queries:
            # record executed queries through a logging cursor
            self.executed_queries = []
            self.cursor_class = LogCursor
        else:
            self.cursor_class = Cursor
        self.anonymous_connection = False
        self.vreg = None
        # session's data
        self.data = {}

    def __repr__(self):
        if self.anonymous_connection:
            return '<Connection %s (anonymous)>' % self.sessionid
        return '<Connection %s>' % self.sessionid

    def request(self):
        """return a new DBAPIRequest bound to this connection"""
        return DBAPIRequest(self.vreg, self)

    def session_data(self):
        """return a dictionary containing session data"""
        return self.data

    def get_session_data(self, key, default=None, pop=False):
        """return value associated to `key` in session data"""
        if pop:
            return self.data.pop(key, default)
        else:
            return self.data.get(key, default)

    def set_session_data(self, key, value):
        """set value associated to `key` in session data"""
        self.data[key] = value

    def del_session_data(self, key):
        """remove value associated to `key` in session data (no-op when the
        key is absent)"""
        try:
            del self.data[key]
        except KeyError:
            pass

    def check(self):
        """raise `BadSessionId` if the connection is no more valid"""
        try:
            self._repo.check_session(self.sessionid)
        except AttributeError:
            # XXX backward compat for repository running cubicweb < 2.48.3
            self._repo.session_data(self.sessionid)

    def get_shared_data(self, key, default=None, pop=False):
        """return value associated to `key` in shared data"""
        return self._repo.get_shared_data(self.sessionid, key, default, pop)

    def set_shared_data(self, key, value, querydata=False):
        """set value associated to `key` in shared data

        if `querydata` is true, the value will be added to the repository
        session's query data which are cleared on commit/rollback of the current
        transaction, and won't be available through the connexion, only on the
        repository side.
        """
        return self._repo.set_shared_data(self.sessionid, key, value, querydata)

    def get_schema(self):
        """Return the schema currently used by the repository.

        This is NOT part of the DB-API.

        :raise ProgrammingError: when the connection has been closed
        """
        if self._closed is not None:
            raise ProgrammingError('Closed connection')
        return self._repo.get_schema()

    def load_vobjects(self, cubes=_MARKER, subpath=None, expand=True,
                      force_reload=None):
        """load application objects (entities, views...) for the given cubes
        into this connection's registry; for in-memory connections the hooks
        manager is reinitialized as well

        :param cubes: cubes to load objects from; defaults to the cubes used
          by the repository, None meaning no cube at all
        :param subpath: object categories to load ('entities', 'views'...)
        :param expand: whether to expand `cubes` with their dependencies
        """
        config = self.vreg.config
        if cubes is _MARKER:
            cubes = self._repo.get_cubes()
        elif cubes is None:
            cubes = ()
        else:
            if not isinstance(cubes, (list, tuple)):
                cubes = (cubes,)
            if expand:
                cubes = config.expand_cubes(cubes)
        if subpath is None:
            subpath = esubpath = ('entities', 'views')
        else:
            esubpath = subpath
        if 'views' in subpath:
            # web views live under web/views in cube directories
            esubpath = list(subpath)
            esubpath.remove('views')
            esubpath.append('web/views')
        cubes = reversed([config.cube_dir(p) for p in cubes])
        vpath = config.build_vregistry_path(cubes, evobjpath=esubpath,
                                            tvobjpath=subpath)
        self.vreg.register_objects(vpath, force_reload)
        if self._cnxtype == 'inmemory':
            # should reinit hooks manager as well
            hm, config = self._repo.hm, self._repo.config
            hm.set_schema(hm.schema) # reset structure
            hm.register_system_hooks(config)
            # application specific hooks
            if self._repo.config.application_hooks:
                hm.register_hooks(config.load_hooks(self.vreg))

    def source_defs(self):
        """Return the definition of sources used by the repository.

        This is NOT part of the DB-API.

        :raise ProgrammingError: when the connection has been closed
        """
        if self._closed is not None:
            raise ProgrammingError('Closed connection')
        return self._repo.source_defs()

    def user(self, req, props=None):
        """return the User object associated to this connection"""
        # cnx validity is checked by the call to .user_info
        eid, login, groups, properties = self._repo.user_info(self.sessionid,
                                                              props)
        if req is None:
            req = self.request()
        rset = req.eid_rset(eid, 'EUser')
        user = self.vreg.etype_class('EUser')(req, rset, row=0, groups=groups,
                                              properties=properties)
        user['login'] = login # cache login
        return user

    def __del__(self):
        """close the remote connection if necessary"""
        if self._closed is None and self._close_on_del:
            try:
                self.close()
            except Exception:
                # never let an exception escape from a destructor
                # (narrowed from a bare except)
                pass

    def describe(self, eid):
        """return a tuple (type, sourceuri, extid) for the entity <eid>"""
        return self._repo.describe(self.sessionid, eid)

    def close(self):
        """Close the connection now (rather than whenever __del__ is called).

        The connection will be unusable from this point forward; an Error (or
        subclass) exception will be raised if any operation is attempted with
        the connection. The same applies to all cursor objects trying to use
        the connection. Note that closing a connection without committing the
        changes first will cause an implicit rollback to be performed.
        """
        if self._closed:
            raise ProgrammingError('Connection is already closed')
        self._repo.close(self.sessionid)
        self._closed = 1

    def commit(self):
        """Commit any pending transaction to the database.

        :raise ProgrammingError: when the connection has been closed
        """
        # idiom fix: was `if not self._closed is None`
        if self._closed is not None:
            raise ProgrammingError('Connection is already closed')
        self._repo.commit(self.sessionid)

    def rollback(self):
        """Roll the database back to the start of any pending transaction.

        Closing a connection without committing the changes first will cause
        an implicit rollback to be performed.

        :raise ProgrammingError: when the connection has been closed
        """
        # idiom fix: was `if not self._closed is None`
        if self._closed is not None:
            raise ProgrammingError('Connection is already closed')
        self._repo.rollback(self.sessionid)

    def cursor(self, req=None):
        """Return a new Cursor object using the connection.

        :raise ProgrammingError: when the connection has been closed
        """
        if self._closed is not None:
            raise ProgrammingError('Can\'t get cursor on closed connection')
        if req is None:
            req = self.request()
        return self.cursor_class(self, self._repo, req=req)
+
+
+# cursor object ###############################################################
+
class Cursor(object):
    """DB-API 2.0 cursor over a CubicWeb connection.

    Cursors created from the same connection are not isolated: any change
    done to the database by a cursor is immediately visible to the others.
    Isolation between cursors from different connections depends on how
    transaction support is implemented (see the connection's rollback() and
    commit() methods).
    """

    def __init__(self, connection, repo, req=None):
        """
        :param connection: the Connection on which the cursor was created
        :param repo: repository (proxy) actually executing the queries
        :param req: optional issuing request instance
        """
        self.connection = connection
        self.req = req
        # number of rows fetchmany() returns when no explicit size is given
        # (DB-API read/write attribute, defaults to one row at a time)
        self.arraysize = 1
        self._repo = repo
        self._sessid = connection.sessionid
        self._res = None     # result set of the last execute() call
        self._closed = None  # set to True by close()
        self._index = 0      # fetch position within self._res.rows

    def close(self):
        """Close the cursor now (rather than whenever __del__ is called).

        The cursor will be unusable from this point forward; an Error (or
        subclass) exception will be raised if any operation is attempted
        with the cursor.
        """
        self._closed = True

    def execute(self, operation, parameters=None, eid_key=None, build_descr=True):
        """Prepare and execute a database operation (query or command).

        Parameters may be provided as a mapping and will be bound to
        variables in the operation (see the module's `paramstyle` attribute).

        Return values are not defined by the DB-API; here a ResultSet object
        is returned, decorated by the issuing request.
        """
        self._res = res = self._repo.execute(self._sessid, operation,
                                             parameters, eid_key, build_descr)
        self.req.decorate_rset(res)
        self._index = 0
        return res

    def executemany(self, operation, seq_of_parameters):
        """Prepare a database operation (query or command) and execute it
        against all parameter mappings found in `seq_of_parameters`.

        Use of this method for an operation which produces a result set is
        undefined behavior and raises ProgrammingError when detected.
        Return values are not defined.
        """
        for parameters in seq_of_parameters:
            self.execute(operation, parameters)
            if self._res.rows is not None:
                self._res = None
                raise ProgrammingError('Operation returned a result set')

    def fetchone(self):
        """Fetch the next row of the result set, returning a single sequence,
        or None when no more data is available.

        :raise ProgrammingError: if no call to execute*() was issued yet or
          it did not produce any result set
        """
        if self._res is None:
            raise ProgrammingError('No result set')
        # return None once exhausted, as required by the DB-API (the previous
        # implementation let an IndexError propagate instead)
        if self._index >= len(self._res.rows):
            return None
        row = self._res.rows[self._index]
        self._index += 1
        return row

    def fetchmany(self, size=None):
        """Fetch the next set of rows of the result, returning a sequence of
        sequences (e.g. a list of tuples); an empty sequence when no more
        rows are available.

        :param size: number of rows to fetch, defaulting to `self.arraysize`;
          fewer rows may be returned when not enough are available
        :raise ProgrammingError: if no call to execute*() was issued yet or
          it did not produce any result set
        """
        if self._res is None:
            raise ProgrammingError('No result set')
        if size is None:
            size = self.arraysize
        rows = self._res.rows[self._index:self._index + size]
        self._index += size
        return rows

    def fetchall(self):
        """Fetch all (remaining) rows of the result, returning them as a
        sequence of sequences (e.g. a list of tuples).

        :raise ProgrammingError: if no call to execute*() was issued yet or
          it did not produce any result set
        """
        if self._res is None:
            raise ProgrammingError('No result set')
        if not self._res.rows:
            return []
        rows = self._res.rows[self._index:]
        # mark the result set as fully consumed (was len(self._res), made
        # consistent with the rows sequence actually sliced above)
        self._index = len(self._res.rows)
        return rows

    def setinputsizes(self, sizes):
        """Predefine memory areas for the operation's parameters before a
        call to execute*().

        Implementations are free to have this method do nothing; this one
        does nothing.
        """
        pass

    def setoutputsize(self, size, column=None):
        """Set a column buffer size for fetches of large columns (e.g. LONGs,
        BLOBs...), `column` being an index into the result sequence (all
        large columns when unspecified).

        Implementations are free to have this method do nothing; this one
        does nothing.
        """
        pass
+
+    
class LogCursor(Cursor):
    """Cursor subclass recording every executed query on its connection."""

    def execute(self, operation, parameters=None, eid_key=None, build_descr=True):
        """Execute the query as usual, then append a tuple
        (rql, parameters, wall time, cpu time) to the connection's
        `executed_queries` list.
        """
        start_time, start_cpu = time(), clock()
        result = Cursor.execute(self, operation, parameters, eid_key,
                                build_descr)
        elapsed, cpu_used = time() - start_time, clock() - start_cpu
        self.connection.executed_queries.append((operation, parameters,
                                                 elapsed, cpu_used))
        return result
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/debian.etch/control	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,146 @@
+Source: erudi
+Section: web
+Priority: optional
+Maintainer: Logilab Packaging Team <contact@logilab.fr>
+Uploaders: Sylvain Thenault <sylvain.thenault@logilab.fr> 
+Build-Depends: debhelper (>= 5.0.37.1), python (>=2.4), python-dev (>=2.4), python-central (>= 0.5)
+Standards-Version: 3.7.3
+XS-Python-Version: >= 2.4, << 2.6
+
+Package: erudi
+Architecture: all
+XB-Python-Version: ${python:Versions}
+Depends: ${python:Depends}, erudi-server (= ${source:Version}), erudi-twisted (= ${source:Version}), erudi-client (= ${source:Version}), postgresql-8.1, postgresql-plpython-8.1, postgresql-contrib-8.1
+Description: the full Erudi knowledge management system
+ Erudi is a entities / relations based knowledge management system
+ developed at Logilab.
+ .
+ This package will install all the components you need to run erudi on
+ a single machine. You can also deploy erudi by running the different
+ process on different computers, in which case you need to install the
+ corresponding packages on the different hosts.
+
+
+Package: erudi-server
+Architecture: all
+Conflicts: erudi-server-common, python2.3-erudi-server
+Replaces: erudi-server-common, python2.3-erudi-server
+XB-Python-Version: ${python:Versions}
+Depends: ${python:Depends}, erudi-common (= ${source:Version}), erudi-ctl (= ${source:Version}), python-indexer (>= 0.6.1), python-psycopg2
+Recommends: pyro, postgresql-8.1, postgresql-plpython-8.1, postgresql-contrib-8.1, erudi-documentation (= ${source:Version})
+Description: the Erudi repository server
+ Erudi is a entities / relations based knowledge management system
+ developed at Logilab.
+ .
+ This package provides the repository server part of the system.
+ .
+ This package provides the repository server part of the library and
+ necessary shared data files such as the schema library.
+
+
+Package: erudi-twisted
+Architecture: all
+XB-Python-Version: ${python:Versions}
+Provides: erudi-web-frontend
+Depends: ${python:Depends}, erudi-web (= ${source:Version}), erudi-ctl (= ${source:Version}), python-twisted-web2
+Recommends: pyro, erudi-documentation (= ${source:Version})
+Description: twisted interface for Erudi
+ Erudi is an entities / relations based knowledge management system
+ developed at Logilab.
+ .
+ This package provides a twisted based HTTP server embedding the 
+ adaptative web interface to the Erudi repository server.
+ .
+ This package provides only the twisted server part of the library. 
+
+
+Package: erudi-web
+Architecture: all
+XB-Python-Version: ${python:Versions}
+Depends: ${python:Depends}, erudi-common (= ${source:Version}), python-simplejson (>= 1.3), python-docutils, python-vobject, python-elementtree
+Recommends: fckeditor
+Description: web interface library for Erudi
+ Erudi is an entities / relations based knowledge management system
+ developed at Logilab.
+ .
+ This package provides an adaptative web interface to the Erudi server.
+ You'll have to install the erudi-twisted package to serve this interface.
+ .
+ This package provides the web interface part of the library and
+ necessary shared data files such as default views, images...
+
+
+Package: erudi-common
+Architecture: all
+XB-Python-Version: ${python:Versions}
+Conflicts: python2.3-erudi-common
+Replaces: python2.3-erudi-common
+Depends: ${python:Depends}, erudi-core (= ${source:Version}), python-logilab-mtconverter (>= 0.4.0), python-simpletal (>= 4.0), graphviz, gettext, python-lxml
+Recommends: python-psyco
+Description: common library for the repository/web framework of the Erudi knowledge management system
+ Erudi is a entities / relations based knowledge management system
+ developed at Logilab.
+ .
+ This package provides common part of the library used by both repository
+ and web application code.
+
+
+Package: erudi-ctl
+Architecture: all
+XB-Python-Version: ${python:Versions}
+Depends: ${python:Depends}, erudi-core (= ${source:Version})
+Conflicts: erudi-dev (<< ${source:Version})
+Description: all in one control script for the Erudi system
+ Erudi is a entities / relations based knowledge management system
+ developed at Logilab.
+ .
+ This package provides a control script to create, upgrade, start,
+ stop, etc... Erudi applications. It also includes the init.d script
+ to automatically start and stop Erudi applications on boot or shutdown.
+
+
+Package: erudi-client
+Architecture: all
+XB-Python-Version: ${python:Versions}
+Depends: ${python:Depends}, erudi-ctl (= ${source:Version}), pyro
+Description: a RQL command line client
+ Erudi is a entities / relations based knowledge management system
+ developed at Logilab.
+ .
+ This package provides a RQL command line client using pyro to connect
+ to a repository server.
+
+
+Package: erudi-core
+Architecture: all
+XB-Python-Version: ${python:Versions}
+Depends: ${python:Depends}, python-logilab-common (>= 0.35.3), python-yams (>= 0.20.0), python-rql (>= 0.20.2)
+Description: core library for the Erudi knowledge management framework
+ Erudi is a entities / relations based knowledge management system
+ developed at Logilab.
+ .
+ This package provides core part of the library used by anyone having 
+ to do some erudi programming in Python
+
+
+Package: erudi-dev
+Architecture: all
+XB-Python-Version: ${python:Versions}
+Depends: ${python:Depends}, erudi-server (= ${source:Version}), erudi-web (= ${source:Version}), python-pysqlite2
+Suggests: w3c-dtd-xhtml
+Description: tests suite and development tools for Erudi
+ Erudi is an entities / relations based knowledge management system
+ developed at Logilab.
+ .
+ This package provides the Erudi tests suite and some development tools
+ helping in the creation of applications.
+
+
+Package: erudi-documentation
+Architecture: all
+Recommends: doc-base
+Description: documentation for the Erudi knowledge management tool
+ Erudi is a entities / relations based knowledge management system
+ developed at Logilab.
+ .
+ This package provides the system's documentation.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/changelog	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,5 @@
+cubicweb (2.99.0-1) unstable; urgency=low
+
+  * initial public release
+
+ -- Nicolas Chauvat <nicolas.chauvat@logilab.fr>  Fri, 24 Oct 2008 23:01:21 +0200
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/compat	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+5
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/control	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,143 @@
+Source: cubicweb
+Section: web
+Priority: optional
+Maintainer: Logilab Packaging Team <contact@logilab.fr>
+Uploaders: Sylvain Thenault <sylvain.thenault@logilab.fr> 
+Build-Depends: debhelper (>= 5.0.37.1), python (>=2.4), python-dev (>=2.4), python-central (>= 0.5)
+Standards-Version: 3.7.3
+XS-Python-Version: >= 2.4, << 2.6
+
+Package: cubicweb
+Architecture: all
+XB-Python-Version: ${python:Versions}
+Depends: ${python:Depends}, cubicweb-server (= ${source:Version}), cubicweb-twisted (= ${source:Version}), cubicweb-client (= ${source:Version}), postgresql-8.3, postgresql-plpython-8.3, postgresql-contrib-8.3
+Description: the full CubicWeb knowledge management system
+ CubicWeb is a entities / relations based knowledge management system
+ developed at Logilab.
+ .
+ This package will install all the components you need to run cubicweb on
+ a single machine. You can also deploy cubicweb by running the different
+ process on different computers, in which case you need to install the
+ corresponding packages on the different hosts.
+
+
+Package: cubicweb-server
+Architecture: all
+Conflicts: cubicweb-server-common, python2.3-cubicweb-server
+Replaces: cubicweb-server-common, python2.3-cubicweb-server
+XB-Python-Version: ${python:Versions}
+Depends: ${python:Depends}, cubicweb-common (= ${source:Version}), cubicweb-ctl (= ${source:Version}), python-indexer (>= 0.6.1), python-psycopg2
+Recommends: pyro, postgresql-8.3, postgresql-plpython-8.3, postgresql-contrib-8.3, cubicweb-documentation (= ${source:Version})
+Description: the CubicWeb repository server
+ CubicWeb is a entities / relations based knowledge management system
+ developed at Logilab.
+ .
+ This package provides the repository server part of the system.
+ .
+ This package provides the repository server part of the library and
+ necessary shared data files such as the schema library.
+
+
+Package: cubicweb-twisted
+Architecture: all
+XB-Python-Version: ${python:Versions}
+Provides: cubicweb-web-frontend
+Depends: ${python:Depends}, cubicweb-web (= ${source:Version}), cubicweb-ctl (= ${source:Version}), python-twisted-web2
+Recommends: pyro, cubicweb-documentation (= ${source:Version})
+Description: twisted interface for CubicWeb
+ CubicWeb is an entities / relations based knowledge management system
+ developed at Logilab.
+ .
+ This package provides a twisted based HTTP server embedding the 
+ adaptative web interface to the CubicWeb repository server.
+ .
+ This package provides only the twisted server part of the library. 
+
+
+Package: cubicweb-web
+Architecture: all
+XB-Python-Version: ${python:Versions}
+Depends: ${python:Depends}, cubicweb-common (= ${source:Version}), python-simplejson (>= 1.3), python-docutils, python-vobject, python-elementtree
+Recommends: fckeditor
+Description: web interface library for CubicWeb
+ CubicWeb is an entities / relations based knowledge management system
+ developed at Logilab.
+ .
+ This package provides an adaptative web interface to the CubicWeb server.
+ You'll have to install the cubicweb-twisted package to serve this interface.
+ .
+ This package provides the web interface part of the library and
+ necessary shared data files such as default views, images...
+
+
+Package: cubicweb-common
+Architecture: all
+XB-Python-Version: ${python:Versions}
+Depends: ${python:Depends}, cubicweb-core (= ${source:Version}), python-logilab-mtconverter (>= 0.4.0), python-simpletal (>= 4.0), graphviz, gettext, python-lxml
+Recommends: python-psyco
+Description: common library for the repository/web framework of the CubicWeb knowledge management system
+ CubicWeb is a entities / relations based knowledge management system
+ developed at Logilab.
+ .
+ This package provides common part of the library used by both repository
+ and web application code.
+
+
+Package: cubicweb-ctl
+Architecture: all
+XB-Python-Version: ${python:Versions}
+Depends: ${python:Depends}, cubicweb-core (= ${source:Version})
+Description: all in one control script for the CubicWeb system
+ CubicWeb is a entities / relations based knowledge management system
+ developed at Logilab.
+ .
+ This package provides a control script to create, upgrade, start,
+ stop, etc... CubicWeb applications. It also includes the init.d script
+ to automatically start and stop CubicWeb applications on boot or shutdown.
+
+
+Package: cubicweb-client
+Architecture: all
+XB-Python-Version: ${python:Versions}
+Depends: ${python:Depends}, cubicweb-ctl (= ${source:Version}), pyro
+Description: a RQL command line client
+ CubicWeb is a entities / relations based knowledge management system
+ developed at Logilab.
+ .
+ This package provides a RQL command line client using pyro to connect
+ to a repository server.
+
+
+Package: cubicweb-core
+Architecture: all
+XB-Python-Version: ${python:Versions}
+Depends: ${python:Depends}, python-logilab-common (>= 0.35.3), python-yams (>= 0.20.0), python-rql (>= 0.20.2)
+Description: core library for the CubicWeb knowledge management framework
+ CubicWeb is a entities / relations based knowledge management system
+ developed at Logilab.
+ .
+ This package provides core part of the library used by anyone having 
+ to do some cubicweb programming in Python
+
+
+Package: cubicweb-dev
+Architecture: all
+XB-Python-Version: ${python:Versions}
+Depends: ${python:Depends}, cubicweb-server (= ${source:Version}), cubicweb-web (= ${source:Version}), python-pysqlite2
+Suggests: w3c-dtd-xhtml
+Description: tests suite and development tools for CubicWeb
+ CubicWeb is an entities / relations based knowledge management system
+ developed at Logilab.
+ .
+ This package provides the CubicWeb tests suite and some development tools
+ helping in the creation of applications.
+
+
+Package: cubicweb-documentation
+Architecture: all
+Recommends: doc-base
+Description: documentation for the CubicWeb knowledge management tool
+ CubicWeb is a entities / relations based knowledge management system
+ developed at Logilab.
+ .
+ This package provides the system's documentation.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/copyright	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,27 @@
+This package was debianized by Logilab <contact@logilab.fr>.
+
+
+Upstream Author: 
+
+  Logilab <contact@logilab.fr>
+
+Copyright:
+
+Copyright (c) 2003-2008 LOGILAB S.A. (Paris, FRANCE).
+http://www.logilab.fr/ -- mailto:contact@logilab.fr
+
+This program is free software; you can redistribute it and/or modify it under
+the terms of the GNU Lesser General Public License as published by the Free 
+Software Foundation; either version 2 of the License, or (at your option) any 
+later version.
+
+This program is distributed in the hope that it will be useful, but WITHOUT
+ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public License along with
+this program; if not, write to the Free Software Foundation, Inc.,
+51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA.
+
+On Debian systems, the complete text of the GNU Lesser General Public License
+may be found in '/usr/share/common-licenses/LGPL'.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-client.dirs	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+usr/lib/python2.4/site-packages/cubicweb/
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-common.dirs	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,4 @@
+usr/lib/python2.4/site-packages/cubicweb
+usr/lib/python2.4/site-packages/cubicweb/common
+usr/share/cubicweb/cubes/shared
+usr/share/doc/cubicweb-common
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-common.postinst	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,12 @@
+#! /bin/sh -e
+
+if [ "$1" = configure ]; then
+    # XXX bw compat: erudi -> cubicweb migration
+    if [ -e "/usr/share/erudi/templates/" ]; then
+      mv /usr/share/erudi/templates/* /usr/share/cubicweb/cubes/
+      echo 'moved /usr/share/erudi/templates/* to /usr/share/cubicweb/cubes/'
+    fi
+fi 
+#DEBHELPER#
+ 
+exit 0
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-core.dirs	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,2 @@
+usr/lib/python2.4/site-packages/cubicweb
+usr/share/doc/cubicweb-core
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-ctl.bash_completion	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,116 @@
+# -*- shell-script -*-
+
+_ec_commands()
+{
+    local commands
+    commands="$("$ec" listcommands 2>/dev/null)" || commands=""
+    COMPREPLY=(${COMPREPLY[@]:-} $(compgen -W '$commands' -- "$cur"))
+}
+
+_ec()
+{
+    local cur prev cmd cmd_index opts i
+    local ec="$1"
+
+    COMPREPLY=()
+    cur="$2"
+    prev="$3"
+
+    # searching for the command
+    # (first non-option argument that doesn't follow a global option that
+    #  receives an argument)
+    for ((i=1; $i<=$COMP_CWORD; i++)); do
+	if [[ ${COMP_WORDS[i]} != -* ]]; then
+	    cmd="${COMP_WORDS[i]}"
+	    cmd_index=$i
+	    break
+	fi
+    done
+
+    if [[ "$cur" == -* ]]; then
+        if [ -z "$cmd" ]; then
+	    COMPREPLY=(${COMPREPLY[@]:-} $(compgen -W '--help' -- "$cur"))
+	else
+	    options="$("$ec" listcommands "$cmd" 2>/dev/null)" || options=""
+	    COMPREPLY=(${COMPREPLY[@]:-} $(compgen -W '$options' -- "$cur"))
+	fi
+	return
+    fi
+
+    if [ -z "$cmd" ] || [ $COMP_CWORD -eq $i ]; then
+	_ec_commands
+	return
+    fi
+
+    # try to generate completion candidates for whatever command the user typed
+    if _ec_command_specific; then
+	return
+    fi
+}
+
+_ec_command_specific()
+{
+    if [ "$(type -t "_ec_cmd_$cmd")" = function ]; then
+	"_ec_cmd_$cmd"
+	return 0
+    fi
+
+    case "$cmd" in
+	client)
+	    if [ "$prev" == "-b" ] || [ "$prev" == "--batch" ]; then
+		COMPREPLY=( $( compgen -o filenames -G "$cur*" ) )
+		return
+	    fi
+	    options="$("$ec" listcommands "$cmd" 2>/dev/null)" || options=""
+	    instances="$("$ec" listinstances 2>/dev/null)" || instances=""
+	    COMPREPLY=(${COMPREPLY[@]:-} $(compgen -W '$options $instances' -- "$cur"))
+	;;
+	db-dump)
+	    if [ "$prev" == "-o" ] || [ "$prev" == "--output" ]; then
+		COMPREPLY=( $( compgen -o filenames -G "$cur*" ) )
+		return
+	    fi
+	    options="$("$ec" listcommands "$cmd" 2>/dev/null)" || options=""
+	    instances="$("$ec" listinstances 2>/dev/null)" || instances=""
+	    COMPREPLY=(${COMPREPLY[@]:-} $(compgen -W '$options $instances' -- "$cur"))
+	;;
+	# commands with template as argument
+ 	i18nupdate)
+	    cubes="$("$ec" listcubes 2>/dev/null)" || cubes=""
+	    COMPREPLY=(${COMPREPLY[@]:-} $(compgen -W '$options $cubes' -- "$cur"))
+ 	;;
+	# generic commands with instance as argument
+ 	start|stop|reload|restart|upgrade|start-repository|db-create|db-init|db-check|db-grant-user)
+	    options="$("$ec" listcommands "$cmd" 2>/dev/null)" || options=""
+	    instances="$("$ec" listinstances 2>/dev/null)" || instances=""
+	    COMPREPLY=(${COMPREPLY[@]:-} $(compgen -W '$options $instances' -- "$cur"))
+ 	;;
+	# generic commands without argument
+ 	list|newtemplate|i18nlibupdate|live-server)
+	    options="$("$ec" listcommands "$cmd" 2>/dev/null)" || options=""
+	    COMPREPLY=(${COMPREPLY[@]:-} $(compgen -W '$options $instances' -- "$cur"))
+ 	;;
+	# generic commands without option
+ 	shell|i18ncompile|delete|status|schema-sync)
+	    instances="$("$ec" listinstances 2>/dev/null)" || instances=""
+	    COMPREPLY=(${COMPREPLY[@]:-} $(compgen -W '$options $instances' -- "$cur"))
+ 	;;
+	# XXX should do better
+ 	create)
+	    options="$("$ec" listcommands "$cmd" 2>/dev/null)" || options=""
+	    COMPREPLY=(${COMPREPLY[@]:-} $(compgen -W '$options $instances' -- "$cur"))
+ 	;;
+ 	db-copy|db-restore|mboximport)
+	    instances="$("$ec" listinstances 2>/dev/null)" || instances=""
+	    COMPREPLY=(${COMPREPLY[@]:-} $(compgen -W '$options $instances' -- "$cur"))
+ 	;;
+ 	*)
+ 	    return 1
+ 	;;
+    esac
+
+    return 0
+}
+
+complete -o bashdefault -o default -F _ec cubicweb-ctl 2>/dev/null \
+    || complete -o default -F _ec cubicweb-ctl
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-ctl.cubicweb.init	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,15 @@
+#!/bin/sh -e
+
+### BEGIN INIT INFO
+# Provides:          cubicweb
+# Required-Start:    $syslog $local_fs $network
+# Required-Stop:     $syslog $local_fs $network
+# Should-Start:      $postgresql $pyro-nsd
+# Should-Stop:       $postgresql $pyro-nsd
+# Default-Start:     2 3 4 5
+# Default-Stop:      0 1 6
+# Short-Description: Start cubicweb application at boot time
+### END INIT INFO
+
+cd /tmp
+/usr/bin/cubicweb-ctl $1 --force
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-ctl.dirs	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,10 @@
+usr/lib/python2.4/site-packages/cubicweb/
+etc/init.d
+etc/cubicweb.d
+etc/bash_completion.d
+usr/bin
+usr/share/doc/cubicweb-ctl
+var/run/cubicweb
+var/log/cubicweb
+var/lib/cubicweb/backup
+var/lib/cubicweb/instances
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-ctl.logrotate	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,17 @@
+/var/log/cubicweb/*.log {
+        weekly
+        missingok
+        rotate 10
+        compress
+        delaycompress
+        notifempty
+        create 640 root adm
+        sharedscripts
+        postrotate
+           if [ -x /usr/sbin/invoke-rc.d ]; then \
+              invoke-rc.d cubicweb reload > /dev/null; \
+           else \
+              /etc/init.d/cubicweb reload > /dev/null; \
+           fi; \
+        endscript
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-ctl.manpages	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+man/cubicweb-ctl.1
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-ctl.postinst	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,35 @@
+#! /bin/sh -e
+
+case "$1" in
+    configure|abort-upgrade|abort-remove|abort-deconfigure)
+        update-rc.d cubicweb defaults >/dev/null
+    ;;
+    *)
+        echo "postinst called with unknown argument \`$1'" >&2
+        exit 1
+    ;;
+esac
+
+if [ "$1" = configure ]; then
+    # XXX bw compat: erudi -> cubicweb migration
+    if [ -e "/etc/erudi.d/" ]; then
+      mv /etc/erudi.d/* /etc/cubicweb.d/
+      echo 'moved /etc/erudi.d/* to /etc/cubicweb.d/'
+    fi
+    if [ -e "/var/log/erudi/" ]; then
+      mv /var/log/erudi/* /var/log/cubicweb/
+      echo 'moved /var/log/erudi/* to /var/log/cubicweb/'
+    fi
+    if [ -e "/var/lib/erudi/backup" ]; then
+      mv /var/lib/erudi/backup/* /var/lib/cubicweb/backup/
+      echo 'moved /var/lib/erudi/backup/* to /var/lib/cubicweb/backup/'
+    fi
+    if [ -e "/var/lib/erudi/instances" ]; then
+      mv /var/lib/erudi/instances/* /var/lib/cubicweb/instances/
+      echo 'moved /var/lib/erudi/instances/* to /var/lib/cubicweb/instances/'
+    fi
+fi
+  
+#DEBHELPER#
+ 
+exit 0
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-ctl.postrm	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,8 @@
+#!/bin/sh -e
+if [ "$1" = "purge" ] ; then
+        update-rc.d cubicweb remove >/dev/null
+fi
+ 
+#DEBHELPER#
+ 
+exit 0
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-ctl.prerm	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,14 @@
+#! /bin/sh -e
+ 
+case "$1" in
+    purge)
+        rm -rf /etc/cubicweb.d/
+    	rm -rf /var/run/cubicweb/
+	rm -rf /var/log/cubicweb/
+	rm -rf /var/lib/cubicweb/
+    ;;
+esac
+ 
+#DEBHELPER#
+ 
+exit 0
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-dev.dirs	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,7 @@
+usr/lib/python2.4/site-packages/cubicweb
+usr/lib/python2.4/site-packages/cubicweb/common
+usr/lib/python2.4/site-packages/cubicweb/web
+usr/lib/python2.4/site-packages/cubicweb/server
+usr/lib/python2.4/site-packages/cubicweb/sobjects
+usr/lib/python2.4/site-packages/cubicweb/entities
+usr/share/doc/cubicweb-dev
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-doc	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,9 @@
+Document: cubicweb-doc
+Title: CubicWeb documentation
+Author: Logilab
+Abstract: Some base documentation for CubicWeb users and developers
+Section: Apps/Programming
+
+Format: HTML
+Index: /usr/share/doc/cubicweb-documentation/index.html
+Files: /usr/share/doc/cubicweb-documentation/*.html
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-documentation.dirs	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,3 @@
+usr/share/doc/cubicweb-documentation/
+usr/share/doc/cubicweb-documentation/devmanual_fr
+usr/share/doc-base/
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-documentation.install	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+debian/cubicweb-doc usr/share/doc-base/
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-documentation.postinst	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,13 @@
+#! /bin/sh -e
+#
+
+if [ "$1" = configure ]; then
+  if which install-docs >/dev/null 2>&1; then
+    install-docs -i /usr/share/doc-base/cubicweb-doc
+  fi
+fi
+
+
+#DEBHELPER#
+
+exit 0
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-documentation.prerm	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,12 @@
+#! /bin/sh -e
+#
+
+if [ "$1" = remove -o "$1" = upgrade ]; then
+  if which install-docs >/dev/null 2>&1; then
+    install-docs -r cubicweb-doc
+  fi
+fi
+
+#DEBHELPER#
+
+exit 0
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-server.dirs	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,3 @@
+usr/lib/python2.4/site-packages/cubicweb/
+usr/share/cubicweb
+usr/share/doc/cubicweb-server
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-server.postinst	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,10 @@
+#! /bin/sh -e
+
+if [ -x "/etc/init.d/cubicweb-ctl" ]; then
+    invoke-rc.d cubicweb-ctl restart || true
+fi
+ 
+ 
+#DEBHELPER#
+ 
+exit 0
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-server.prerm	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,13 @@
+#! /bin/sh -e
+ 
+case "$1" in
+    remove)
+	if [ -x "/etc/init.d/cubicweb-ctl" ]; then
+	    invoke-rc.d cubicweb-ctl stop || true
+	fi
+    ;;
+esac
+ 
+#DEBHELPER#
+ 
+exit 0
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-twisted.dirs	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,3 @@
+usr/lib/python2.4/site-packages
+usr/lib/python2.4/site-packages/cubicweb
+usr/share/doc/cubicweb-twisted
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-twisted.postinst	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,10 @@
+#! /bin/sh -e
+
+if [ -x "/etc/init.d/cubicweb-ctl" ]; then
+    invoke-rc.d cubicweb-ctl restart || true
+fi
+ 
+ 
+#DEBHELPER#
+ 
+exit 0
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-twisted.prerm	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,13 @@
+#! /bin/sh -e
+ 
+case "$1" in
+    remove)
+	if [ -x "/etc/init.d/cubicweb-ctl" ]; then
+	    invoke-rc.d cubicweb-ctl stop || true
+	fi
+    ;;
+esac
+ 
+#DEBHELPER#
+ 
+exit 0
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-web.dirs	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,4 @@
+usr/lib/python2.4/site-packages/
+usr/lib/python2.4/site-packages/cubicweb
+usr/share/cubicweb/cubes/shared
+usr/share/doc/cubicweb-web
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-web.postinst	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,7 @@
+#! /bin/sh -e
+
+ln -sf /usr/share/fckeditor/fckeditor.js /usr/share/cubicweb/cubes/shared/data
+ 
+#DEBHELPER#
+ 
+exit 0
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/pycompat	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+2
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/rules	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,112 @@
+#!/usr/bin/make -f
+# Sample debian/rules that uses debhelper.
+# GNU copyright 1997 to 1999 by Joey Hess.
+
+# Uncomment this to turn on verbose mode.
+#export DH_VERBOSE=1
+build: build-stamp
+build-stamp: 
+	dh_testdir
+	# XXX doesn't work if logilab-doctools, logilab-xml are not in build depends
+	# and I can't get pbuilder find them in its chroot :(
+	#cd doc && make
+	python setup.py -q build
+	touch build-stamp
+
+clean: 
+	dh_testdir
+	dh_testroot
+	rm -f build-stamp configure-stamp
+	rm -rf build
+	rm -rf debian/cubicweb-*/
+	find . -name "*.pyc" | xargs rm -f
+	dh_clean
+
+install: build
+	dh_testdir
+	dh_testroot
+	dh_clean -k
+	dh_installdirs
+	########## core package #############################################
+	# put : 
+	# * all the python library and data in cubicweb-core
+	# * scripts in cubicweb-server
+	#
+	# pick from each latter to construct each package
+	python setup.py -q install_lib --no-compile --install-dir=debian/cubicweb-core/usr/lib/python2.4/site-packages/
+	python setup.py -q install_data --install-dir=debian/cubicweb-core/usr/
+	python setup.py -q install_scripts --install-dir=debian/cubicweb-server/usr/bin/
+	########## common package #############################################
+	mv debian/cubicweb-core/usr/lib/python2.4/site-packages/cubicweb/common/ debian/cubicweb-common/usr/lib/python2.4/site-packages/cubicweb
+	mv debian/cubicweb-core/usr/lib/python2.4/site-packages/cubicweb/entities/ debian/cubicweb-common/usr/lib/python2.4/site-packages/cubicweb
+	# data
+	mv debian/cubicweb-core/usr/share/cubicweb/cubes/shared/i18n debian/cubicweb-common/usr/share/cubicweb/cubes/shared/
+	touch debian/cubicweb-common/usr/share/cubicweb/cubes/__init__.py
+	########## server package #############################################
+	# library
+	mv debian/cubicweb-core/usr/lib/python2.4/site-packages/cubicweb/server/ debian/cubicweb-server/usr/lib/python2.4/site-packages/cubicweb
+	mv debian/cubicweb-core/usr/lib/python2.4/site-packages/cubicweb/sobjects/ debian/cubicweb-server/usr/lib/python2.4/site-packages/cubicweb
+	# data
+	mv debian/cubicweb-core/usr/share/cubicweb/schemas/ debian/cubicweb-server/usr/share/cubicweb/
+	mv debian/cubicweb-core/usr/share/cubicweb/migration/ debian/cubicweb-server/usr/share/cubicweb/
+	########## twisted package ############################################
+	# library
+	mv debian/cubicweb-core/usr/lib/python2.4/site-packages/cubicweb/etwist/ debian/cubicweb-twisted/usr/lib/python2.4/site-packages/cubicweb/
+	########## web package ################################################
+	# library
+	mv debian/cubicweb-core/usr/lib/python2.4/site-packages/cubicweb/web/ debian/cubicweb-web/usr/lib/python2.4/site-packages/cubicweb/
+	# data / web documentation
+	mv debian/cubicweb-core/usr/share/cubicweb/cubes/shared/data debian/cubicweb-web/usr/share/cubicweb/cubes/shared/
+	mv debian/cubicweb-core/usr/share/cubicweb/cubes/shared/wdoc debian/cubicweb-web/usr/share/cubicweb/cubes/shared/
+	########## ctl package ################################################
+	# scripts
+	mv debian/cubicweb-server/usr/bin/cubicweb-ctl debian/cubicweb-ctl/usr/bin/
+	mv debian/cubicweb-core/usr/lib/python2.4/site-packages/cubicweb/cwctl.py debian/cubicweb-ctl/usr/lib/python2.4/site-packages/cubicweb
+	mv debian/cubicweb-ctl.bash_completion debian/cubicweb-ctl/etc/bash_completion.d/cubicweb-ctl
+	########## client package #############################################
+	# library
+	mv debian/cubicweb-core/usr/lib/python2.4/site-packages/cubicweb/hercule.py debian/cubicweb-client/usr/lib/python2.4/site-packages/cubicweb
+	########## dev package ################################################
+	# devtools package
+	mv debian/cubicweb-core/usr/lib/python2.4/site-packages/cubicweb/devtools/ debian/cubicweb-dev/usr/lib/python2.4/site-packages/cubicweb/
+	# tests directories
+	mv debian/cubicweb-core/usr/lib/python2.4/site-packages/cubicweb/test debian/cubicweb-dev/usr/lib/python2.4/site-packages/cubicweb/
+	mv debian/cubicweb-common/usr/lib/python2.4/site-packages/cubicweb/common/test debian/cubicweb-dev/usr/lib/python2.4/site-packages/cubicweb/common/
+	mv debian/cubicweb-server/usr/lib/python2.4/site-packages/cubicweb/server/test debian/cubicweb-dev/usr/lib/python2.4/site-packages/cubicweb/server/
+	mv debian/cubicweb-server/usr/lib/python2.4/site-packages/cubicweb/sobjects/test debian/cubicweb-dev/usr/lib/python2.4/site-packages/cubicweb/sobjects/
+	mv debian/cubicweb-web/usr/lib/python2.4/site-packages/cubicweb/web/test debian/cubicweb-dev/usr/lib/python2.4/site-packages/cubicweb/web/
+	########## documentation package ######################################
+	cp doc/*.html doc/*.txt debian/cubicweb-documentation/usr/share/doc/cubicweb-documentation/
+	cp doc/devmanual_fr/*.html doc/devmanual_fr/*.txt doc/devmanual_fr/*.png debian/cubicweb-documentation/usr/share/doc/cubicweb-documentation/devmanual_fr
+	########## core package ###############################################
+	# small cleanup
+	rm -rf debian/cubicweb-core/usr/share/cubicweb/
+	# undistributed for now
+	rm -rf debian/cubicweb-core/usr/lib/python2.4/site-packages/cubicweb/goa
+	rm -rf debian/cubicweb-core/usr/lib/python2.4/site-packages/cubicweb/wsgi
+
+# Build architecture-independent files here.
+binary-indep: build install
+	dh_testdir
+	dh_testroot -i
+	dh_install -i
+	dh_pycentral -i
+	dh_python -i
+	dh_installinit -i -n --name cubicweb -u"defaults 99"
+	dh_installlogrotate -i
+	dh_installdocs -i -A README
+	dh_installman -i
+	dh_installchangelogs -i
+	dh_link -i
+	dh_compress -i -X.py -X.ini -X.xml
+	dh_fixperms -i
+	dh_installdeb -i
+	dh_gencontrol  -i
+	dh_md5sums -i
+	dh_builddeb -i
+
+binary-arch:
+
+binary: binary-indep 
+.PHONY: build clean binary binary-indep binary-arch
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/devtools/__init__.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,345 @@
+"""Test tools for cubicweb
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+import os
+import logging
+from os.path import (abspath, join, exists, basename, dirname, normpath, split,
+                     isfile, isabs)
+
+from mx.DateTime import strptime, DateTimeDelta
+
+from cubicweb import CW_SOFTWARE_ROOT, ConfigurationError
+from cubicweb.toolsutils import read_config
+from cubicweb.cwconfig import CubicWebConfiguration, merge_options
+from cubicweb.server.serverconfig import ServerConfiguration
+from cubicweb.etwist.twconfig import TwistedConfiguration
+
+# validators are used to validate (XML, DTD, whatever) view's content
+# validators availables are :
+#  'dtd' : validates XML + declared DTD
+#  'xml' : guarantees XML is well formed
+#  None : do not try to validate anything
+VIEW_VALIDATORS = {}
+BASE_URL = 'http://testing.fr/cubicweb/'
+DEFAULT_SOURCES = {'system': {'adapter' : 'native',
+                              'db-encoding' : 'UTF-8', #'ISO-8859-1',
+                              'db-user' : u'admin',
+                              'db-password' : 'gingkow',
+                              'db-name' : 'tmpdb',
+                              'db-driver' : 'sqlite',
+                              'db-host' : None,
+                              },
+                   'admin' : {'login': u'admin',
+                              'password': u'gingkow',
+                              },
+                   }
+
+class TestServerConfiguration(ServerConfiguration):
+    mode = 'test'
+    set_language = False
+    read_application_schema = False
+    bootstrap_schema = False
+    init_repository = True
+    options = merge_options(ServerConfiguration.options + (
+        ('anonymous-user',
+         {'type' : 'string',
+          'default': None,
+          'help': 'login of the CubicWeb user account to use for anonymous user (if you want to allow anonymous)',
+          'group': 'main', 'inputlevel': 1,
+          }),
+        ('anonymous-password',
+         {'type' : 'string',
+          'default': None,
+          'help': 'password of the CubicWeb user account matching login',
+          'group': 'main', 'inputlevel': 1,
+          }),
+        ))
+                            
+    if not os.environ.get('APYCOT_ROOT'):
+        REGISTRY_DIR = normpath(join(CW_SOFTWARE_ROOT, '../cubes'))
+    
+    def __init__(self, appid, log_threshold=logging.CRITICAL+10):
+        ServerConfiguration.__init__(self, appid)
+        self.global_set_option('log-file', None)
+        self.init_log(log_threshold, force=True)
+        # need this, usually triggered by cubicweb-ctl
+        self.load_cwctl_plugins()
+
+    anonymous_user = TwistedConfiguration.anonymous_user.im_func
+        
+    @property
+    def apphome(self):
+        if exists(self.appid):
+            return abspath(self.appid)
+        # application cube test
+        return abspath('..')
+    appdatahome = apphome
+    
+    def main_config_file(self):
+        """return application's control configuration file"""
+        return join(self.apphome, '%s.conf' % self.name)
+
+    def instance_md5_version(self):
+        return ''
+
+    def bootstrap_cubes(self):
+        try:
+            super(TestServerConfiguration, self).bootstrap_cubes()
+        except IOError:
+            # no cubes
+            self.init_cubes( () )
+
+    sourcefile = None
+    def sources_file(self):
+        """define in subclasses self.sourcefile if necessary"""
+        if self.sourcefile:
+            print 'Reading sources from', self.sourcefile
+            sourcefile = self.sourcefile
+            if not isabs(sourcefile):
+                sourcefile = join(self.apphome, sourcefile)
+        else:
+            sourcefile = super(TestServerConfiguration, self).sources_file()
+        return sourcefile
+
+    def sources(self):
+        """By default, we run tests with the sqlite DB backend.  One may use its
+        own configuration by just creating a 'sources' file in the test
+        directory from which tests are launched or by specifying an alternative
+        sources file using self.sourcefile.
+        """
+        sources = super(TestServerConfiguration, self).sources()
+        if not sources:
+            sources = DEFAULT_SOURCES
+        return sources
+    
+    def load_defaults(self):
+        super(TestServerConfiguration, self).load_defaults()
+        # note: don't call global set option here, OptionManager may not yet be initialized
+        # add anonymous user
+        self.set_option('anonymous-user', 'anon')
+        self.set_option('anonymous-password', 'anon')
+        # uncomment the line below if you want rql queries to be logged
+        #self.set_option('query-log-file', '/tmp/test_rql_log.' + `os.getpid()`)
+        self.set_option('sender-name', 'cubicweb-test')
+        self.set_option('sender-addr', 'cubicweb-test@logilab.fr')
+        try:
+            send_to =  '%s@logilab.fr' % os.getlogin()
+        except OSError:
+            send_to =  '%s@logilab.fr' % (os.environ.get('USER')
+                                          or os.environ.get('USERNAME')
+                                          or os.environ.get('LOGNAME'))
+        self.set_option('sender-addr', send_to)
+        self.set_option('default-dest-addrs', send_to)
+        self.set_option('base-url', BASE_URL)
+
+
+class BaseApptestConfiguration(TestServerConfiguration, TwistedConfiguration):
+    repo_method = 'inmemory'
+    options = merge_options(TestServerConfiguration.options + TwistedConfiguration.options)
+    cubicweb_vobject_path = TestServerConfiguration.cubicweb_vobject_path | TwistedConfiguration.cubicweb_vobject_path
+    cube_vobject_path = TestServerConfiguration.cube_vobject_path | TwistedConfiguration.cube_vobject_path
+
+    def available_languages(self, *args):
+        return ('en', 'fr', 'de')
+    
+    def ext_resources_file(self):
+        """return application's external resources file"""
+        return join(self.apphome, 'data', 'external_resources')
+    
+    def pyro_enabled(self):
+        # but export PYRO_MULTITHREAD=0 or you get problems with sqlite and threads
+        return True
+
+
+class ApptestConfiguration(BaseApptestConfiguration):
+    
+    def __init__(self, appid, log_threshold=logging.CRITICAL, sourcefile=None):
+        BaseApptestConfiguration.__init__(self, appid, log_threshold=log_threshold)
+        self.init_repository = sourcefile is None
+        self.sourcefile = sourcefile
+        import re
+        self.global_set_option('embed-allowed', re.compile('.*'))
+        
+
+class RealDatabaseConfiguration(ApptestConfiguration):
+    init_repository = False
+    sourcesdef =  {'system': {'adapter' : 'native',
+                              'db-encoding' : 'UTF-8', #'ISO-8859-1',
+                              'db-user' : u'admin',
+                              'db-password' : 'gingkow',
+                              'db-name' : 'seotest',
+                              'db-driver' : 'postgres',
+                              'db-host' : None,
+                              },
+                   'admin' : {'login': u'admin',
+                              'password': u'gingkow',
+                              },
+                   }
+    
+    def __init__(self, appid, log_threshold=logging.CRITICAL, sourcefile=None):
+        ApptestConfiguration.__init__(self, appid)
+        self.init_repository = False
+
+
+    def sources(self):
+        """
+        By default, we run tests with the sqlite DB backend.
+        One may use its own configuration by just creating a
+        'sources' file in the test directory from which tests are
+        launched. 
+        """
+        self._sources = self.sourcesdef
+        return self._sources
+
+
+def buildconfig(dbuser, dbpassword, dbname, adminuser, adminpassword, dbhost=None):
+    """convenience function that builds a real-db configuration class"""
+    sourcesdef =  {'system': {'adapter' : 'native',
+                              'db-encoding' : 'UTF-8', #'ISO-8859-1',
+                              'db-user' : dbuser,
+                              'db-password' : dbpassword,
+                              'db-name' : dbname,
+                              'db-driver' : 'postgres',
+                              'db-host' : dbhost,
+                              },
+                   'admin' : {'login': adminuser,
+                              'password': adminpassword,
+                              },
+                   }
+    return type('MyRealDBConfig', (RealDatabaseConfiguration,),
+                {'sourcesdef': sourcesdef})
+
+def loadconfig(filename):
+    """convenience function that builds a real-db configuration class
+    from a file
+    """
+    return type('MyRealDBConfig', (RealDatabaseConfiguration,),
+                {'sourcesdef': read_config(filename)})
+    
+
+class LivetestConfiguration(BaseApptestConfiguration):
+    init_repository = False
+    
+    def __init__(self, cube=None, sourcefile=None, pyro_name=None,
+                 log_threshold=logging.CRITICAL):
+        TestServerConfiguration.__init__(self, cube, log_threshold=log_threshold)
+        self.appid = pyro_name or cube
+        # don't change this, else some symlink problems may arise in some
+        # environment (e.g. mine (syt) ;o)
+        # XXX I'm afraid this test will prevent to run test from a production
+        # environment
+        self._sources = None
+        # application cube test
+        if cube is not None:
+            self.apphome = self.cube_dir(cube)
+        elif 'web' in os.getcwd().split(os.sep):
+            # web test
+            self.apphome = join(normpath(join(dirname(__file__), '..')), 'web')
+        else:
+            # application cube test
+            self.apphome = abspath('..')
+        self.sourcefile = sourcefile
+        self.global_set_option('realm', '')
+        self.use_pyro = pyro_name is not None
+
+    def pyro_enabled(self):
+        if self.use_pyro:
+            return True
+        else:
+            return False
+
+CubicWebConfiguration.cls_adjust_sys_path()
+                                                    
+def install_sqlite_path(querier):
+    """This patch hotfixes the following sqlite bug :
+     - http://www.sqlite.org/cvstrac/tktview?tn=1327,33
+    (some dates are returned as strings rather than date objects)
+    """
+    def wrap_execute(base_execute):
+        def new_execute(*args, **kwargs):
+            rset = base_execute(*args, **kwargs)
+            if rset.description:
+                found_date = False
+                for row, rowdesc in zip(rset, rset.description):
+                    for cellindex, (value, vtype) in enumerate(zip(row, rowdesc)):
+                        if vtype in ('Date', 'Datetime') and type(value) is unicode:
+                            found_date = True
+                            try:
+                                row[cellindex] = strptime(value, '%Y-%m-%d %H:%M:%S')
+                            except:
+                                row[cellindex] = strptime(value, '%Y-%m-%d')
+                        if vtype == 'Time' and type(value) is unicode:
+                            found_date = True
+                            try:
+                                row[cellindex] = strptime(value, '%H:%M:%S')
+                            except:
+                                # DateTime used as Time?
+                                row[cellindex] = strptime(value, '%Y-%m-%d %H:%M:%S')
+                        if vtype == 'Interval' and type(value) is int:
+                            found_date = True
+                            row[cellindex] = DateTimeDelta(0, 0, 0, value)
+                    if not found_date:
+                        break
+            return rset
+        return new_execute
+    querier.__class__.execute = wrap_execute(querier.__class__.execute)
+
+
+def init_test_database(driver='sqlite', configdir='data', config=None,
+                       vreg=None):
+    """init a test database for a specific driver"""
+    from cubicweb.dbapi import in_memory_cnx
+    if vreg and not config:
+        config = vreg.config
+    config = config or TestServerConfiguration(configdir)
+    source = config.sources()
+    if driver == 'sqlite':
+        init_test_database_sqlite(config, source)
+    elif driver == 'postgres':
+        init_test_database_postgres(config, source)
+    else:
+        raise ValueError('no initialization function for driver %r' % driver)
+    config._cubes = None # avoid assertion error
+    repo, cnx = in_memory_cnx(vreg or config, unicode(source['admin']['login']),
+                              source['admin']['password'] or 'xxx')
+    if driver == 'sqlite':
+        install_sqlite_path(repo.querier)
+    return repo, cnx
+
+def init_test_database_postgres(config, source, vreg=None):
+    """initialize a fresh sqlite databse used for testing purpose"""
+    if config.init_repository:
+        from cubicweb.server import init_repository
+        init_repository(config, interactive=False, drop=True, vreg=vreg)
+
+def cleanup_sqlite(dbfile, removecube=False):
+    try:
+        os.remove(dbfile)
+        os.remove('%s-journal' % dbfile)
+    except OSError:
+        pass
+    if removecube:
+        try:
+            os.remove('%s-cube' % dbfile)
+        except OSError:
+            pass
+    
+def init_test_database_sqlite(config, source, vreg=None):
+    """initialize a fresh sqlite databse used for testing purpose"""
+    import shutil
+    # remove database file if it exists (actually I know driver == 'sqlite' :)
+    dbfile = source['system']['db-name']
+    cleanup_sqlite(dbfile)
+    cube = '%s-cube' % dbfile
+    if exists(cube):
+        shutil.copy(cube, dbfile)
+    else:
+        # initialize the database
+        from cubicweb.server import init_repository
+        init_repository(config, interactive=False, vreg=vreg)
+        shutil.copy(dbfile, cube)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/devtools/_apptest.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,257 @@
+"""Hidden internals for the devtools.apptest module
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+import sys, traceback
+
+from logilab.common.pytest import pause_tracing, resume_tracing
+
+import yams.schema
+
+from cubicweb.dbapi import repo_connect, ConnectionProperties, ProgrammingError
+from cubicweb.cwvreg import CubicWebRegistry
+
+from cubicweb.web.application import CubicWebPublisher
+from cubicweb.web import Redirect
+
+from cubicweb.devtools import ApptestConfiguration, init_test_database
+from cubicweb.devtools.fake import FakeRequest
+    
+SYSTEM_ENTITIES = ('EGroup', 'EUser',
+                   'EFRDef', 'ENFRDef',
+                   'EConstraint', 'EConstraintType', 'EProperty',
+                   'EEType', 'ERType',
+                   'State', 'Transition', 'TrInfo',
+                   'RQLExpression',
+                   )
+SYSTEM_RELATIONS = (
+    # virtual relation
+    'identity',
+    # metadata
+    'is', 'is_instance_of', 'owned_by', 'created_by', 'specializes',
+    # workflow related
+    'state_of', 'transition_of', 'initial_state', 'allowed_transition',
+    'destination_state', 'in_state', 'wf_info_for', 'from_state', 'to_state', 
+    'condition',
+    # permission
+    'in_group', 'require_group', 'require_permission',
+    'read_permission', 'update_permission', 'delete_permission', 'add_permission',
+    # eproperty
+    'for_user',
+    # schema definition
+    'relation_type', 'from_entity', 'to_entity',
+    'constrained_by', 'cstrtype', 'widget',
+    # deduced from other relations
+    'primary_email', 
+                    )
+
+def unprotected_entities(app_schema, strict=False):
+    """returned a Set of each non final entity type, excluding EGroup, and EUser...
+    """
+    if strict:
+        protected_entities = yams.schema.BASE_TYPES
+    else:
+        protected_entities = yams.schema.BASE_TYPES.union(set(SYSTEM_ENTITIES))
+    entities = set(app_schema.entities())
+    return entities - protected_entities
+    
+
+def ignore_relations(*relations):
+    SYSTEM_RELATIONS += relations
+
+class TestEnvironment(object):
+    """TestEnvironment defines a context (e.g. a config + a given connection) in
+    which the tests are executed
+    """
+    
+    def __init__(self, appid, reporter=None, verbose=False,
+                 configcls=ApptestConfiguration, requestcls=FakeRequest):
+        config = configcls(appid)
+        self.requestcls = requestcls
+        self.cnx = None
+        config.db_perms = False
+        source = config.sources()['system']
+        if verbose:
+            print "init test database ..."
+        self.vreg = vreg = CubicWebRegistry(config)
+        self.admlogin = source['db-user']
+        # restore database <=> init database
+        self.restore_database()
+        if verbose:
+            print "init done"
+        login = source['db-user']
+        config.repository = lambda x=None: self.repo
+        self.app = CubicWebPublisher(config, vreg=vreg)
+        self.verbose = verbose
+        schema = self.vreg.schema
+        # else we may run into problems since email addresses are usually shared in app tests
+        # XXX should not be necessary anymore
+        schema.rschema('primary_email').set_rproperty('EUser', 'EmailAddress', 'composite', False)
+        self.deletable_entities = unprotected_entities(schema)
+
+    def restore_database(self):
+        """called by unittests' tearDown to restore the original database
+        """
+        try:
+            pause_tracing()
+            if self.cnx:
+                self.cnx.close()
+            source = self.vreg.config.sources()['system']
+            self.repo, self.cnx = init_test_database(driver=source['db-driver'],
+                                                     vreg=self.vreg)
+            self._orig_cnx = self.cnx
+            resume_tracing()
+        except:
+            resume_tracing()
+            traceback.print_exc()
+            sys.exit(1)
+        # XXX cnx decoration is usually done by the repository authentication manager,
+        # necessary in authentication tests
+        self.cnx.vreg = self.vreg
+        self.cnx.login = source['db-user']
+        self.cnx.password = source['db-password']
+        
+
+    def create_user(self, login, groups=('users',), req=None):
+        req = req or self.create_request()
+        cursor = self._orig_cnx.cursor(req)
+        rset = cursor.execute('INSERT EUser X: X login %(login)s, X upassword %(passwd)s,'
+                              'X in_state S WHERE S name "activated"',
+                              {'login': unicode(login), 'passwd': login.encode('utf8')})
+        user = rset.get_entity(0, 0)
+        cursor.execute('SET X in_group G WHERE X eid %%(x)s, G name IN(%s)'
+                       % ','.join(repr(g) for g in groups),
+                       {'x': user.eid}, 'x')
+        user.clear_related_cache('in_group', 'subject')
+        self._orig_cnx.commit()
+        return user
+
+    def login(self, login):
+        if login == self.admlogin:
+            self.restore_connection()
+        else:
+            self.cnx = repo_connect(self.repo, unicode(login), str(login),
+                                    ConnectionProperties('inmemory'))
+        if login == self.vreg.config.anonymous_user()[0]:
+            self.cnx.anonymous_connection = True
+        return self.cnx
+    
+    def restore_connection(self):
+        if not self.cnx is self._orig_cnx:
+            try:
+                self.cnx.close()
+            except ProgrammingError:
+                pass # already closed
+        self.cnx = self._orig_cnx
+
+    ############################################################################
+
+    def execute(self, rql, args=None, eidkey=None, req=None):
+        """executes <rql>, builds a resultset, and returns a couple (rset, req)
+        where req is a FakeRequest
+        """
+        req = req or self.create_request(rql=rql)
+        return self.cnx.cursor(req).execute(unicode(rql), args, eidkey)
+    
+    def create_request(self, rql=None, **kwargs):
+        """executes <rql>, builds a resultset, and returns a
+        couple (rset, req) where req is a FakeRequest
+        """
+        if rql:
+            kwargs['rql'] = rql
+        req = self.requestcls(self.vreg, form=kwargs)
+        req.set_connection(self.cnx)
+        return req
+        
+    def get_rset_and_req(self, rql, optional_args=None, args=None, eidkey=None):
+        """executes <rql>, builds a resultset, and returns a
+        couple (rset, req) where req is a FakeRequest
+        """
+        return (self.execute(rql, args, eidkey),
+                self.create_request(rql=rql, **optional_args or {}))
+    
+    def check_view(self, rql, vid, optional_args, template='main'):
+        """checks if vreg.view() raises an exception in this environment
+
+        If any exception is raised in this method, it will be considered
+        as a TestFailure
+        """
+        return self.call_view(vid, rql,
+                              template=template, optional_args=optional_args)
+    
+    def call_view(self, vid, rql, template='main', optional_args=None):
+        """shortcut for self.vreg.view()"""
+        assert template
+        if optional_args is None:
+            optional_args = {}
+        optional_args['vid'] = vid
+        req = self.create_request(rql=rql, **optional_args)
+        return self.vreg.main_template(req, template)
+
+    def call_edit(self, req):
+        """shortcut for self.app.edit()"""
+        controller = self.app.select_controller('edit', req)
+        try:
+            controller.publish()
+        except Redirect:
+            result = 'success'
+        else:
+            raise Exception('edit should raise Redirect on success')
+        req.cnx.commit()
+        return result
+
+    def iter_possible_views(self, req, rset):
+        """returns a list of possible vids for <rql>"""
+        for view in self.vreg.possible_views(req, rset):
+            if view.category == 'startupview':
+                continue
+            yield view.id
+        if rset.rowcount == 1:
+            yield 'edition'
+
+    def iter_startup_views(self, req):
+        """returns the list of startup views"""
+        for view in self.vreg.possible_views(req, None):
+            if view.category != 'startupview':
+                continue
+            yield view.id
+
+    def iter_possible_actions(self, req, rset):
+        """returns a list of possible vids for <rql>"""
+        for action in self.vreg.possible_vobjects('actions', req, rset):
+            yield action
+
+class ExistingTestEnvironment(TestEnvironment):
+    
+    def __init__(self, appid, sourcefile, verbose=False):
+        config = ApptestConfiguration(appid, sourcefile=sourcefile)
+        if verbose:
+            print "init test database ..."
+        source = config.sources()['system']
+        self.vreg = CubicWebRegistry(config)
+        repo, self.cnx = init_test_database(driver=source['db-driver'],
+                                            vreg=self.vreg)
+        if verbose:
+            print "init done" 
+        self.app = CubicWebPublisher(config, vreg=self.vreg)
+        self.verbose = verbose
+        # this is done when the publisher is opening a connection
+        self.cnx.vreg = self.vreg
+        login = source['db-user']
+        
+    def setup(self, config=None):
+        """config is passed by TestSuite but is ignored in this environment"""
+        cursor = self.cnx.cursor()
+        self.last_eid = cursor.execute('Any X WHERE X creation_date D ORDERBY D DESC LIMIT 1').rows[0][0]
+
+    def cleanup(self):
+        """cancel inserted elements during tests"""
+        cursor = self.cnx.cursor()
+        cursor.execute('DELETE Any X WHERE X eid > %(x)s', {'x' : self.last_eid}, eid_key='x')
+        print "cleaning done"
+        self.cnx.commit()
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/devtools/apptest.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,504 @@
+"""This module provides misc utilities to test applications
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from copy import deepcopy
+
+import simplejson
+
+from logilab.common.testlib import TestCase
+from logilab.common.pytest import nocoverage
+from logilab.common.umessage import message_from_string
+
+from cubicweb.devtools import init_test_database, TestServerConfiguration, ApptestConfiguration
+from cubicweb.devtools._apptest import TestEnvironment
+from cubicweb.devtools.fake import FakeRequest
+
+from cubicweb.dbapi import repo_connect, ConnectionProperties, ProgrammingError
+
+
+MAILBOX = []
+class Email:
+    def __init__(self, recipients, msg):
+        self.recipients = recipients
+        self.msg = msg
+
+    @property
+    def message(self):
+        return message_from_string(self.msg)
+    
+    def __repr__(self):
+        return '<Email to %s with subject %s>' % (','.join(self.recipients),
+                                                  self.message.get('Subject'))
+    
+class MockSMTP:
+    def __init__(self, server, port):
+        pass
+    def close(self):
+        pass
+    def sendmail(self, helo_addr, recipients, msg):
+        MAILBOX.append(Email(recipients, msg))
+
+from cubicweb.server import hookhelper
+hookhelper.SMTP = MockSMTP
+
+
+def get_versions(self, checkversions=False):
+    """return the a dictionary containing cubes used by this application
+    as key with their version as value, including cubicweb version. This is a
+    public method, not requiring a session id.
+
+    replace Repository.get_versions by this method if you don't want versions
+    checking
+    """
+    vcconf = {'cubicweb': self.config.cubicweb_version()}
+    self.config.bootstrap_cubes()
+    for pk in self.config.cubes():
+        version = self.config.template_version(pk)
+        vcconf[pk] = version
+    self.config._cubes = None
+    return vcconf
+
+
+@property
+def late_binding_env(self):
+    """builds TestEnvironment as late as possible"""
+    if not hasattr(self, '_env'):
+        self.__class__._env = TestEnvironment('data', configcls=self.configcls,
+                                              requestcls=self.requestcls)
+    return self._env
+
+
+class autoenv(type):
+    """automatically set environment on EnvBasedTC subclasses if necessary
+    """
+    def __new__(mcs, name, bases, classdict):
+        env = classdict.get('env')
+        # try to find env in one of the base classes
+        if env is None:
+            for base in bases:
+                env = getattr(base, 'env', None)
+                if env is not None:
+                    classdict['env'] = env
+                    break
+        if not classdict.get('__abstract__')  and not classdict.get('env'):
+            classdict['env'] = late_binding_env
+        return super(autoenv, mcs).__new__(mcs, name, bases, classdict)
+
+
+class EnvBasedTC(TestCase):
+    """abstract class for test using an apptest environment
+    """
+    __metaclass__ = autoenv
+    __abstract__ = True
+    env = None
+    configcls = ApptestConfiguration
+    requestcls = FakeRequest
+    
+    # user / session management ###############################################
+
+    def user(self, req=None):
+        if req is None:
+            req = self.env.create_request()
+            return self.env.cnx.user(req)
+        else:
+            return req.user
+
+    def create_user(self, *args, **kwargs):
+        return self.env.create_user(*args, **kwargs)
+
+    def login(self, login):
+        return self.env.login(login)
+
+    def restore_connection(self):
+        self.env.restore_connection()
+        
+    # db api ##################################################################
+
+    @nocoverage
+    def cursor(self, req=None):
+        return self.env.cnx.cursor(req or self.request())
+    
+    @nocoverage
+    def execute(self, *args, **kwargs):
+        return self.env.execute(*args, **kwargs)
+
+    @nocoverage
+    def commit(self):
+        self.env.cnx.commit()
+    
+    @nocoverage
+    def rollback(self):
+        try:
+            self.env.cnx.rollback()
+        except ProgrammingError:
+            pass
+        
+    # other utilities #########################################################
+    def set_debug(self, debugmode):
+        from cubicweb.server import set_debug
+        set_debug(debugmode)
+    
+    @property
+    def config(self):
+        return self.vreg.config
+
+    def session(self):
+        """return current server side session (using default manager account)"""
+        return self.env.repo._sessions[self.env.cnx.sessionid]
+    
+    def request(self, *args, **kwargs):
+        """return a web interface request"""
+        return self.env.create_request(*args, **kwargs)
+
+    @nocoverage
+    def rset_and_req(self, *args, **kwargs):
+        return self.env.get_rset_and_req(*args, **kwargs)
+    
+    def entity(self, rql, args=None, eidkey=None, req=None):
+        return self.execute(rql, args, eidkey, req=req).get_entity(0, 0)
+    
+    def etype_instance(self, etype, req=None):
+        req = req or self.request()
+        e = self.env.vreg.etype_class(etype)(req, None, None)
+        e.eid = None
+        return e
+    
+    def add_entity(self, etype, **kwargs):
+        rql = ['INSERT %s X' % etype]
+
+        # dict for replacement in RQL Request
+        rql_args = {}
+
+        if kwargs: #
+            rql.append(':')
+            # dict to define new entities variables
+            entities = {}
+
+            # assignment part of the request
+            sub_rql = []
+            for key, value in kwargs.iteritems():
+                # entities
+                if hasattr(value, 'eid'): 
+                    new_value = "%s__" % key.upper()
+                    
+                    entities[new_value] = value.eid
+                    rql_args[new_value] = value.eid
+                    
+                    sub_rql.append("X %s %s" % (key, new_value))
+                # final attributes
+                else: 
+                    sub_rql.append('X %s %%(%s)s' % (key, key))
+                    rql_args[key] = value
+            rql.append(', '.join(sub_rql))
+
+
+            if entities:
+                rql.append('WHERE')
+                # WHERE part of the request (to link entities to their eid)
+                sub_rql = []
+                for key, value in entities.iteritems():
+                    sub_rql.append("%s eid %%(%s)s" % (key, key))
+                rql.append(', '.join(sub_rql))
+
+        rql = ' '.join(rql)
+        rset = self.execute(rql, rql_args)
+        return rset.get_entity(0, 0)
+
+    def set_option(self, optname, value):
+        self.vreg.config.global_set_option(optname, value)
+
+    def pviews(self, req, rset):
+        return sorted((a.id, a.__class__) for a in self.vreg.possible_views(req, rset)) 
+        
+    def pactions(self, req, rset, skipcategories=('addrelated', 'siteactions', 'useractions')):
+        return [(a.id, a.__class__) for a in self.vreg.possible_vobjects('actions', req, rset)
+                if a.category not in skipcategories]
    def pactionsdict(self, req, rset, skipcategories=('addrelated', 'siteactions', 'useractions')):
        """return a mapping from action category to the list of action classes
        possible on `rset`, excluding categories in `skipcategories`
        """
        res = {}
        for a in self.vreg.possible_vobjects('actions', req, rset):
            if a.category not in skipcategories:
                res.setdefault(a.category, []).append(a.__class__)
        return res
+
    def paddrelactions(self, req, rset):
        """return the list of (id, class) of 'addrelated' actions possible on
        `rset`
        """
        return [(a.id, a.__class__) for a in self.vreg.possible_vobjects('actions', req, rset)
                if a.category == 'addrelated']
+               
    def remote_call(self, fname, *args):
        """remote call simulation: json-encode `args` and publish them through
        the 'json' controller, as a browser-side remote call would do

        :return: a (controller output, request) tuple
        """
        dump = simplejson.dumps
        args = [dump(arg) for arg in args]
        req = self.request(mode='remote', fname=fname, pageid='123', arg=args)
        ctrl = self.env.app.select_controller('json', req)
        return ctrl.publish(), req
+
+    # default test setup and teardown #########################################
+        
    def setup_database(self):
        """override in concrete test cases to create test fixtures; called
        once by setUp, after which the transaction is committed
        """
        pass
+
    def setUp(self):
        """restore the connection, record the current max eid (so tearDown can
        delete entities created by the test), then populate the database
        through setup_database()
        """
        self.restore_connection()
        session = self.session()
        #self.maxeid = self.execute('Any MAX(X)')
        session.set_pool()
        # remember the highest existing eid: everything above it after the
        # test belongs to the test and will be deleted on tearDown
        self.maxeid = session.system_sql('SELECT MAX(eid) FROM entities').fetchone()[0]
        self.app = self.env.app
        self.vreg = self.env.app.vreg
        self.schema = self.vreg.schema
        self.vreg.config.mode = 'test'
        # set default-dest-addrs to a dumb email address to avoid mailbox or
        # mail queue pollution
        self.set_option('default-dest-addrs', ['whatever'])
        self.setup_database()
        self.commit()
        MAILBOX[:] = [] # reset mailbox
+        
    @nocoverage
    def tearDown(self):
        """rollback the transaction and delete every entity created during the
        test (i.e. with an eid greater than the one recorded by setUp)
        """
        self.rollback()
        # self.env.restore_database()
        self.env.restore_connection()
        # NOTE(review): self.maxeid may be None on an empty entities table,
        # which would build an invalid RQL string -- assumed non-empty here
        self.session().unsafe_execute('DELETE Any X WHERE X eid > %s' % self.maxeid)
        self.commit()
+
+
+# XXX
+try:
+    from cubicweb.web import Redirect
+    from urllib import unquote
+except ImportError:
+    pass # cubicweb-web not installed
+else:
+    class ControllerTC(EnvBasedTC):
+        def setUp(self):
+            super(ControllerTC, self).setUp()
+            self.req = self.request()
+            self.ctrl = self.env.app.select_controller('edit', self.req)
+
+        def publish(self, req):
+            assert req is self.ctrl.req
+            try:
+                result = self.ctrl.publish()
+                req.cnx.commit()
+            except Redirect:
+                req.cnx.commit()
+                raise
+            return result
+
+        def expect_redirect_publish(self, req=None):
+            if req is not None:
+                self.ctrl = self.env.app.select_controller('edit', req)
+            else:
+                req = self.req
+            try:
+                res = self.publish(req)
+            except Redirect, ex:
+                try:
+                    path, params = ex.location.split('?', 1)
+                except:
+                    path, params = ex.location, ""
+                req._url = path
+                cleanup = lambda p: (p[0], unquote(p[1]))
+                params = dict(cleanup(p.split('=', 1)) for p in params.split('&') if p)
+                return req.relative_path(False), params # path.rsplit('/', 1)[-1], params
+            else:
+                self.fail('expected a Redirect exception')
+
+
def make_late_binding_repo_property(attrname):
    """return a property for `attrname` ('_repo' or '_cnx') which initializes
    the test database as late as possible, on first access
    """
    @property
    def late_binding(self):
        """builds cnx as late as possible"""
        if not hasattr(self, attrname):
            # sets explicit test mode here to avoid autoreload
            from cubicweb.cwconfig import CubicWebConfiguration
            CubicWebConfiguration.mode = 'test'
            cls = self.__class__
            config = self.repo_config or TestServerConfiguration('data')
            # both _repo and _cnx are set at once, whichever is accessed first
            cls._repo, cls._cnx = init_test_database('sqlite',  config=config)
        return getattr(self, attrname)
    return late_binding
+
+
class autorepo(type):
    """automatically set repository on RepositoryBasedTC subclasses if necessary
    """
    def __new__(mcs, name, bases, classdict):
        repo = classdict.get('repo')
        # try to find repo in one of the base classes
        if repo is None:
            for base in bases:
                repo = getattr(base, 'repo', None)
                if repo is not None:
                    classdict['repo'] = repo
                    break
        # no repo set or inherited: install lazy properties which will create
        # the test repository/connection on first access
        if name != 'RepositoryBasedTC' and not classdict.get('repo'):
            classdict['repo'] = make_late_binding_repo_property('_repo')
            classdict['cnx'] = make_late_binding_repo_property('_cnx')
        return super(autorepo, mcs).__new__(mcs, name, bases, classdict)
+
+
+class RepositoryBasedTC(TestCase):
+    """abstract class for test using direct repository connections
+    """
+    __metaclass__ = autorepo
+    repo_config = None # set a particular config instance if necessary
+    
+    # user / session management ###############################################
+
+    def create_user(self, user, groups=('users',), password=None, commit=True):
+        if password is None:
+            password = user
+        eid = self.execute('INSERT EUser X: X login %(x)s, X upassword %(p)s,'
+                            'X in_state S WHERE S name "activated"',
+                            {'x': unicode(user), 'p': password})[0][0]
+        groups = ','.join(repr(group) for group in groups)
+        self.execute('SET X in_group Y WHERE X eid %%(x)s, Y name IN (%s)' % groups,
+                      {'x': eid})
+        if commit:
+            self.commit()
+        self.session.reset_pool()        
+        return eid
+    
+    def login(self, login, password=None):
+        cnx = repo_connect(self.repo, unicode(login), password or login,
+                           ConnectionProperties('inmemory'))
+        self.cnxs.append(cnx)
+        return cnx
+
+    def current_session(self):
+        return self.repo._sessions[self.cnxs[-1].sessionid]
+    
+    def restore_connection(self):
+        assert len(self.cnxs) == 1, self.cnxs
+        cnx = self.cnxs.pop()
+        try:
+            cnx.close()
+        except Exception, ex:
+            print "exception occured while closing connection", ex
+        
+    # db api ##################################################################
+
+    def execute(self, rql, args=None, eid_key=None):
+        assert self.session.id == self.cnxid
+        rset = self.__execute(self.cnxid, rql, args, eid_key)
+        rset.vreg = self.vreg
+        rset.req = self.session
+        # call to set_pool is necessary to avoid pb when using
+        # application entities for convenience
+        self.session.set_pool()
+        return rset
+    
+    def commit(self):
+        self.__commit(self.cnxid)
+        self.session.set_pool()        
+    
+    def rollback(self):
+        self.__rollback(self.cnxid)
+        self.session.set_pool()        
+    
+    def close(self):
+        self.__close(self.cnxid)
+
+    # other utilities #########################################################
+    def set_debug(self, debugmode):
+        from cubicweb.server import set_debug
+        set_debug(debugmode)
+        
+    def set_option(self, optname, value):
+        self.vreg.config.global_set_option(optname, value)
+            
+    def add_entity(self, etype, **kwargs):
+        restrictions = ', '.join('X %s %%(%s)s' % (key, key) for key in kwargs)
+        rql = 'INSERT %s X' % etype
+        if kwargs:
+            rql += ': %s' % ', '.join('X %s %%(%s)s' % (key, key) for key in kwargs)
+        rset = self.execute(rql, kwargs)
+        return rset.get_entity(0, 0)
+
+    def default_user_password(self):
+        config = self.repo.config #TestConfiguration('data')
+        user = unicode(config.sources()['system']['db-user'])
+        passwd = config.sources()['system']['db-password']
+        return user, passwd
+    
+    def close_connections(self):
+        for cnx in self.cnxs:
+            try:
+                cnx.rollback()
+                cnx.close()
+            except:
+                continue
+        self.cnxs = []
+
+    pactions = EnvBasedTC.pactions.im_func
+    pactionsdict = EnvBasedTC.pactionsdict.im_func
+    
+    # default test setup and teardown #########################################
+    copy_schema = False
+    
+    def _prepare(self):
+        MAILBOX[:] = [] # reset mailbox
+        if hasattr(self, 'cnxid'):
+            return
+        repo = self.repo
+        self.__execute = repo.execute
+        self.__commit = repo.commit
+        self.__rollback = repo.rollback
+        self.__close = repo.close
+        self.cnxid = repo.connect(*self.default_user_password())
+        self.session = repo._sessions[self.cnxid]
+        # XXX copy schema since hooks may alter it and it may be not fully
+        #     cleaned (missing some schema synchronization support)
+        try:
+            origschema = repo.__schema
+        except AttributeError:
+            origschema = repo.schema
+            repo.__schema = origschema
+        if self.copy_schema:
+            repo.schema = deepcopy(origschema)
+            repo.set_schema(repo.schema) # reset hooks
+            repo.vreg.update_schema(repo.schema)
+        self.cnxs = []
+        # reset caches, they may introduce bugs among tests
+        repo._type_source_cache = {}
+        repo._extid_cache = {}
+        repo.querier._rql_cache = {}
+        for source in repo.sources:
+            source.reset_caches()
+        for s in repo.sources:
+            if hasattr(s, '_cache'):
+                s._cache = {}
+
+    @property
+    def config(self):
+        return self.repo.config
+
+    @property
+    def vreg(self):
+        return self.repo.vreg
+
+    @property
+    def schema(self):
+        return self.repo.schema
+    
+    def setUp(self):
+        self._prepare()
+        self.session.set_pool()
+        self.maxeid = self.session.system_sql('SELECT MAX(eid) FROM entities').fetchone()[0]
+        #self.maxeid = self.execute('Any MAX(X)')
+        
+    def tearDown(self, close=True):
+        self.close_connections()
+        self.rollback()
+        self.session.unsafe_execute('DELETE Any X WHERE X eid > %(x)s', {'x': self.maxeid})
+        self.commit()
+        if close:
+            self.close()
+    
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/devtools/cwtwill.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,108 @@
+"""cubicweb extensions for twill"""
+
+import re
+from urllib import quote
+
+from twill import commands as twc
+
+# convenience / consistency renaming
+has_text = twc.find
+hasnt_text = twc.notfind
+
+
+# specific commands
+_LINK = re.compile('<a.*?href="(.*?)".*?>(.*?)</a>', re.I | re.S)
+
def has_link(text, url=''):
    """assert that the current page contains a link labelled `text`,
    optionally restricted to the target `url`
    """
    html = twc.get_browser().get_html()
    if html:
        for hit in _LINK.finditer(html):
            href = hit.group(1)
            label = hit.group(2)
            if label != text:
                continue
            # if url is specified the link target must match as well
            if url and href != url:
                continue
            return
    raise AssertionError('link %s (%s) not found' % (text, url))
+        
+
def view(rql, vid=''):
    """
    >> view 'Project P'

    apply <vid> to <rql>'s rset
    """
    url = 'view?rql=%s' % quote(rql)
    if vid:
        url += '&vid=%s' % vid
    twc.go(url)
+
def create(etype):
    """
    >> create Project

    go to <etype>'s creation page (the 'creation' view)
    """
    twc.go('view?etype=%s&vid=creation' % etype)
+
def edit(rql):
    """
    >> edit "Project P WHERE P eid 123"

    calls edition view for <rql>
    """
    twc.go('view?rql=%s&vid=edition' % quote(rql))
+
+
+        
+
def setvalue(formname, fieldname, value):
    """
    >> setvalue entityForm name pylint

    sets the field's value in the form
    <formname> should either be the form's index, the form's name
    or the form's id
    """
    browser = twc.get_browser()
    form = browser.get_form(formname)
    if form is None:
        # try to find if one of the forms has <formname> as id
        for index, form in enumerate(browser._browser.forms()):
            # forms in cubicweb don't always have a name
            if form.attrs.get('id') == formname:
                # browser.get_form_field knows how to deal with form index
                formname = str(index+1)
                break
        else:
            raise ValueError('could not find form named <%s>' % formname)
    # field names are suffixed with the eid of the edited entity
    eid = browser.get_form_field(form, 'eid').value
    twc.formvalue(formname, '%s:%s' % (fieldname, eid), value)
+
+
def submitform(formname, submit_button=None):
    """
    >> submitform entityForm

    Submit the form named entityForm. This is useful when the form is
    pre-filled and we only want to click on submit.
    (The original submit command chooses the form to submit according to the last
    formvalue instruction)
    """
    browser = twc.get_browser()
    form = browser.get_form(formname)
    if form is None:
        # try to find if one of the forms has <formname> as id
        for form in browser._browser.forms():
            # forms in cubicweb don't always have a name
            if form.attrs.get('id') == formname:
                break
        else:
            raise ValueError('could not find form named <%s>' % formname)
    browser._browser.form = form
    browser.submit(submit_button)
+
+    
+# missing actions: delete, copy, changeview
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/devtools/devctl.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,434 @@
+"""additional cubicweb-ctl commands and command handlers for cubicweb and cubicweb's
+cubes development
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+import sys
+from os import walk, mkdir, chdir, listdir
+from os.path import join, exists, abspath, basename, normpath, split, isdir
+
+
+from logilab.common import STD_BLACKLIST
+from logilab.common.modutils import get_module_files
+from logilab.common.textutils import get_csv
+
+from cubicweb import CW_SOFTWARE_ROOT as BASEDIR
+from cubicweb.__pkginfo__ import version as cubicwebversion
+from cubicweb import BadCommandUsage
+from cubicweb.toolsutils import Command, register_commands, confirm, copy_skeleton
+from cubicweb.web.webconfig import WebConfiguration
+from cubicweb.server.serverconfig import ServerConfiguration
+
+
class DevConfiguration(ServerConfiguration, WebConfiguration):
    """dummy config to get full library schema and entities"""
    creating = True
    def __init__(self, appid=None, cube=None):
        self._cube = cube
        super(DevConfiguration, self).__init__(appid)
        if self._cube is None:
            self._cubes = ()
        else:
            # include the cube's whole dependency chain
            self._cubes = self.expand_cubes(self.cube_dependencies(self._cube))

#    def adjust_sys_path(self):
#        # update python path if necessary
#        if not self.cubes_dir() in sys.path:
#            sys.path.insert(0, self.cubes_dir())

    @property
    def apphome(self):
        # the application home is the appid itself (a directory path here)
        return self.appid

    def init_log(self, debug=None):
        # no logging needed for this dummy configuration
        pass
    def load_configuration(self):
        # nothing to load, configuration is built in memory
        pass

    # search vobjects in both server-side and web-side locations
    cubicweb_vobject_path = ServerConfiguration.cubicweb_vobject_path | WebConfiguration.cubicweb_vobject_path
    cube_vobject_path = ServerConfiguration.cube_vobject_path | WebConfiguration.cube_vobject_path
+
+
def generate_schema_pot(w, cubedir=None):
    """generate a pot file with schema specific i18n messages

    notice that relation definitions description and static vocabulary
    should be marked using '_' and extracted using xgettext

    :param w: a writer callable (e.g. an open file's write method)
    :param cubedir: cube directory, or None to generate cubicweb's own catalog
    """
    from cubicweb.cwvreg import CubicWebRegistry
    cube = cubedir and split(cubedir)[-1]
    config = DevConfiguration(join(BASEDIR, 'web'), cube)
    if cubedir:
        # load the library schema first so cube-specific messages can be
        # distinguished from inherited ones
        libschema = config.load_schema()
        config = DevConfiguration(cubedir, cube)
        schema = config.load_schema()
    else:
        schema = config.load_schema()
        libschema = None
        config.cleanup_interface_sobjects = False
    vreg = CubicWebRegistry(config)
    vreg.set_schema(schema)
    vreg.register_objects(config.vregistry_path())
    w(DEFAULT_POT_HEAD)
    _generate_schema_pot(w, vreg, schema, libschema=libschema,
                         cube=cube)
    # cleanup sys.modules, required when we're updating multiple cubes
    for name, mod in sys.modules.items():
        if mod is None:
            # duh ? logilab.common.os for instance
            del sys.modules[name]
            continue
        if not hasattr(mod, '__file__'):
            continue
        for path in config.vregistry_path():
            if mod.__file__.startswith(path):
                del sys.modules[name]
                break
+                
def _generate_schema_pot(w, vreg, schema, libschema=None, cube=None):
    """write schema-derived i18n messages through the writer `w`

    when `libschema` is given, entities/relations already defined by the
    library schema are skipped so only cube-specific messages are emitted
    """
    from mx.DateTime import now
    from cubicweb.common.i18n import add_msg
    w('# schema pot file, generated on %s\n' % now().strftime('%Y-%m-%d %H:%M:%S'))
    w('# \n')
    w('# singular and plural forms for each entity type\n')
    w('\n')
    if libschema is not None:
        entities = [e for e in schema.entities() if not e in libschema]
    else:
        entities = schema.entities()
    done = set()
    for eschema in sorted(entities):
        etype = eschema.type
        add_msg(w, etype)
        add_msg(w, '%s_plural' % etype)
        if not eschema.is_final():
            add_msg(w, 'This %s' % etype)
            add_msg(w, 'New %s' % etype)
            add_msg(w, 'add a %s' % etype)
            add_msg(w, 'remove this %s' % etype)
        # emit each description only once
        if eschema.description and not eschema.description in done:
            done.add(eschema.description)
            add_msg(w, eschema.description)
    w('# subject and object forms for each relation type\n')
    w('# (no object form for final relation types)\n')
    w('\n')
    if libschema is not None:
        relations = [r for r in schema.relations() if not r in libschema]
    else:
        relations = schema.relations()
    for rschema in sorted(set(relations)):
        rtype = rschema.type
        add_msg(w, rtype)
        # symmetric relations read the same way in both directions, so no
        # object form is needed
        if not (schema.rschema(rtype).is_final() or rschema.symetric):
            add_msg(w, '%s_object' % rtype)
        if rschema.description and rschema.description not in done:
            done.add(rschema.description)
            add_msg(w, rschema.description)
    w('# add related box generated message\n')
    w('\n')
    for eschema in schema.entities():
        if eschema.is_final():
            continue
        entity = vreg.etype_class(eschema)(None, None)
        for x, rschemas in (('subject', eschema.subject_relations()),
                            ('object', eschema.object_relations())):
            for rschema in rschemas:
                if rschema.is_final():
                    continue
                for teschema in rschema.targets(eschema, x):
                    if defined_in_library(libschema, eschema, rschema, teschema, x):
                        continue
                    if entity.relation_mode(rschema.type, teschema.type, x) == 'create':
                        if x == 'subject':
                            label = 'add %s %s %s %s' % (eschema, rschema, teschema, x)
                            label2 = "creating %s (%s %%(linkto)s %s %s)" % (teschema, eschema, rschema, teschema)
                        else:
                            label = 'add %s %s %s %s' % (teschema, rschema, eschema, x)
                            label2 = "creating %s (%s %s %s %%(linkto)s)" % (teschema, teschema, rschema, eschema)
                        add_msg(w, label)
                        add_msg(w, label2)
    # messages for vregistry objects defining user properties
    cube = (cube or 'cubicweb') + '.'
    done = set()
    for reg, objdict in vreg.items():
        for objects in objdict.values():
            for obj in objects:
                objid = '%s_%s' % (reg, obj.id)
                if objid in done:
                    continue
                if obj.__module__.startswith(cube) and obj.property_defs:
                    add_msg(w, '%s_description' % objid)
                    add_msg(w, objid)
                    done.add(objid)
+                    
def defined_in_library(libschema, etype, rtype, tetype, x):
    """return true if the given relation definition exists in cubicweb's library"""
    if libschema is None:
        return False
    # orient the definition according to the role played by etype
    if x == 'subject':
        rdef = (etype, tetype)
    else:
        rdef = (tetype, etype)
    try:
        return libschema.rschema(rtype).has_rdef(*rdef)
    except KeyError:
        return False
+
+
# languages for which catalogs are maintained
LANGS = ('en', 'fr')
I18NDIR = join(BASEDIR, 'i18n')
# header prepended to every generated .pot file
DEFAULT_POT_HEAD = r'''msgid ""
msgstr ""
"Project-Id-Version: cubicweb %s\n"
"PO-Revision-Date: 2008-03-28 18:14+0100\n"
"Last-Translator: Logilab Team <contact@logilab.fr>\n"
"Language-Team: fr <contact@logilab.fr>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Generated-By: cubicweb-devtools\n"
"Plural-Forms: nplurals=2; plural=(n > 1);\n"

''' % cubicwebversion
+
+
class UpdateCubicWebCatalogCommand(Command):
    """Update i18n catalogs for cubicweb library.

    It will regenerate cubicweb/i18n/xx.po files. You'll have then to edit those
    files to add translations of newly added messages.
    """
    name = 'i18nlibupdate'

    def run(self, args):
        """run the command with its specific arguments"""
        if args:
            raise BadCommandUsage('Too much arguments')
        import shutil
        from tempfile import mktemp
        import yams
        from logilab.common.fileutils import ensure_fs_mode
        from logilab.common.shellutils import find, rm
        from cubicweb.common.i18n import extract_from_tal, execute
        # NOTE(review): mktemp is race-prone, mkdtemp would be safer
        tempdir = mktemp()
        mkdir(tempdir)
        potfiles = [join(I18NDIR, 'entities.pot')]
        print '******** extract schema messages'
        schemapot = join(tempdir, 'schema.pot')
        potfiles.append(schemapot)
        # explicit close necessary else the file may not be yet flushed when
        # we'll using it below
        schemapotstream = file(schemapot, 'w')
        generate_schema_pot(schemapotstream.write, cubedir=None)
        schemapotstream.close()
        print '******** extract TAL messages'
        tali18nfile = join(tempdir, 'tali18n.py')
        extract_from_tal(find(join(BASEDIR, 'web'), ('.py', '.pt')), tali18nfile)
        print '******** .pot files generation'
        # run xgettext once per source kind (python, schemas, yams, tal, js)
        for id, files, lang in [('cubicweb', get_module_files(BASEDIR) + find(join(BASEDIR, 'misc', 'migration'), '.py'), None),
                                ('schemadescr', find(join(BASEDIR, 'schemas'), '.py'), None),
                                ('yams', get_module_files(yams.__path__[0]), None),
                                ('tal', [tali18nfile], None),
                                ('js', find(join(BASEDIR, 'web'), '.js'), 'java'),
                                ]:
            cmd = 'xgettext --no-location --omit-header -k_ -o %s %s'
            if lang is not None:
                cmd += ' -L %s' % lang
            potfiles.append(join(tempdir, '%s.pot' % id))
            execute(cmd % (potfiles[-1], ' '.join(files)))
        print '******** merging .pot files'
        cubicwebpot = join(tempdir, 'cubicweb.pot')
        execute('msgcat %s > %s' % (' '.join(potfiles), cubicwebpot))
        print '******** merging main pot file with existing translations'
        chdir(I18NDIR)
        toedit = []
        for lang in LANGS:
            target = '%s.po' % lang
            execute('msgmerge -N --sort-output  %s %s > %snew' % (target, cubicwebpot, target))
            ensure_fs_mode(target)
            shutil.move('%snew' % target, target)
            toedit.append(abspath(target))
        # cleanup
        rm(tempdir)
        # tell the user what to do next
        print '*' * 72
        print 'you can now edit the following files:'
        print '* ' + '\n* '.join(toedit)
        print
        print "then you'll have to update cubes catalogs using the i18nupdate command"
+
+
class UpdateTemplateCatalogCommand(Command):
    """Update i18n catalogs for cubes. If no cube is specified, update
    catalogs of all registered cubes.
    """
    name = 'i18nupdate'
    arguments = '[<cube>...]'

    def run(self, args):
        """run the command with its specific arguments"""
        CUBEDIR = DevConfiguration.cubes_dir()
        if args:
            cubes = [join(CUBEDIR, app) for app in args]
        else:
            # no explicit cube: take every cube having an i18n directory
            cubes = [join(CUBEDIR, app) for app in listdir(CUBEDIR)
                         if exists(join(CUBEDIR, app, 'i18n'))]
        update_cubes_catalogs(cubes)
+
def update_cubes_catalogs(cubes):
    """regenerate and merge i18n catalogs for each cube directory in `cubes`"""
    import shutil
    from tempfile import mktemp
    from logilab.common.fileutils import ensure_fs_mode
    from logilab.common.shellutils import find, rm
    from cubicweb.common.i18n import extract_from_tal, execute
    toedit = []
    for cubedir in cubes:
        cube = basename(normpath(cubedir))
        if not isdir(cubedir):
            print 'unknown cube', cube
            continue
        # NOTE(review): mktemp is race-prone, mkdtemp would be safer
        tempdir = mktemp()
        mkdir(tempdir)
        print '*' * 72
        print 'updating %s cube...' % cube
        chdir(cubedir)
        potfiles = [join('i18n', scfile) for scfile in ('entities.pot',)
                    if exists(join('i18n', scfile))]
        print '******** extract schema messages'
        schemapot = join(tempdir, 'schema.pot')
        potfiles.append(schemapot)
        # explicit close necessary else the file may not be yet flushed when
        # we'll using it below
        schemapotstream = file(schemapot, 'w')
        generate_schema_pot(schemapotstream.write, cubedir)
        schemapotstream.close()
        print '******** extract TAL messages'
        tali18nfile = join(tempdir, 'tali18n.py')
        extract_from_tal(find('.', ('.py', '.pt'), blacklist=STD_BLACKLIST+('test',)), tali18nfile)
        print '******** extract Javascript messages'
        jsfiles =  find('.', '.js')
        if jsfiles:
            tmppotfile = join(tempdir, 'js.pot')
            execute('xgettext --no-location --omit-header -k_ -L java --from-code=utf-8 -o %s %s'
                    % (tmppotfile, ' '.join(jsfiles)))
            # no pot file created if there are no string to translate
            if exists(tmppotfile):
                potfiles.append(tmppotfile)
        print '******** create cube specific catalog'
        tmppotfile = join(tempdir, 'generated.pot')
        cubefiles = find('.', '.py', blacklist=STD_BLACKLIST+('test',))
        cubefiles.append(tali18nfile)
        execute('xgettext --no-location --omit-header -k_ -o %s %s'
                % (tmppotfile, ' '.join(cubefiles)))
        if exists(tmppotfile): # doesn't exists of no translation string found
            potfiles.append(tmppotfile)
        potfile = join(tempdir, 'cube.pot')
        print '******** merging .pot files'
        execute('msgcat %s > %s' % (' '.join(potfiles), potfile))
        print '******** merging main pot file with existing translations'
        chdir('i18n')
        for lang in LANGS:
            print '****', lang
            cubepo = '%s.po' % lang
            if not exists(cubepo):
                shutil.copy(potfile, cubepo)
            else:
                execute('msgmerge -N -s %s %s > %snew' % (cubepo, potfile, cubepo))
                ensure_fs_mode(cubepo)
                shutil.move('%snew' % cubepo, cubepo)
            toedit.append(abspath(cubepo))
        # cleanup
        rm(tempdir)
    # tell the user what to do next
    print '*' * 72
    print 'you can now edit the following files:'
    print '* ' + '\n* '.join(toedit)
+
+
class LiveServerCommand(Command):
    """Run a server from within a cube directory.
    """
    name = 'live-server'
    arguments = ''
    options = ()

    def run(self, args):
        """run the command with its specific arguments"""
        # import here so the devtools test server is only pulled in on demand
        from cubicweb.devtools.livetest import runserver
        runserver()
+
+
+class NewTemplateCommand(Command):
+    """Create a new cube.
+
+    <cubename>
+      the name of the new cube
+    """
+    name = 'newcube'
+    arguments = '<cubename>'
+
+    
+    def run(self, args):
+        if len(args) != 1:
+            raise BadCommandUsage("exactly one argument (cube name) is expected")
+        cubename, = args
+        if ServerConfiguration.mode != "dev":
+            self.fail("you can only create new cubes in development mode")
+        cubedir = ServerConfiguration.CUBES_DIR
+        if not isdir(cubedir):
+            print "creating apps directory", cubedir
+            try:
+                mkdir(cubedir)
+            except OSError, err:
+                self.fail("failed to create directory %r\n(%s)" % (cubedir, err))
+        cubedir = join(cubedir, cubename)
+        if exists(cubedir):
+            self.fail("%s already exists !" % (cubedir))
+        skeldir = join(BASEDIR, 'skeleton')
+        distname = raw_input('Debian name for your cube (just type enter to use the cube name): ').strip()
+        if not distname:
+            distname = 'cubicweb-%s' % cubename.lower()
+        elif not distname.startswith('cubicweb-'):
+            if confirm('do you mean cubicweb-%s ?' % distname):
+                distname = 'cubicweb-' + distname
+        shortdesc = raw_input('Enter a short description for your cube: ')
+        longdesc = raw_input('Enter a long description (or nothing if you want to reuse the short one): ')
+        includes = self._ask_for_dependancies()
+        if len(includes) == 1:
+            dependancies = '%r,' % includes[0]
+        else:
+            dependancies = ', '.join(repr(cube) for cube in includes)
+        from mx.DateTime import now
+        context = {'cubename' : cubename,
+                   'distname' : distname,
+                   'shortdesc' : shortdesc,
+                   'longdesc' : longdesc or shortdesc,
+                   'dependancies' : dependancies,
+                   'version'  : cubicwebversion,
+                   'year'  : str(now().year),
+                   }
+        copy_skeleton(skeldir, cubedir, context)
+
+    def _ask_for_dependancies(self):
+        includes = []
+        for stdtype in ServerConfiguration.available_cubes():
+            ans = raw_input("Depends on cube %s? (N/y/s(kip)/t(ype)"
+                            % stdtype).lower().strip()
+            if ans == 'y':
+                includes.append(stdtype)
+            if ans == 't':
+                includes = get_csv(raw_input('type dependancies: '))
+                break
+            elif ans == 's':
+                break
+        return includes
+    
+        
# register the commands defined in this module with cubicweb-ctl
register_commands((UpdateCubicWebCatalogCommand,
                   UpdateTemplateCatalogCommand,
                   LiveServerCommand,
                   NewTemplateCommand,
                   ))
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/devtools/fake.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,241 @@
+"""Fake objects to ease testing of cubicweb without a fully working environment
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from logilab.common.testlib import mock_object as Mock
+from logilab.common.adbh import get_adv_func_helper
+
+from indexer import get_indexer
+
+from cubicweb import RequestSessionMixIn
+from cubicweb.web.request import CubicWebRequestBase
+from cubicweb.devtools import BASE_URL, BaseApptestConfiguration
+
+
class FakeConfig(dict, BaseApptestConfiguration):
    """in-memory stand-in for an application configuration

    Behaves as a plain dict of configuration options while still providing
    the BaseApptestConfiguration interface expected by tested code.
    """
    translations = {}
    apphome = None

    def __init__(self, appid='data', apphome=None, cubes=()):
        self.appid = appid
        self.apphome = apphome
        self._cubes = cubes
        # default options most tests rely on
        self.update({'auth-mode': 'cookie',
                     'uid': None,
                     'base-url': BASE_URL,
                     'rql-cache-size': 100})

    def cubes(self, expand=False):
        """return the used cubes as given to the constructor (expand ignored)"""
        return self._cubes

    def sources(self):
        """a fake configuration defines no data sources"""
        return {}
+
class FakeVReg(object):
    """fake registry providing a schema, a configuration, ui properties and
    a couple of prefilled mock object registries
    """
    def __init__(self, schema=None, config=None):
        self.schema = schema
        self.config = config or FakeConfig()
        # user interface properties served by property_value()
        self.properties = {'ui.encoding': 'UTF8',
                           'ui.language': 'en',
                           }

    def property_value(self, key):
        """return the value of a ui property, raise KeyError if unknown"""
        return self.properties[key]

    # minimal static registries, enough for tests inspecting their content
    _registries = {
        'controllers' : [Mock(id='view'), Mock(id='login'),
                         Mock(id='logout'), Mock(id='edit')],
        'views' : [Mock(id='primary'), Mock(id='secondary'),
                         Mock(id='oneline'), Mock(id='list')],
        }

    def registry_objects(self, name, oid=None):
        """return every object of the registry `name` (oid is ignored)"""
        return self._registries[name]

    def etype_class(self, etype):
        """return a minimal dict-based entity class bound to etype's schema"""
        class Entity(dict):
            e_schema = self.schema[etype]
            def __init__(self, session, eid, row=0, col=0):
                self.req = session
                self.eid = eid
                self.row, self.col = row, col
            def set_eid(self, eid):
                # keep the attribute and the 'eid' dict key in sync
                self.eid = self['eid'] = eid
        return Entity
+
+
class FakeRequest(CubicWebRequestBase):
    """test implementation of an cubicweb request object"""

    def __init__(self, *args, **kwargs):
        if not (args or 'vreg' in kwargs):
            # no registry given: build a fake one
            kwargs['vreg'] = FakeVReg()
        kwargs['https'] = False
        # url returned by relative_path(), overridable through the 'url' kwarg
        self._url = kwargs.pop('url', 'view?rql=Blop&vid=blop')
        super(FakeRequest, self).__init__(*args, **kwargs)
        self._session_data = {}
        self._headers = {}

    def header_accept_language(self):
        """returns an ordered list of preferred languages"""
        return ('en',)

    def header_if_modified_since(self):
        return None

    def base_url(self):
        """return the root url of the application"""
        return BASE_URL

    def relative_path(self, includeparams=True):
        """return the normalized path of the request (ie at least relative
        to the application's root, but some other normalization may be needed
        so that the returned path may be used to compare to generated urls
        """
        if self._url.startswith(BASE_URL):
            url = self._url[len(BASE_URL):]
        else:
            url = self._url
        if includeparams:
            return url
        # strip the query string
        return url.split('?', 1)[0]

    def set_content_type(self, content_type, filename=None, encoding=None):
        """set output content type for this request. An optional filename
        may be given
        """
        pass

    def set_header(self, header, value):
        """set an output HTTP header"""
        pass

    def add_header(self, header, value):
        """set an output HTTP header"""
        pass

    def remove_header(self, header):
        """remove an output HTTP header"""
        pass

    def get_header(self, header, default=None):
        """return the value associated with the given input header,
        or `default` when the header is not set
        """
        return self._headers.get(header, default)

    def set_cookie(self, cookie, key, maxage=300):
        """set / update a cookie key

        by default, cookie will be available for the next 5 minutes
        """
        pass

    def remove_cookie(self, cookie, key):
        """remove a cookie by expiring it"""
        pass

    def validate_cache(self):
        pass

    # session compatibility (in some test are using this class to test server
    # side views...)
    def actual_session(self):
        """return the original parent session if any, else self"""
        return self

    def unsafe_execute(self, *args, **kwargs):
        """forward to execute(), dropping the server-side 'propagate' argument"""
        kwargs.pop('propagate', None)
        return self.execute(*args, **kwargs)
+
+
class FakeUser(object):
    """minimal user object: fixed login and eid, member of every group"""
    login = 'toto'
    eid = 0

    def in_groups(self, groups):
        # a fake user belongs to whatever group is asked about
        return True
+
+
class FakeSession(RequestSessionMixIn):
    """test implementation of a repository-side session"""
    def __init__(self, repo=None, user=None):
        self.repo = repo
        # reuse the repository's vreg when available, else build a fake one
        self.vreg = getattr(self.repo, 'vreg', FakeVReg())
        self.pool = FakePool()
        self.user = user or FakeUser()
        self.is_internal_session = False
        # super session iff the user's eid is -1
        self.is_super_session = self.user.eid == -1
        self._query_data = {}

    def execute(self, *args):
        """no-op query execution"""
        pass
    def commit(self, *args):
        # forget per-transaction data on commit
        self._query_data.clear()
    def close(self, *args):
        pass
    def system_sql(self, sql, args=None):
        pass

    def decorate_rset(self, rset, propagate=False):
        """attach enough context to the result set so views can use it"""
        rset.vreg = self.vreg
        rset.req = self
        return rset

    def set_entity_cache(self, entity):
        pass
+    
class FakeRepo(object):
    """fake repository keeping an in-memory extid <-> eid mapping"""
    querier = None
    def __init__(self, schema, vreg=None, config=None):
        self.extids = {}   # external id -> eid
        self.eids = {}     # eid -> external id
        self._count = 0    # last allocated eid
        self.schema = schema
        self.vreg = vreg or FakeVReg()
        self.config = config or FakeConfig()

    def internal_session(self):
        return FakeSession(self)

    def extid2eid(self, source, extid, etype, session, insert=True):
        """return the eid mapped to `extid`, allocating a new one (and
        notifying the source) when unknown and `insert` is true, else None
        """
        try:
            return self.extids[extid]
        except KeyError:
            if not insert:
                return None
            # new external id: allocate the next eid and let the source
            # build the associated entity around the insertion
            self._count += 1
            eid = self._count
            entity = source.before_entity_insertion(session, extid, etype, eid)
            self.extids[extid] = eid
            self.eids[eid] = extid
            source.after_entity_insertion(session, extid, entity)
            return eid

    def eid2extid(self, source, eid, session=None):
        """reverse mapping, raise KeyError for unknown eids"""
        return self.eids[eid]
+
+
class FakeSource(object):
    """fake data source with sqlite db helper and full text indexer wired up"""
    dbhelper = get_adv_func_helper('sqlite')
    indexer = get_indexer('sqlite', 'UTF8')
    # plug the indexer's full-text-index settings into the db helper
    dbhelper.fti_uid_attr = indexer.uid_attr
    dbhelper.fti_table = indexer.table
    dbhelper.fti_restriction_sql = indexer.restriction_sql
    dbhelper.fti_need_distinct_query = indexer.need_distinct
    def __init__(self, uri):
        self.uri = uri
+
+        
class FakePool(object):
    """fake connections pool handing out a fresh FakeSource for any uri"""
    def source(self, uri):
        # no caching: each call builds a new fake source
        return FakeSource(uri)
+
+# commented until proven to be useful
+## from logging import getLogger
+## from cubicweb import set_log_methods
+## for cls in (FakeConfig, FakeVReg, FakeRequest, FakeSession, FakeRepo,
+##             FakeSource, FakePool):
+##     set_log_methods(cls, getLogger('fake'))
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/devtools/fill.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,483 @@
+# -*- coding: iso-8859-1 -*-
+"""This modules defines func / methods for creating test repositories
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from random import randint, choice
+from copy import deepcopy
+
+from mx.DateTime import DateTime, DateTimeDelta
+from decimal import Decimal
+from yams.constraints import (SizeConstraint, StaticVocabularyConstraint,
+                              IntervalBoundConstraint)
+from rql.utils import decompose_b26 as base_decompose_b26
+
+from cubicweb import Binary
+from cubicweb.schema import RQLConstraint
+
def decompose_b26(index, ascii=False):
    """return a letter (base-26) decomposition of index

    By default the alphabet starts with a non-ascii character to exercise
    unicode handling; pass ascii=True to stick to plain ascii letters.
    """
    if not ascii:
        return base_decompose_b26(index, u'éabcdefghijklmnopqrstuvwxyz')
    return base_decompose_b26(index)
+
def get_choices(eschema, attrname):
    """return the possible values for 'attrname' as defined by its
    StaticVocabularyConstraint, or None when no such constraint exists
    """
    for constraint in eschema.constraints(attrname):
        if isinstance(constraint, StaticVocabularyConstraint):
            return constraint.vocabulary()
    return None
+    
+
def get_max_length(eschema, attrname):
    """return the maximum length allowed for 'attrname'

    Fall back to an arbitrary length of 300 when the attribute has no
    SizeConstraint (or a constraint without a max value).
    """
    for constraint in eschema.constraints(attrname):
        if isinstance(constraint, SizeConstraint) and constraint.max:
            return constraint.max
    return 300
+
def get_bounds(eschema, attrname):
    """return the (min, max) interval allowed for 'attrname', or
    (None, None) when it has no IntervalBoundConstraint
    """
    for constraint in eschema.constraints(attrname):
        if isinstance(constraint, IntervalBoundConstraint):
            return constraint.minvalue, constraint.maxvalue
    return None, None
+
+
# registry of already generated values keyed by (etype, attrname), used to
# enforce uniqueness for attributes declared unique in the schema
_GENERATED_VALUES = {}
+
class _ValueGenerator(object):
    """generates integers / dates / strings / etc. to fill a DB table"""

    def __init__(self, eschema, choice_func=None):
        """<choice_func> is a function that returns a list of possible
        choices for a given entity type and an attribute name. It should
        looks like :
            def values_for(etype, attrname):
                # some stuff ...
                return alist_of_acceptable_values # or None
        """
        self.e_schema = eschema
        self.choice_func = choice_func

    def _generate_value(self, attrname, index, **kwargs):
        if not self.e_schema.has_unique_values(attrname):
            return self.__generate_value(attrname, index, **kwargs)
        # unique attribute: bump index until an unseen value comes out, then
        # record it in the module-level _GENERATED_VALUES registry
        value = self.__generate_value(attrname, index, **kwargs)
        while value in _GENERATED_VALUES.get((self.e_schema.type, attrname), ()):
            index += 1
            value = self.__generate_value(attrname, index, **kwargs)
        _GENERATED_VALUES.setdefault((self.e_schema.type, attrname), set()).add(value)
        return value

    def __generate_value(self, attrname, index, **kwargs):
        """generates a consistent value for 'attrname'"""
        attrtype = str(self.e_schema.destination(attrname)).lower()
        # Before calling generate_%s functions, try to find values domain
        etype = self.e_schema.type
        if self.choice_func is not None:
            values_domain = self.choice_func(etype, attrname)
            if values_domain is not None:
                return choice(values_domain)
        # lookup order: generate_<Etype>_<attr>, then generate_Any_<attr>,
        # finally the per-type generate_<attrtype> fallback
        gen_func = getattr(self, 'generate_%s_%s' % (self.e_schema.type, attrname), None)
        if gen_func is None:
            gen_func = getattr(self, 'generate_Any_%s' % attrname, None)
        if gen_func is not None:
            return gen_func(index, **kwargs)
        # If no specific values domain, then generate a dummy value
        gen_func = getattr(self, 'generate_%s' % (attrtype))
        return gen_func(attrname, index, **kwargs)

    def generate_choice(self, attrname, index):
        """generates a consistent value for 'attrname' if it's a choice"""
        choices = get_choices(self.e_schema, attrname)
        if choices is None:
            return None
        return unicode(choice(choices)) # FIXME

    def generate_string(self, attrname, index, format=None):
        """generates a consistent value for 'attrname' if it's a string"""
        # First try to get choices
        choosed = self.generate_choice(attrname, index)
        if choosed is not None:
            return choosed
        # All other case, generate a default string
        attrlength = get_max_length(self.e_schema, attrname)
        num_len = numlen(index)
        if num_len >= attrlength:
            # the index alone fills the allowed length: base-26 encode it
            ascii = self.e_schema.rproperty(attrname, 'internationalizable')
            return ('&'+decompose_b26(index, ascii))[:attrlength]
        # always use plain text when no format is specified
        attrprefix = attrname[:max(attrlength-num_len-1, 0)]
        if format == 'text/html':
            value = u'<span>é%s<b>%d</b></span>' % (attrprefix, index)
        elif format == 'text/rest':
            value = u"""
title
-----

* %s
* %d
* é&
""" % (attrprefix, index)
        else:
            value = u'é&%s%d' % (attrprefix, index)
        return value[:attrlength]

    def generate_password(self, attrname, index):
        """generates a consistent value for 'attrname' if it's a password"""
        return u'toto'

    def generate_integer(self, attrname, index):
        """generates a consistent value for 'attrname' if it's an integer"""
        minvalue, maxvalue = get_bounds(self.e_schema, attrname)
        if maxvalue is not None and maxvalue <= 0 and minvalue is None:
            minvalue = maxvalue - index # i.e. randint(-index, 0)
        else:
            maxvalue = maxvalue or index
        return randint(minvalue or 0, maxvalue)

    generate_int = generate_integer

    def generate_float(self, attrname, index):
        """generates a consistent value for 'attrname' if it's a float"""
        return float(randint(-index, index))

    def generate_decimal(self, attrname, index):
        """generates a consistent value for 'attrname' if it's a decimal"""
        return Decimal(str(self.generate_float(attrname, index)))

    def generate_date(self, attrname, index):
        """generates a random date (format is 'yyyy-mm-dd')"""
        return DateTime(randint(2000, 2004), randint(1, 12), randint(1, 28))

    def generate_time(self, attrname, index):
        """generates a random time (format is ' HH:MM')"""
        return DateTimeDelta(0, 11, index%60) #'11:%02d' % (index % 60)

    def generate_datetime(self, attrname, index):
        """generates a random date (format is 'yyyy-mm-dd HH:MM')"""
        return DateTime(randint(2000, 2004), randint(1, 12), randint(1, 28), 11, index%60)


    def generate_bytes(self, attrname, index, format=None):
        """generates a fake file-like binary value"""
        # modpython way
        fakefile = Binary("%s%s" % (attrname, index))
        fakefile.filename = "file_%s" % attrname
        fakefile.value = fakefile.getvalue()
        return fakefile

    def generate_boolean(self, attrname, index):
        """generates a consistent value for 'attrname' if it's a boolean"""
        return index % 2 == 0

    def generate_Any_data_format(self, index, **kwargs):
        # data_format attribute of Image/File has no vocabulary constraint, we
        # need this method else stupid values will be set which make mtconverter
        # raise exception
        return u'application/octet-stream'

    def generate_Any_content_format(self, index, **kwargs):
        # content_format attribute of EmailPart has no vocabulary constraint, we
        # need this method else stupid values will be set which make mtconverter
        # raise exception
        return u'text/plain'

    def generate_Image_data_format(self, index, **kwargs):
        # data_format attribute of Image/File has no vocabulary constraint, we
        # need this method else stupid values will be set which make mtconverter
        # raise exception
        return u'image/png'
+
+
class autoextend(type):
    """metaclass copying methods of its classes onto _ValueGenerator

    Every callable defined in a class using this metaclass is also set on
    _ValueGenerator; generate_xxx callables must additionally accept at
    least one argument besides self.
    """
    def __new__(mcs, name, bases, classdict):
        for attrname, attrvalue in classdict.items():
            if callable(attrvalue):
                if attrname.startswith('generate_') and \
                       attrvalue.func_code.co_argcount < 2:
                    raise TypeError('generate_xxx must accept at least 1 argument')
                setattr(_ValueGenerator, attrname, attrvalue)
        return type.__new__(mcs, name, bases, classdict)
+
class ValueGenerator(_ValueGenerator):
    # thanks to the autoextend metaclass, methods defined on subclasses of
    # ValueGenerator are automatically propagated to _ValueGenerator
    __metaclass__ = autoextend
+
+
+def _default_choice_func(etype, attrname):
+    """default choice_func for insert_entity_queries"""
+    return None
+
def insert_entity_queries(etype, schema, vreg, entity_num,
                          choice_func=_default_choice_func):
    """returns a list of 'add entity' queries (couples query, args)
    :type etype: str
    :param etype: the entity's type

    :type schema: cubicweb.schema.Schema
    :param schema: the application schema

    :type entity_num: int
    :param entity_num: the number of entities to insert

    XXX FIXME: choice_func is here for *historical* reasons, it should
               probably replaced by a nicer way to specify choices
    :type choice_func: function
    :param choice_func: a function that takes an entity type, an attrname and
                        returns acceptable values for this attribute
    """
    # XXX HACK, remove or fix asap
    if etype in (('String', 'Int', 'Float', 'Boolean', 'Date', 'EGroup', 'EUser')):
        return []
    queries = []
    for index in xrange(entity_num):
        restrictions = []
        args = {}
        # one restriction 'X attr %(attr)s' per generated attribute
        for attrname, value in make_entity(etype, schema, vreg, index, choice_func).items():
            restrictions.append('X %s %%(%s)s' % (attrname, attrname))
            args[attrname] = value
        if restrictions:
            queries.append(('INSERT %s X: %s' % (etype, ', '.join(restrictions)),
                            args))
            assert not 'eid' in args, args
        else:
            # entity type without any generated attribute
            queries.append(('INSERT %s X' % etype, {}))
    return queries
+
+
def make_entity(etype, schema, vreg, index=0, choice_func=_default_choice_func,
                form=False):
    """generates a random entity and returns it as a dict

    by default, generate an entity to be inserted in the repository
    elif form, generate an form dictionnary to be given to a web controller
    """
    eschema = schema.eschema(etype)
    valgen = ValueGenerator(eschema, choice_func)
    entity = {}
    # preprocessing to deal with _format fields
    attributes = []
    relatedfields = {}
    for rschema, attrschema in eschema.attribute_definitions():
        attrname = rschema.type
        if attrname == 'eid':
            # don't specify eids !
            continue
        if attrname.endswith('_format') and attrname[:-7] in eschema.subject_relations():
            # remember the format field under its base attribute's name
            relatedfields[attrname[:-7]] = attrschema
        else:
            attributes.append((attrname, attrschema))
    for attrname, attrschema in attributes:
        if attrname in relatedfields:
            # first generate a format and record it
            format = valgen._generate_value(attrname + '_format', index)
            entity[attrname + '_format'] = format
            # then a value coherent with this format
            value = valgen._generate_value(attrname, index, format=format)
        else:
            value = valgen._generate_value(attrname, index)
        if form: # need to encode values
            if attrschema.type == 'Bytes':
                # twisted way
                fakefile = value
                filename = value.filename
                value = (filename, u"text/plain", fakefile)
            elif attrschema.type == 'Date':
                value = value.strftime(vreg.property_value('ui.date-format'))
            elif attrschema.type == 'Datetime':
                value = value.strftime(vreg.property_value('ui.datetime-format'))
            elif attrschema.type == 'Time':
                value = value.strftime(vreg.property_value('ui.time-format'))
            elif attrschema.type == 'Float':
                fmt = vreg.property_value('ui.float-format')
                value = fmt % value
            else:
                value = unicode(value)
        entity[attrname] = value
    return entity
+
+
+
+def select(constraints, cursor, selectvar='O'):
+    """returns list of eids matching <constraints>
+
+    <selectvar> should be either 'O' or 'S' to match schema definitions
+    """
+    try:
+        rset = cursor.execute('Any %s WHERE %s' % (selectvar, constraints))
+    except:
+        print "could restrict eid_list with given constraints (%r)" % constraints
+        return []
+    return set(eid for eid, in rset.rows)
+
+
+
def make_relations_queries(schema, edict, cursor, ignored_relations=(),
                           existingrels=None):
    """return the list of generated RQL queries inserting relations

    :param schema: the application schema

    :param edict: mapping between etypes and eids

    :param ignored_relations: list of relations to ignore (i.e. don't try
                              to generate insert queries for these relations)
    """
    generator = RelationsQueriesGenerator(schema, cursor, existingrels)
    return generator.compute_queries(edict, ignored_relations)
+
+
class RelationsQueriesGenerator(object):
    """generates SET queries linking existing entities while honouring the
    schema's cardinality constraints
    """
    rql_tmpl = 'SET S %s O WHERE S eid %%(subjeid)s, O eid %%(objeid)s'
    def __init__(self, schema, cursor, existing=None):
        self.schema = schema
        self.cursor = cursor
        # NOTE(review): make_relation_queries indexes this dict directly by
        # relation type, so with the {} default a missing relation type
        # raises KeyError -- confirm callers prefill it
        self.existingrels = existing or {}

    def compute_queries(self, edict, ignored_relations):
        queries = []
        #   1/ skip final relations and explictly ignored relations
        rels = [rschema for rschema in self.schema.relations()
                if not (rschema.is_final() or rschema in ignored_relations)]
        # for each relation
        #   2/ take each possible couple (subj, obj)
        #   3/ analyze cardinality of relation
        #      a/ if relation is mandatory, insert one relation
        #      b/ else insert N relations where N is the mininum
        #         of 20 and the number of existing targetable entities
        for rschema in rels:
            sym = set()
            # work on copies so that eids consumed for this relation type
            # don't impact the other relation types
            sedict = deepcopy(edict)
            oedict = deepcopy(edict)
            delayed = []
            # for each couple (subjschema, objschema), insert relations
            for subj, obj in rschema.iter_rdefs():
                sym.add( (subj, obj) )
                if rschema.symetric and (obj, subj) in sym:
                    continue
                subjcard, objcard = rschema.rproperty(subj, obj, 'cardinality')
                # process mandatory relations first
                if subjcard in '1+' or objcard in '1+': 
                    queries += self.make_relation_queries(sedict, oedict,
                                                          rschema, subj, obj)
                else:
                    delayed.append( (subj, obj) )
            for subj, obj in delayed:
                queries += self.make_relation_queries(sedict, oedict, rschema,
                                                      subj, obj)
        return queries

    def qargs(self, subjeids, objeids, subjcard, objcard, subjeid, objeid):
        """build the args dict for one (subjeid, objeid) couple, consuming
        the eids whose cardinality forbids linking them more than once
        """
        if subjcard in '?1':
            subjeids.remove(subjeid)
        if objcard in '?1':
            objeids.remove(objeid)
        return {'subjeid' : subjeid, 'objeid' : objeid}

    def make_relation_queries(self, sedict, oedict, rschema, subj, obj):
        """yield (query, args) couples inserting rschema relations between
        entities of type subj and entities of type obj
        """
        subjcard, objcard = rschema.rproperty(subj, obj, 'cardinality')
        subjeids = sedict.get(subj, frozenset())
        used = self.existingrels[rschema.type]
        preexisting_subjrels = set(subj for subj, obj in used)
        preexisting_objrels = set(obj for subj, obj in used)
        # if there are constraints, only select appropriate objeids
        q = self.rql_tmpl % rschema.type
        constraints = [c for c in rschema.rproperty(subj, obj, 'constraints')
                       if isinstance(c, RQLConstraint)]
        if constraints:
            restrictions = ', '.join(c.restriction for c in constraints)
            q += ', %s' % restrictions
            # restrict object eids if possible
            objeids = select(restrictions, self.cursor)
        else:
            objeids = oedict.get(obj, frozenset())
        if subjcard in '?1' or objcard in '?1':
            # drop eids already engaged in a preexisting relation when the
            # cardinality doesn't allow more than one
            for subjeid, objeid in used:
                if subjcard in '?1' and subjeid in subjeids:
                    subjeids.remove(subjeid)
                    if objeid in objeids:
                        objeids.remove(objeid)
                if objcard in '?1' and objeid in objeids:
                    objeids.remove(objeid)
                    if subjeid in subjeids:
                        subjeids.remove(subjeid)
        if not subjeids:
            check_card_satisfied(objcard, objeids, subj, rschema, obj)
            return 
        if not objeids:
            check_card_satisfied(subjcard, subjeids, subj, rschema, obj)
            return
        if subjcard in '?1+':
            for subjeid in tuple(subjeids):
                # do not insert relation if this entity already has a relation
                if subjeid in preexisting_subjrels:
                    continue
                objeid = choose_eid(objeids, subjeid)
                if objeid is None or (subjeid, objeid) in used:
                    continue
                yield q, self.qargs(subjeids, objeids, subjcard, objcard,
                                    subjeid, objeid)
                used.add( (subjeid, objeid) )
                if not objeids:
                    check_card_satisfied(subjcard, subjeids, subj, rschema, obj)
                    break
        elif objcard in '?1+':
            for objeid in tuple(objeids):
                # do not insert relation if this entity already has a relation
                if objeid in preexisting_objrels:
                    continue
                subjeid = choose_eid(subjeids, objeid)
                if subjeid is None or (subjeid, objeid) in used:
                    continue
                yield q, self.qargs(subjeids, objeids, subjcard, objcard,
                                    subjeid, objeid)
                used.add( (subjeid, objeid) )
                if not subjeids:
                    check_card_satisfied(objcard, objeids, subj, rschema, obj)
                    break
        else:
            # unconstrained cardinalities: insert up to 20 random couples
            # FIXME: 20 should be read from config
            subjeidsiter = [choice(tuple(subjeids)) for i in xrange(min(len(subjeids), 20))]
            objeidsiter = [choice(tuple(objeids)) for i in xrange(min(len(objeids), 20))]
            for subjeid, objeid in zip(subjeidsiter, objeidsiter):
                if subjeid != objeid and not (subjeid, objeid) in used:
                    used.add( (subjeid, objeid) )
                    yield q, self.qargs(subjeids, objeids, subjcard, objcard,
                                        subjeid, objeid)
+                    
+def check_card_satisfied(card, remaining, subj, rschema, obj):
+    if card in '1+' and remaining:
+        raise Exception("can't satisfy cardinality %s for relation %s %s %s"
+                        % (card, subj, rschema, obj))
+
def choose_eid(values, avoid):
    """randomly pick an eid in `values` different from `avoid`

    Return None when the only available value is `avoid` itself.
    """
    candidates = tuple(values)
    if candidates == (avoid,):
        return None
    picked = choice(candidates)
    while picked == avoid: # avoid infinite recursion like in X comment X
        picked = choice(candidates)
    return picked
+                    
+                
+
+# UTILITIES FUNCS ##############################################################
def make_tel(num_tel):
    """takes an integer, converts it as a string and inserts
    white spaces each 2 chars (french notation)

    note: spaces are inserted at the fixed positions 2, 4 and 6, i.e. the
    result is tailored for 8-digit numbers
    """
    digits = list(str(num_tel))
    # insert from the rightmost position so earlier inserts don't shift it
    for pos in (6, 4, 2):
        digits.insert(pos, ' ')
    return ''.join(digits)
+
+
def numlen(number):
    """return how many characters `number` takes once stringified"""
    return len('%s' % number)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/devtools/fix_po_encoding	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,38 @@
+#!/usr/bin/python
+
+"""usage: fix-po-encodings [filename...]
+change the encoding of the po files passed as arguments to utf-8
+"""
+import sys
+import re
+import codecs
+
def change_encoding(filename, target='UTF-8'):
    """rewrite the po file `filename` so that it is encoded in `target`

    Both the file's bytes and the charset declaration of its Content-Type
    header are converted; nothing is done when the file already declares
    the target encoding.
    """
    fdesc = open(filename)
    data = fdesc.read()
    fdesc.close()
    encoding = find_encoding(data)
    if encoding == target:
        return
    # update the declaration first (an ascii-only substitution), then decode
    # the whole content using its original encoding
    data = fix_encoding(data, target)
    data = unicode(data, encoding)
    # codecs.open re-encodes the unicode data to `target` on write
    fdesc = codecs.open(filename, 'wb', encoding=target)
    fdesc.write(data)
    fdesc.close()
+
def find_encoding(data):
    """return the charset declared in the po file's Content-Type header

    raise ValueError when no charset declaration can be found
    """
    match = re.search(r'"Content-Type:.* charset=([a-zA-Z0-9-]+)\\n"', data, re.M)
    if match is None:
        raise ValueError('No encoding declaration')
    return match.group(1)
+
def fix_encoding(data, target_encoding):
    """return `data` with the charset of its po Content-Type header
    replaced by `target_encoding` (the rest of the content is untouched)
    """
    content_type_rgx = re.compile(r'("Content-Type:.* charset=)(.*)(\\n")', re.M)
    return content_type_rgx.sub(r'\1%s\3' % target_encoding, data)
+    
+
+
# convert every po file given on the command line to utf-8
for filename in sys.argv[1:]:
    print filename
    change_encoding(filename)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/devtools/htmlparser.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,181 @@
+"""defines a validating HTML parser used in web application tests"""
+
+import re
+from StringIO import StringIO
+
+from lxml import etree
+from lxml.builder import E
+
+from cubicweb.common.view import STRICT_DOCTYPE, TRANSITIONAL_DOCTYPE, CW_XHTML_EXTENSIONS
+
+# expand the doctype templates with the cubicweb xhtml extensions once,
+# as plain strings, for fast containment tests in preprocess_data()
+STRICT_DOCTYPE = str(STRICT_DOCTYPE % CW_XHTML_EXTENSIONS).strip()
+TRANSITIONAL_DOCTYPE = str(TRANSITIONAL_DOCTYPE % CW_XHTML_EXTENSIONS).strip()
+
+# NOTE(review): ERR_COUNT is never used in this module -- confirm before removing
+ERR_COUNT = 0
+
+class Validator(object):
+    """Base class for page validators: parse a string with an lxml parser
+    (set by subclasses) and wrap the resulting tree in a PageInfo."""
+    
+    def parse_string(self, data, sysid=None):
+        """Parse *data* and return a PageInfo; on syntax error raise an
+        AssertionError carrying the original error position."""
+        try:
+            data = self.preprocess_data(data)
+            return PageInfo(data, etree.fromstring(data, self.parser))
+        except etree.XMLSyntaxError, exc:
+            # NOTE(review): save_in is defined but never called here;
+            # presumably a debugging helper to dump the bad page -- confirm
+            def save_in(fname=''):
+                file(fname, 'w').write(data)
+            new_exc = AssertionError(u'invalid xml %s' % exc)
+            new_exc.position = exc.position
+            raise new_exc
+
+    def preprocess_data(self, data):
+        """Hook for subclasses to massage *data* before parsing."""
+        return data
+
+
+class DTDValidator(Validator):
+    """Validator performing full DTD validation of the page."""
+    def __init__(self):
+        Validator.__init__(self)
+        self.parser = etree.XMLParser(dtd_validation=True)
+
+    def preprocess_data(self, data):
+        """used to fix potential blockquote mess generated by docutils"""
+        if STRICT_DOCTYPE not in data:
+            return data
+        # parse using transitional DTD
+        data = data.replace(STRICT_DOCTYPE, TRANSITIONAL_DOCTYPE)
+        tree = etree.fromstring(data, self.parser)
+        namespace = tree.nsmap.get(None)
+        # this is the list of authorized child tags for <blockquote> nodes
+        expected = 'p h1 h2 h3 h4 h5 h6 div ul ol dl pre hr blockquote address ' \
+                   'fieldset table form noscript ins del script'.split()
+        if namespace:
+            # qualify tag lookups with the document's default namespace
+            blockquotes = tree.findall('.//{%s}blockquote' % namespace)
+            expected = ['{%s}%s' % (namespace, tag) for tag in expected]
+        else:
+            blockquotes = tree.findall('.//blockquote')
+        # quick and dirty approach: remove all blockquotes
+        for blockquote in blockquotes:
+            parent = blockquote.getparent()
+            parent.remove(blockquote)
+##         # for each blockquote, wrap unauthorized child in a div
+##         for blockquote in blockquotes:
+##             if len(blockquote):
+##                 needs_wrap = [(index, child) for index, child in enumerate(blockquote)
+##                               if child.tag not in expected]
+##                 for index, child in needs_wrap:
+##                     # the child is automatically popped from blockquote when
+##                     # its parent is changed
+##                     div = E.div(child)
+##                     blockquote.insert(index, div)
+##             elif blockquote.text:
+##                 div = E.div(blockquote.text)
+##                 blockquote.text = None
+##                 blockquote.append(div)
+        # re-serialize with the strict doctype restored
+        data = etree.tostring(tree)
+        return '<?xml version="1.0" encoding="UTF-8"?>%s\n%s' % (STRICT_DOCTYPE, data)
+
+   
+class SaxOnlyValidator(Validator):
+    """Validator checking well-formedness only (no DTD validation)."""
+
+    def __init__(self):
+        Validator.__init__(self)
+        self.parser = etree.XMLParser()
+
+class HTMLValidator(Validator):
+    """Validator using lxml's lenient HTML parser."""
+
+    def __init__(self):
+        Validator.__init__(self)
+        self.parser = etree.HTMLParser()
+
+    
+
+class PageInfo(object):
+    """holds various informations on the view's output"""
+    def __init__(self, source, root):
+        # raw page source and its parsed lxml tree
+        self.source = source
+        self.etree = root
+        # NOTE(review): duplicate of the assignment above -- confirm before removing
+        self.source = source
+        # concatenation of all text nodes, used by appears()
+        self.raw_text = u''.join(root.xpath('//text()'))
+        self.namespace = self.etree.nsmap
+        self.default_ns = self.namespace.get(None)
+        # pre-extracted tags of interest (see find_tag for the format)
+        self.a_tags = self.find_tag('a')
+        self.h1_tags = self.find_tag('h1')
+        self.h2_tags = self.find_tag('h2')
+        self.h3_tags = self.find_tag('h3')
+        self.h4_tags = self.find_tag('h4')
+        self.input_tags = self.find_tag('input')
+        # indexed by heading level - 1 (see has_title)
+        self.title_tags = [self.h1_tags, self.h2_tags, self.h3_tags, self.h4_tags]
+        
+    def find_tag(self, tag):
+        """return a list which contains text of all "tag" elements
+
+        For 'a' and 'input' tags, return (text, attributes-dict) pairs
+        instead, so callers can inspect href/name/value.
+        """
+        if self.default_ns is None:
+            iterstr = ".//%s" % tag
+        else:
+            iterstr = ".//{%s}%s" % (self.default_ns, tag)
+        if tag in ('a', 'input'):
+            return [(elt.text, elt.attrib) for elt in self.etree.iterfind(iterstr)]
+        return [u''.join(elt.xpath('.//text()')) for elt in self.etree.iterfind(iterstr)]
+         
+    def appears(self, text):
+        """returns True if <text> appears in the page"""
+        return text in self.raw_text
+
+    def __contains__(self, text):
+        # membership test against the raw (unparsed) page source
+        return text in self.source
+    
+    def has_title(self, text, level=None):
+        """returns True if <h?>text</h?>
+
+        :param level: the title's level (1 for h1, 2 for h2, etc.)
+        """
+        if level is None:
+            # search every heading level
+            for hlist in self.title_tags:
+                if text in hlist:
+                    return True
+            return False
+        else:
+            hlist = self.title_tags[level - 1]
+            return text in hlist
+
+    def has_title_regexp(self, pattern, level=None):
+        """returns True if <h?>pattern</h?>"""
+        sre = re.compile(pattern)
+        if level is None:
+            for hlist in self.title_tags:
+                for title in hlist:
+                    if sre.match(title):
+                        return True
+            return False
+        else:
+            hlist = self.title_tags[level - 1]
+            for title in hlist:
+                if sre.match(title):
+                    return True
+            return False
+    
+    def has_link(self, text, url=None):
+        """returns True if <a href=url>text</a> was found in the page"""
+        for link_text, attrs in self.a_tags:
+            if text == link_text:
+                if url is None:
+                    return True
+                try:
+                    href = attrs['href']
+                    if href == url:
+                        return True
+                except KeyError:
+                    # anchor without href: keep looking
+                    continue
+        return False
+    
+    def has_link_regexp(self, pattern, url=None):
+        """returns True if <a href=url>pattern</a> was found in the page"""
+        sre = re.compile(pattern)
+        for link_text, attrs in self.a_tags:
+            if sre.match(link_text):
+                if url is None:
+                    return True
+                try:
+                    href = attrs['href']
+                    if href == url:
+                        return True
+                except KeyError:
+                    continue
+        return False
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/devtools/livetest.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,148 @@
+"""provide utilities for web (live) unit testing"""
+
+import socket
+import logging
+from os.path import join, dirname, exists
+from StringIO import StringIO
+
+#from twisted.application import service, strports
+# from twisted.internet import reactor, task
+from twisted.web2 import channel
+from twisted.web2 import server
+from twisted.web2 import static
+from twisted.internet import reactor
+from twisted.internet.error import CannotListenError
+
+from logilab.common.testlib import TestCase
+
+import cubicweb.web
+from cubicweb.dbapi import in_memory_cnx
+from cubicweb.etwist.server import CubicWebRootResource
+from cubicweb.devtools import LivetestConfiguration, init_test_database
+
+
+
+def get_starturl(port=7777, login=None, passwd=None):
+    """Return the URL of the test instance, with credentials in the query
+    string when a login is given."""
+    if login:
+        return 'http://%s:%s/view?login=%s&password=%s' % (socket.gethostname(), port, login, passwd)
+    else:
+        return 'http://%s:%s/' % (socket.gethostname(), port)
+
+
+class LivetestResource(CubicWebRootResource):
+    """redefines main resource to search for data files in several directories"""
+
+    def locateChild(self, request, segments):
+        """Indicate which resource to use to process down the URL's path"""
+        if len(segments) and segments[0] == 'data':
+            # Anything in data/ is treated as static files; try the
+            # instance data dir first, then cubicweb.web's own data dir
+            dirlist = [self.data_dir, join(dirname(cubicweb.web.__file__), 'data')]
+            for alternative in dirlist:
+                filepath = join(alternative, *segments[1:]) 
+                if exists(filepath):
+                    self.info('publish static file: %s', '/'.join(segments))
+                    return static.File(filepath), ()
+        # Otherwise we use this single resource
+        return self, ()
+    
+    
+    
+def make_site(cube, options=None):
+    """Build and register a live test site for *cube*: initialize a test
+    database and bind the web resource on the first free port in 7777-7797.
+
+    NOTE(review): despite the default, options must not be None -- its
+    sourcefile/pyro_name attributes are read unconditionally; confirm callers.
+    """
+    from cubicweb.etwist import twconfig # trigger configuration registration
+    sourcefile = options.sourcefile
+    config = LivetestConfiguration(cube, sourcefile,
+                                   pyro_name=options.pyro_name,
+                                   log_threshold=logging.DEBUG)
+    source = config.sources()['system']
+    init_test_database(driver=source['db-driver'], config=config)
+    # if '-n' in sys.argv: # debug mode
+    cubicweb = LivetestResource(config, debug=True)
+    toplevel = cubicweb
+    website = server.Site(toplevel)
+    cube_dir = config.cube_dir(cube)
+    # probe ports until one is free; save the connection parameters so
+    # test cases can find the instance later (see loadconf)
+    for port in xrange(7777, 7798):
+        try:
+            reactor.listenTCP(port, channel.HTTPFactory(website))
+            saveconf(cube_dir, port, source['db-user'], source['db-password'])
+            break
+        except CannotListenError, exc:
+            print "port %s already in use, I will try another one" % port
+    else:
+        # no free port found in the range: re-raise the last error
+        raise
+    cubicweb.base_url = get_starturl(port=port)
+    print "you can go here : %s" % cubicweb.base_url
+
+def runserver():
+    """Start the twisted reactor (blocks until the reactor stops)."""
+    reactor.run()
+
+def saveconf(templhome, port, user, passwd):
+    """Pickle (port, user, passwd, start-url) into <templhome>/test/livetest.conf
+    so that test cases can reconnect to the running instance."""
+    import pickle
+    conffile = file(join(templhome, 'test', 'livetest.conf'), 'w')
+    
+    pickle.dump((port, user, passwd, get_starturl(port, user, passwd)),
+                conffile)
+    conffile.close()
+
+
+def loadconf(filename='livetest.conf'):
+    """Return the (port, user, passwd, url) tuple written by saveconf()."""
+    import pickle
+    return pickle.load(file(filename))
+
+
+def execute_scenario(filename, **kwargs):
+    """based on twill.parse.execute_file, but inserts cubicweb extensions"""
+    from twill.parse import _execute_script
+    # prepend the extension loading directive to the scenario content
+    stream = StringIO('extend_with cubicweb.devtools.cubicwebtwill\n' + file(filename).read())
+    kwargs['source'] = filename
+    _execute_script(stream, **kwargs)
+
+
+def hijack_twill_output(new_output):
+    """Redirect twill's command and browser output streams to *new_output*."""
+    from twill import commands as twc
+    from twill import browser as twb
+    twc.OUT = new_output
+    twb.OUT = new_output
+    
+    
+class LiveTestCase(TestCase):
+    """Base class for live web tests: connects to the instance registered
+    by make_site()/saveconf() and captures twill's verbose output.
+
+    Subclasses must set `cube` and may set `sourcefile`, and can override
+    setup_db()/teardown_db() to prepare their environment.
+    """
+
+    # path to a sources file, or None to use the default test sources
+    sourcefile = None
+    # name of the cube under test; must be set by subclasses
+    cube = ''
+    def setUp(self):
+        assert self.cube, "You must specify a cube in your testcase"
+        # twill can be quite verbose ...
+        self.twill_output = StringIO()
+        hijack_twill_output(self.twill_output)
+        # build a config, and get a connection
+        self.config = LivetestConfiguration(self.cube, self.sourcefile)
+        _, user, passwd, _ = loadconf()
+        self.repo, self.cnx = in_memory_cnx(self.config, user, passwd)
+        self.setup_db(self.cnx)
+
+    def tearDown(self):
+        self.teardown_db(self.cnx)
+    
+
+    def setup_db(self, cnx):
+        """override setup_db() to setup your environment"""
+
+    def teardown_db(self, cnx):
+        """override teardown_db() to clean up your environment"""
+
+    def get_loggedurl(self):
+        """Return the start URL with credentials, as saved by saveconf()."""
+        port, user, passwd, logged_url = loadconf()
+        return logged_url
+
+    def get_anonurl(self):
+        """Return the start URL using the anonymous account."""
+        port, _, _, _ = loadconf()
+        return 'http://%s:%s/view?login=anon&password=anon' % (
+            socket.gethostname(), port)
+
+    # convenience
+    execute_scenario = staticmethod(execute_scenario)
+
+
+if __name__ == '__main__':  # run as a script: just start the reactor
+    runserver()
+
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/devtools/migrtest.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,152 @@
+"""Migration test script
+
+* migration will be played into a chroot of the local machine
+* the database server used can be configured
+* the tested instance may be on another host
+
+
+We are using postgres'.pgpass file. Here is a copy of postgres documentation
+about that:
+
+The file .pgpass in a user's home directory or the file referenced by
+PGPASSFILE can contain passwords to be used if the connection requires
+a password (and no password has been specified otherwise).
+
+
+This file should contain lines of the following format:
+
+hostname:port:database:username:password
+
+Each of the first four fields may be a literal value, or *, which
+matches anything. The password field from the first line that matches
+the current connection parameters will be used. (Therefore, put
+more-specific entries first when you are using wildcards.) If an entry
+needs to contain : or \, escape this character with \. A hostname of
+localhost matches both host (TCP) and local (Unix domain socket)
+connections coming from the local machine.
+
+The permissions on .pgpass must disallow any access to world or group;
+achieve this by the command chmod 0600 ~/.pgpass. If the permissions
+are less strict than this, the file will be ignored. 
+
+:organization: Logilab
+:copyright: 2001-2006 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from os import system
+from os.path import join, basename
+
+from logilab.common.shellutils import cp, rm
+
+from cubicweb.toolsutils import read_config
+from cubicweb.server.serverctl import generate_sources_file
+
+# XXXX use db-copy instead
+
+# test environment configuration
+# path of the chroot where the migration is replayed
+chrootpath = '/sandbox/cubicwebtest'
+# database server host/credentials used for the temporary database
+tmpdbhost = 'crater'
+tmpdbuser = 'syt' 
+tmpdbpasswd = 'syt'
+
+def play_migration(applhome, applhost='', sudo=False):
+    applid = dbname = basename(applhome)
+    testapplhome = join(chrootpath, applhome)
+    # copy instance into the chroot
+    if applhost:
+        system('scp -r %s:%s %s' % (applhost, applhome, testapplhome))
+    else:
+        cp(applhome, testapplhome)
+##     # extract db parameters
+##     sources = read_config(join(testapplhome, 'sources'))
+##     dbname = sources['system']['db-name']
+##     dbhost = sources['system'].get('db-host') or ''
+##     dbuser = sources['system'].get('db-user') or ''
+##     dbpasswd = sources['system'].get('db-password') or ''
+    # generate sources file
+    # XXX multisources
+    sources = {'system': {}}
+    sources['system']['db-encoding'] = 'UTF8' # XXX
+    sources['system']['db-name'] = dbname
+    sources['system']['db-host'] = None
+    sources['system']['db-user'] = tmpdbuser
+    sources['system']['db-password'] = None
+    generate_sources_file(join(testapplhome, 'sources'), sources)
+##     # create postgres password file so we won't need anymore passwords
+##     # XXX may exist!
+##     pgpassfile = expanduser('~/.pgpass')
+##     pgpass = open(pgpassfile, 'w')
+##     if dbpasswd:
+##         pgpass.write('%s:*:%s:%s:%s\n' % (dbhost or applhost or 'localhost',
+##                                           dbname, dbuser, dbpasswd))
+##     if tmpdbpasswd:
+##         pgpass.write('%s:*:%s:%s:%s\n' % (tmpdbhost or 'localhost', dbname,
+##                                           tmpdbuser, tmpdbpasswd))
+##     pgpass.close()
+##     chmod(pgpassfile, 0600)
+    # dump db
+##     dumpcmd = 'pg_dump -Fc -U %s -f /tmp/%s.dump %s' % (
+##         dbuser, dbname, dbname)
+##     if dbhost:
+##         dumpcmd += ' -h %s' % dbhost
+    dumpfile = '/tmp/%s.dump' % applid
+    dumpcmd = 'cubicweb-ctl db-dump --output=%s %s' % (dumpfile, applid)
+    if sudo:
+        dumpcmd = 'sudo %s' % dumpcmd
+    if applhost:
+        dumpcmd = 'ssh %s "%s"' % (applhost, dumpcmd)
+    if system(dumpcmd):
+        raise Exception('error while dumping the database')
+##     if not dbhost and applhost:
+    if applhost:
+        # retrieve the dump
+        if system('scp %s:%s %s' % (applhost, dumpfile, dumpfile)):
+            raise Exception('error while retreiving the dump')
+    # move the dump into the chroot
+    system('mv %s %s%s' % (dumpfile, chrootpath, dumpfile))
+    # locate installed versions
+    vcconf = read_config(join(testapplhome, 'vc.conf'))
+    template = vcconf['TEMPLATE']
+    cubicwebversion = vcconf['CW']
+    templversion = vcconf['TEMPLATE_VERSION']
+    # install the same versions cubicweb and template versions into the chroot
+    system('sudo chroot %s apt-get update' % chrootpath)
+    system('sudo chroot %s apt-get install cubicweb-server=%s cubicweb-client=%s'
+           % (chrootpath, cubicwebversion, cubicwebversion))
+    system('sudo chroot %s apt-get install cubicweb-%s-appl-server=%s cubicweb-%s-appl-client=%s'
+           % (chrootpath, template, templversion, template, templversion))
+    # update and upgrade to the latest version
+    system('sudo chroot %s apt-get install cubicweb-server cubicweb-client' % chrootpath)
+    system('sudo chroot %s apt-get install cubicweb-%s-appl-server cubicweb-%s-appl-client'
+           % (chrootpath, template, template))
+    # create and fill the database
+    system('sudo chroot cubicweb-ctl db-restore %s %s' % (applid, dumpfile))
+##     if not tmpdbhost:
+##         system('createdb -U %s -T template0 -E UTF8 %s' % (tmpdbuser, dbname))
+##         system('pg_restore -U %s -O -Fc -d %s /tmp/%s.dump'
+##                % (tmpdbuser, dbname, dbname))
+##     else:
+##         system('createdb -h %s -U %s -T template0 -E UTF8 %s'
+##                % (tmpdbhost, tmpdbuser, dbname))
+##         system('pg_restore -h %s -U %s -O -Fc -d %s /tmp/%s.dump'
+##                % (tmpdbhost, tmpdbuser, dbname, dbname))
+    # launch upgrade
+    system('sudo chroot %s cubicweb-ctl upgrade %s' % (chrootpath, applid))
+
+    # cleanup
+    rm(testapplhome)
+##     rm(pgpassfile)
+##     if tmpdbhost:
+##         system('dropdb -h %s -U %s %s' % (tmpdbuser, tmpdbhost, dbname))
+##     else:
+##         system('dropdb -U %s %s' % (tmpdbuser, dbname))
+##     if not dbhost and applhost:
+    if applhost:
+        system('ssh %s rm %s' % (applhost, dumpfile))
+    rm('%s%s' % (chrootpath, dumpfile))
+
+
+if __name__ == '__main__':
+    # NOTE(review): hard-coded instance path and host, developer-specific
+    play_migration('/etc/cubicweb.d/jpl', 'lepus')
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/devtools/pkginfo.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,120 @@
+"""distutils / __pkginfo__ helpers for cubicweb applications"""
+
+import os
+from os.path import isdir, join
+
+
+def get_distutils_datafiles(cube, i18n=True, recursive=False):
+    """Return the full data_files list for a cube's setup.py.
+
+    :param cube: application cube's name
+    :param i18n: include i18n catalogs when True
+    :param recursive: recurse into views' subdirectories when True
+    """
+    data_files = []
+    data_files += get_basepyfiles(cube)
+    data_files += get_webdatafiles(cube)
+    if i18n:
+        data_files += get_i18nfiles(cube)
+    data_files += get_viewsfiles(cube, recursive=recursive)
+    data_files += get_migrationfiles(cube)
+    data_files += get_schemafiles(cube)
+    return data_files
+
+
+
+## listdir filter funcs ################################################
+def nopyc_and_nodir(fname):
+    """Filter out directories, compiled files and editor backups."""
+    if isdir(fname) or fname.endswith('.pyc') or fname.endswith('~'):
+        return False
+    return True
+
+def no_version_control(fname):
+    """Filter out version control directories and editor backups."""
+    if fname in ('CVS', '.svn', '.hg'):
+        return False
+    if fname.endswith('~'):
+        return False
+    return True
+
+def basepy_files(fname):
+    """Keep python source files, except setup.py."""
+    if fname.endswith('.py') and fname != 'setup.py':
+        return True
+    return False
+
+def chain(*filters):
+    """Combine several filter functions into one that accepts a name
+    only when every filter accepts it (logical AND)."""
+    def newfilter(fname):
+        for filterfunc in filters:
+            if not filterfunc(fname):
+                return False
+        return True
+    return newfilter
+
+def listdir_with_path(path='.', filterfunc=None):
+    """Like os.listdir but return paths joined with *path*, optionally
+    keeping only entries accepted by *filterfunc*."""
+    if filterfunc:
+        return [join(path, fname) for fname in os.listdir(path) if filterfunc(join(path, fname))]
+    else:
+        return [join(path, fname) for fname in os.listdir(path)]
+
+
+## data_files helpers ##################################################
+# installation prefix for cube data files (relative to the install root)
+CUBES_DIR = join('share', 'cubicweb', 'cubes')
+
+def get_i18nfiles(cube):
+    """returns i18n files in a suitable format for distutils's
+    data_files parameter
+    """
+    i18ndir = join(CUBES_DIR, cube, 'i18n')
+    potfiles = [(i18ndir, listdir_with_path('i18n', chain(no_version_control, nopyc_and_nodir)))]
+    return potfiles
+
+
+def get_viewsfiles(cube, recursive=False):
+    """returns views files in a suitable format for distutils's
+    data_files parameter
+
+    :param recursive: include views' subdirs recursively if True
+    """
+    if recursive:
+        datafiles = []
+        for dirpath, dirnames, filenames in os.walk('views'):
+            filenames = [join(dirpath, fname) for fname in filenames
+                         if nopyc_and_nodir(join(dirpath, fname))]
+            # map the local source dir onto its install destination
+            dirpath = join(CUBES_DIR, cube, dirpath)
+            datafiles.append((dirpath, filenames))
+        return datafiles
+    else:
+        viewsdir = join(CUBES_DIR, cube, 'views')
+        return [(viewsdir,
+                 listdir_with_path('views', filterfunc=nopyc_and_nodir))]
+
+
+def get_basepyfiles(cube):
+    """returns cube's base python scripts (tali18n.py, etc.)
+    in a suitable format for distutils's data_files parameter
+    """
+    return [(join(CUBES_DIR, cube),
+             [fname for fname in os.listdir('.')
+              if fname.endswith('.py') and fname != 'setup.py'])]
+
+
+def get_webdatafiles(cube):
+    """returns web's data files (css, png, js, etc.) in a suitable
+    format for distutils's data_files parameter
+    """
+    return [(join(CUBES_DIR, cube, 'data'),
+             listdir_with_path('data', filterfunc=no_version_control))]
+
+
+def get_migrationfiles(cube):
+    """returns cube's migration scripts
+    in a suitable format for distutils's data_files parameter
+    """
+    return [(join(CUBES_DIR, cube, 'migration'),
+             listdir_with_path('migration', no_version_control))]
+
+
+def get_schemafiles(cube):
+    """returns cube's schema files
+    in a suitable format for distutils's data_files parameter
+    """
+    return [(join(CUBES_DIR, cube, 'schema'),
+             listdir_with_path('schema', no_version_control))]
+
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/devtools/repotest.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,310 @@
+"""some utilities to ease repository testing
+
+This module contains functions to initialize a new repository.
+
+:organization: Logilab
+:copyright: 2003-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from pprint import pprint
+
+def tuplify(list):
+    """Convert non-tuple items of *list* to tuples, in place, and return it.
+
+    NOTE(review): the parameter shadows the builtin `list`
+    """
+    for i in range(len(list)):
+        if type(list[i]) is not type(()):
+            list[i] = tuple(list[i])
+    return list
+
+def snippet_cmp(a, b):
+    """Comparison function for (something, [rql-expressions]) snippet pairs,
+    comparing expressions by their textual form for deterministic ordering."""
+    a = (a[0], [e.expression for e in a[1]])
+    b = (b[0], [e.expression for e in b[1]])
+    return cmp(a, b)
+
+def test_plan(self, rql, expected, kwargs=None):
+    """Build a plan for *rql* and compare its steps to *expected*.
+
+    Meant to be used as a method of a test case providing _prepare_plan
+    and a planner (hence the explicit *self* parameter).
+    """
+    plan = self._prepare_plan(rql, kwargs)
+    self.planner.build_plan(plan)
+    try:
+        self.assertEquals(len(plan.steps), len(expected),
+                          'expected %s steps, got %s' % (len(expected), len(plan.steps)))
+        # step order is important
+        for i, step in enumerate(plan.steps):
+            compare_steps(self, step.test_repr(), expected[i])
+    except AssertionError:
+        # dump the actual steps to ease debugging before re-raising
+        pprint([step.test_repr() for step in plan.steps])
+        raise
+
+def compare_steps(self, step, expected):
+    """Recursively compare a step's test representation with the expected
+    one: type, optional (rql, solutions) query list, characteristics, and
+    child steps (order-insensitive for union steps)."""
+    try:
+        self.assertEquals(step[0], expected[0], 'expected step type %s, got %s' % (expected[0], step[0]))
+        if len(step) > 2 and isinstance(step[1], list) and isinstance(expected[1], list):
+            queries, equeries = step[1], expected[1]
+            self.assertEquals(len(queries), len(equeries),
+                              'expected %s queries, got %s' % (len(equeries), len(queries)))
+            for i, (rql, sol) in enumerate(queries):
+                self.assertEquals(rql, equeries[i][0])
+                self.assertEquals(sol, equeries[i][1])
+            idx = 2
+        else:
+            idx = 1
+        self.assertEquals(step[idx:-1], expected[idx:-1],
+                          'expected step characteristic \n%s\n, got\n%s' % (expected[1:-1], step[1:-1]))
+        self.assertEquals(len(step[-1]), len(expected[-1]),
+                          'got %s child steps, expected %s' % (len(step[-1]), len(expected[-1])))
+    except AssertionError:
+        print 'error on step ',
+        pprint(step[:-1])
+        raise
+    children = step[-1]
+    if step[0] in ('UnionFetchStep', 'UnionStep'):
+        # sort children since union child order is not significant
+        children = sorted(children)
+        expectedchildren = sorted(expected[-1])
+    else:
+        expectedchildren = expected[-1]
+    for i, substep in enumerate(children):
+        compare_steps(self, substep, expectedchildren[i])
+
+
+class DumbOrderedDict(list):
+    """A list of (key, value) pairs exposing a read-only dict-like
+    interface while preserving insertion order (used to make test
+    output deterministic). Lookup is linear."""
+    def __iter__(self):
+        return self.iterkeys()
+    def __contains__(self, key):
+        return key in self.iterkeys()
+    def __getitem__(self, key):
+        for key_, value in self.iteritems():
+            if key == key_:
+                return value
+        raise KeyError(key)
+    def iterkeys(self):
+        return (x for x, y in list.__iter__(self))
+    def iteritems(self):
+        # items *are* the underlying list elements
+        return (x for x in list.__iter__(self))
+
+
+from logilab.common.testlib import TestCase
+from rql import RQLHelper
+from cubicweb.devtools.fake import FakeRepo, FakeSession
+from cubicweb.server import set_debug
+from cubicweb.server.querier import QuerierHelper
+from cubicweb.server.session import Session
+from cubicweb.server.sources.rql2sql import remove_unused_solutions
+
+class RQLGeneratorTC(TestCase):
+    """Base test case for RQL generation tests: provides rql parsing and
+    planning helpers over a schema set by the concrete test class, with
+    planner internals monkey-patched for deterministic results."""
+    schema = None # set this in concret test
+    
+    def setUp(self):
+        self.rqlhelper = RQLHelper(self.schema, special_relations={'eid': 'uid',
+                                                                   'has_text': 'fti'})
+        self.qhelper = QuerierHelper(FakeRepo(self.schema), self.schema)
+        # patch planner internals (defined at the end of this module)
+        ExecutionPlan._check_permissions = _dummy_check_permissions
+        rqlannotation._select_principal = _select_principal
+
+    def tearDown(self):
+        # restore the original implementations
+        ExecutionPlan._check_permissions = _orig_check_permissions
+        rqlannotation._select_principal = _orig_select_principal
+        
+    def _prepare(self, rql):
+        """Parse, solve, simplify and preprocess *rql*; return the union."""
+        #print '******************** prepare', rql
+        union = self.rqlhelper.parse(rql)
+        #print '********* parsed', union.as_string()
+        self.rqlhelper.compute_solutions(union)
+        #print '********* solutions', solutions
+        self.rqlhelper.simplify(union)
+        #print '********* simplified', union.as_string()
+        plan = self.qhelper.plan_factory(union, {}, FakeSession())
+        plan.preprocess(union)
+        # sort solutions for deterministic comparison
+        for select in union.children:
+            select.solutions.sort()
+        #print '********* ppsolutions', solutions
+        return union
+
+
+class BaseQuerierTC(TestCase):
+    """Base test case running queries against a real repository (set by
+    the concrete test class); entities created during a test are deleted
+    on teardown by eid comparison against the pre-test maximum."""
+    repo = None # set this in concret test
+    
+    def setUp(self):
+        self.o = self.repo.querier
+        self.session = self.repo._sessions.values()[0]
+        self.ueid = self.session.user.eid
+        assert self.ueid != -1
+        self.repo._type_source_cache = {} # clear cache
+        self.pool = self.session.set_pool()
+        # remember the highest eid so cleanup() can delete test entities
+        self.maxeid = self.get_max_eid()
+        do_monkey_patch()
+
+    def get_max_eid(self):
+        """Return the highest eid currently in the repository."""
+        return self.session.unsafe_execute('Any MAX(X)')[0][0]
+    def cleanup(self):
+        """Delete every entity created since setUp."""
+        self.session.unsafe_execute('DELETE Any X WHERE X eid > %s' % self.maxeid)
+        
+    def tearDown(self):
+        undo_monkey_patch()
+        self.session.rollback()
+        self.cleanup()
+        self.commit()
+        self.repo._free_pool(self.pool)
+        assert self.session.user.eid != -1
+
+    def set_debug(self, debug):
+        set_debug(debug)
+        
+    def _rqlhelper(self):
+        rqlhelper = self.o._rqlhelper
+        # reset uid_func so it don't try to get type from eids
+        rqlhelper._analyser.uid_func = None
+        rqlhelper._analyser.uid_func_mapping = {}
+        return rqlhelper
+
+    def _prepare_plan(self, rql, kwargs=None):
+        """Parse, solve and simplify *rql*, then return an execution plan."""
+        rqlhelper = self._rqlhelper()
+        rqlst = rqlhelper.parse(rql)
+        rqlhelper.compute_solutions(rqlst, kwargs=kwargs)
+        rqlhelper.simplify(rqlst)
+        for select in rqlst.children:
+            select.solutions.sort()
+        return self.o.plan_factory(rqlst, kwargs, self.session)
+        
+    def _prepare(self, rql, kwargs=None):    
+        """Return the preprocessed first select of *rql*'s plan, with
+        unused solutions removed."""
+        plan = self._prepare_plan(rql, kwargs)
+        plan.preprocess(plan.rqlst)
+        rqlst = plan.rqlst.children[0]
+        rqlst.solutions = remove_unused_solutions(rqlst, rqlst.solutions, {}, self.repo.schema)[0]
+        return rqlst
+
+    def _user_session(self, groups=('guests',), ueid=None):
+        """Return a (user, session) pair belonging to *groups*."""
+        # use self.session.user.eid to get correct owned_by relation, unless explicit eid
+        if ueid is None:
+            ueid = self.session.user.eid
+        u = self.repo._build_user(self.session, ueid)
+        u._groups = set(groups)
+        s = Session(u, self.repo)
+        s._threaddata.pool = self.pool
+        return u, s
+
+    def execute(self, rql, args=None, eid_key=None, build_descr=True):
+        return self.o.execute(self.session, rql, args, eid_key, build_descr)
+    
+    def commit(self):
+        self.session.commit()
+        self.session.set_pool()        
+
+
+class BasePlannerTC(BaseQuerierTC):
+    """Variant of BaseQuerierTC preparing plans through the querier's own
+    parse/solutions pipeline (covers non-select statements as well)."""
+
+    def _prepare_plan(self, rql, kwargs=None):
+        rqlst = self.o.parse(rql, annotate=True)
+        self.o.solutions(self.session, rqlst, kwargs)
+        if rqlst.TYPE == 'select':
+            self.o._rqlhelper.annotate(rqlst)
+            # sort solutions for deterministic comparison
+            for select in rqlst.children:
+                select.solutions.sort()
+        else:
+            rqlst.solutions.sort()
+        return self.o.plan_factory(rqlst, kwargs, self.session)
+
+
+# monkey patch some methods to get predicatable results #######################
+
+from cubicweb.server.rqlrewrite import RQLRewriter
+# keep references to the originals so undo_monkey_patch() can restore them
+_orig_insert_snippets = RQLRewriter.insert_snippets
+_orig_build_variantes = RQLRewriter.build_variantes
+
+def _insert_snippets(self, snippets, varexistsmap=None):
+    """Patched insert_snippets: sort snippets for deterministic output."""
+    _orig_insert_snippets(self, sorted(snippets, snippet_cmp), varexistsmap)
+
+def _build_variantes(self, newsolutions):
+    """Patched build_variantes: return variantes in a deterministic order,
+    each wrapped in an ordered dict sorted on (var, rtype, value)."""
+    variantes = _orig_build_variantes(self, newsolutions)
+    sortedvariantes = []
+    for variante in variantes:
+        orderedkeys = sorted((k[1], k[2], v) for k,v in variante.iteritems())
+        variante = DumbOrderedDict(sorted(variante.iteritems(),
+                                          lambda a,b: cmp((a[0][1],a[0][2],a[1]),
+                                                          (b[0][1],b[0][2],b[1]))))
+        sortedvariantes.append( (orderedkeys, variante) )
+    return [v for ok, v in sorted(sortedvariantes)]
+
+from cubicweb.server.querier import ExecutionPlan
+# keep references to the originals so undo_monkey_patch() can restore them
+_orig_check_permissions = ExecutionPlan._check_permissions
+_orig_init_temp_table = ExecutionPlan.init_temp_table
+
+def _check_permissions(*args, **kwargs):
+    """Patched _check_permissions: return results in a deterministic order."""
+    res, restricted = _orig_check_permissions(*args, **kwargs)
+    res = DumbOrderedDict(sorted(res.iteritems(), lambda a,b: cmp(a[1], b[1])))
+    return res, restricted
+
+def _dummy_check_permissions(self, rqlst):
+    """Permission check stub granting everything (used by RQLGeneratorTC)."""
+    return {(): rqlst.solutions}, set()
+
+def _init_temp_table(self, table, selection, solution):
+    """Patched init_temp_table: allocate predictable 'tableN' names."""
+    if self.tablesinorder is None:
+        tablesinorder = self.tablesinorder = {}
+    else:
+        tablesinorder = self.tablesinorder
+    if not table in tablesinorder:
+        tablesinorder[table] = 'table%s' % len(tablesinorder)
+    return _orig_init_temp_table(self, table, selection, solution)
+
+from cubicweb.server import rqlannotation
+# keep a reference to the original so it can be restored after the test
+_orig_select_principal = rqlannotation._select_principal
+
+def _select_principal(scope, relations):
+    """Patched _select_principal: sort relations for deterministic choice."""
+    return _orig_select_principal(scope, sorted(relations, key=lambda x: x.r_type))
+
+try:
+    from cubicweb.server.msplanner import PartPlanInformation
+except ImportError:
+    # multi-source planner unavailable: provide a stub with the same
+    # interface so the patching below stays harmless
+    class PartPlanInformation(object):
+        def merge_input_maps(*args):
+            pass
+        def _choose_var(self, sourcevars):
+            pass    
+_orig_merge_input_maps = PartPlanInformation.merge_input_maps
+_orig_choose_var = PartPlanInformation._choose_var
+
+def _merge_input_maps(*args):
+    """Patched merge_input_maps: return results in sorted order."""
+    return sorted(_orig_merge_input_maps(*args))
+
+def _choose_var(self, sourcevars):
+    # predictable order for test purpose
+    def get_key(x):
+        try:
+            # variable
+            return x.name
+        except AttributeError:
+            try:
+                # relation
+                return x.r_type
+            except AttributeError:
+                # const
+                return x.value
+    varsinorder = sorted(sourcevars, key=get_key)
+    # prefer non-root-scope variables with several sources, root-scope
+    # variables otherwise, falling back to the first in sorted order
+    if len(self._sourcesvars) > 1:
+        for var in varsinorder:
+            if not var.scope is self.rqlst:
+                return var, sourcevars.pop(var)
+    else:
+        for var in varsinorder:
+            if var.scope is self.rqlst:
+                return var, sourcevars.pop(var)
+    var = varsinorder[0]
+    return var, sourcevars.pop(var)
+
+
+def do_monkey_patch():
+    """Install the deterministic patches defined above (call from setUp)."""
+    RQLRewriter.insert_snippets = _insert_snippets
+    RQLRewriter.build_variantes = _build_variantes
+    ExecutionPlan._check_permissions = _check_permissions
+    ExecutionPlan.tablesinorder = None
+    ExecutionPlan.init_temp_table = _init_temp_table
+    PartPlanInformation.merge_input_maps = _merge_input_maps
+    PartPlanInformation._choose_var = _choose_var
+
+def undo_monkey_patch():
+    """Restore the original implementations (call from tearDown)."""
+    RQLRewriter.insert_snippets = _orig_insert_snippets
+    RQLRewriter.build_variantes = _orig_build_variantes
+    ExecutionPlan._check_permissions = _orig_check_permissions
+    ExecutionPlan.init_temp_table = _orig_init_temp_table
+    PartPlanInformation.merge_input_maps = _orig_merge_input_maps
+    PartPlanInformation._choose_var = _orig_choose_var
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/devtools/stresstester.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,182 @@
+""" Usage: %s [OPTIONS] <application id> <queries file>
+
+Stress test a CubicWeb repository
+
+OPTIONS:
+  -h / --help
+     Display this help message and exit.
+     
+  -u / --user <user>
+     Connect as <user> instead of being prompted to give it.
+  -p / --password <password>
+     Automatically give <password> for authentication instead of being prompted
+     to give it.
+     
+  -n / --nb-times <num>
+     Repeat queries <num> times.
+  -t / --nb-threads <num>
+     Execute queries in <num> parallel threads.
+  -P / --profile <prof_file>
+     dumps profile results (hotshot) in <prof_file>
+  -o / --report-output <filename>
+     Write profiler report into <filename> rather than on stdout
+
+Copyright (c) 2003-2006 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+
+__revision__ = "$Id: stresstester.py,v 1.3 2006-03-05 14:35:27 syt Exp $"
+
+import os
+import sys
+import threading
+import getopt
+import traceback
+from getpass import getpass
+from os.path import basename
+from time import clock
+
+from logilab.common.fileutils import lines
+from logilab.common.ureports import Table, TextWriter
+from cubicweb.server.repository import Repository
+from cubicweb.dbapi import Connection
+
+TB_LOCK = threading.Lock()
+
+class QueryExecutor:
+    def __init__(self, cursor, times, queries, reporter = None):
+        self._cursor = cursor
+        self._times = times
+        self._queries = queries
+        self._reporter = reporter
+        
+    def run(self):
+        cursor = self._cursor
+        times = self._times
+        while times:
+            for index, query in enumerate(self._queries):
+                start = clock()
+                try:
+                    cursor.execute(query)
+                except KeyboardInterrupt:
+                    raise
+                except:
+                    TB_LOCK.acquire()
+                    traceback.print_exc()
+                    TB_LOCK.release()
+                    return
+                if self._reporter is not None:
+                    self._reporter.add_proftime(clock() - start, index)
+            times -= 1
+
+def usage(status=0):
+    """print usage string and exit"""
+    print __doc__ % basename(sys.argv[0])
+    sys.exit(status)
+
+
+class ProfileReporter:
+    """a profile reporter gathers all profile information from several
+    threads and can write a report that summarizes all profile information
+    """
+    profiler_lock = threading.Lock()
+    
+    def __init__(self, queries):
+        self._queries = tuple(queries)
+        self._profile_results = [(0., 0)] * len(self._queries)
+        # self._table_report = Table(3, rheaders = True)
+        len_max = max([len(query) for query in self._queries]) + 5
+        self._query_fmt = '%%%ds' % len_max
+
+    def add_proftime(self, elapsed_time, query_index):
+        """add a new time measure for query"""
+        ProfileReporter.profiler_lock.acquire()
+        cumul_time, times = self._profile_results[query_index]
+        cumul_time += elapsed_time
+        times += 1.
+        self._profile_results[query_index] = (cumul_time, times)
+        ProfileReporter.profiler_lock.release()
+
+    def dump_report(self, output = sys.stdout):
+        """dump report in 'output'"""
+        table_elems = ['RQL Query', 'Times', 'Avg Time']
+        total_time = 0.
+        for query, (cumul_time, times) in zip(self._queries, self._profile_results):
+            avg_time = cumul_time / float(times)
+            table_elems += [str(query), '%f' % times, '%f' % avg_time ]
+            total_time += cumul_time
+        table_elems.append('Total time :')
+        table_elems.append(str(total_time))
+        table_elems.append(' ')
+        table_layout = Table(3, rheaders = True, children = table_elems)
+        TextWriter().format(table_layout, output)
+        # output.write('\n'.join(tmp_output))
+        
+        
+def run(args):
+    """run the command line tool"""
+    try:
+        opts, args = getopt.getopt(args, 'hn:t:u:p:P:o:', ['help', 'user=', 'password=',
+                                                           'nb-times=', 'nb-threads=',
+                                                           'profile', 'report-output=',])
+    except Exception, ex:
+        print ex
+        usage(1)
+    repeat = 100
+    threads = 1
+    user = os.environ.get('USER', os.environ.get('LOGNAME'))
+    password = None
+    report_output = sys.stdout
+    prof_file = None
+    for opt, val in opts:
+        if opt in ('-h', '--help'):
+            usage()
+        if opt in ('-u', '--user'):
+            user = val
+        elif opt in ('-p', '--password'):
+            password = val
+        elif opt in ('-n', '--nb-times'):
+            repeat = int(val)
+        elif opt in ('-t', '--nb-threads'):
+            threads = int(val)
+        elif opt in ('-P', '--profile'):
+            prof_file = val
+        elif opt in ('-o', '--report-output'):
+            report_output = file(val, 'w')
+    if len(args) != 2:
+        usage(1)
+    queries =  [query for query in lines(args[1]) if not query.startswith('#')]
+    if user is None:
+        user = raw_input('login: ')
+    if password is None:
+        password = getpass('password: ')
+    from cubicweb.cwconfig import application_configuration 
+    config = application_configuration(args[0])
+    # get local access to the repository
+    print "Creating repo", prof_file
+    repo = Repository(config, prof_file)
+    cnxid = repo.connect(user, password)
+    # connection to the CubicWeb repository
+    repo_cnx = Connection(repo, cnxid)
+    repo_cursor = repo_cnx.cursor()
+    reporter = ProfileReporter(queries)
+    if threads > 1:
+        executors = []
+        while threads:
+            qe = QueryExecutor(repo_cursor, repeat, queries, reporter = reporter)
+            executors.append(qe)
+            thread = threading.Thread(target=qe.run)
+            qe.thread = thread
+            thread.start()
+            threads -= 1
+        for qe in executors:
+            qe.thread.join()
+##         for qe in executors:
+##             print qe.thread, repeat - qe._times, 'times'
+    else:
+        QueryExecutor(repo_cursor, repeat, queries, reporter = reporter).run()
+    reporter.dump_report(report_output)
+    
+    
+if __name__ == '__main__':
+    run(sys.argv[1:])
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/devtools/test/data/bootstrap_packages	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+eperson, ecomment
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/devtools/test/data/dbfill.conf	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,22 @@
+[BASE]
+APPLICATION_SCHEMA = /home/adim/cvs_work/soft_prive/ginco/applications/crm/schema
+APPLICATION_HOME = /home/adim/etc/erudi.d/crmadim # TODO: confirm this is the intended application home
+FAKEDB_NAME = crmtest
+ENCODING = UTF-8
+HOST = crater
+USER = adim
+PASSWORD = adim
+
+
+[ENTITIES]
+default = 20 #means default is 20 entities
+Person = 10 # means 10 Persons
+Company = 5# means 5 companies
+
+
+[RELATIONS]
+Person works_for Company = 4
+Division subsidiary_of Company = 3
+
+[DEFAULT_VALUES]
+Person.firstname = data/firstnames.txt
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/devtools/test/data/firstnames.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1599 @@
+ash
+pasqualino
+asl
+benjy
+wolodymyr
+dionysos
+launce
+khaleel
+sondra
+maaike
+lavinia
+giosu
+daisy
+xiang
+belgin
+edda
+olympia
+treasa
+katya
+misi
+ville
+mahon
+yngve
+moritz
+elder
+gawel
+horsa
+blossom
+deanne
+imelda
+deanna
+cairbre
+eddy
+horst
+gaenor
+breanne
+hewie
+breanna
+jarvis
+jamin
+éloise
+jamil
+fingall
+giselle
+jamie
+shinju
+gisella
+akilina
+jordan
+gertie
+cardea
+eiran
+valdemar
+sebestyen
+galia
+bride
+greg
+fausta
+eniola
+rudo
+pratibha
+kisha
+mickey
+charlotte
+karp
+charlotta
+nunzia
+nunzio
+patrice
+kara
+hallam
+collyn
+kari
+karl
+dusan
+lia
+cherokee
+lim
+lin
+yvain
+madlyn
+liv
+lir
+lis
+tullio
+norma
+liz
+lettice
+kae
+kaj
+kai
+tatyanna
+kam
+freddie
+elton
+meinir
+blaise
+kat
+japeth
+alpha
+kay
+mack
+jayna
+jayne
+hormazed
+lupita
+humbert
+vitya
+neoptolemus
+richardine
+hallvard
+diogo
+larkin
+ravi
+louiza
+hermogenes
+alanis
+yadira
+leandra
+milburga
+leandro
+sorin
+randi
+kaleb
+rogerio
+sanna
+kalea
+justice
+kaleo
+dijana
+shprintza
+randy
+colby
+otthild
+mariamne
+patrycja
+darwin
+christal
+khalida
+kaley
+allegria
+vidya
+renaud
+sisel
+suibhne
+lonny
+julienne
+calliope
+rocco
+alexander
+aristide
+edwige
+xzavier
+rajesh
+egil
+gell
+mahavir
+charline
+sigi
+theophania
+maurice
+afon
+konnor
+kiran
+angie
+jalila
+tolly
+havva
+metody
+engel
+philander
+lancelot
+nathalie
+leilah
+dane
+æðelm
+chatzkel
+keaton
+ashlie
+kudret
+rava
+danette
+eachann
+wilburn
+jeff
+kazimiera
+rukmini
+lauryn
+femie
+mahvash
+berkant
+alesha
+daedalus
+aphra
+karla
+tetty
+agostinho
+bolivar
+savitri
+karly
+forbes
+vencesl
+bahija
+walter
+imam
+iman
+krzys
+imad
+elsa
+neville
+tracie
+else
+anthony
+shevon
+katherine
+marylou
+wojtek
+oddmund
+tristand
+areli
+valkyrie
+garfield
+wyatt
+luanne
+ossia
+luanna
+luciana
+guido
+luciano
+shachar
+astraea
+paco
+leland
+avra
+amenhotep
+kekoa
+gorden
+sameera
+boutros
+ruaidhr
+friedemann
+darrell
+hideaki
+petar
+donatien
+fannie
+eliana
+iason
+fedora
+grant
+shay
+estee
+marcelle
+marcella
+lothair
+shae
+ester
+marcello
+estev
+cassian
+allyson
+dima
+goodwin
+cezar
+blair
+monique
+elwin
+ihsan
+olufunmilayo
+arturo
+nanaia
+greetje
+clovia
+beowulf
+vassily
+madail
+emmeline
+guendolen
+nandag
+eilish
+sakari
+elisheva
+crispin
+aksel
+alvin
+cernunnos
+feardorcha
+heshel
+afra
+iqbal
+pryce
+siddhartha
+mikkel
+alvis
+myrtie
+khajag
+yesenia
+nikki
+grigory
+grigore
+maeve
+rebeca
+diederick
+maeva
+grigori
+cheryl
+rahim
+marco
+marci
+stein
+trista
+olufemi
+emmanuelle
+nadezhda
+wahid
+marcy
+vanda
+lavra
+alida
+amara
+hipolito
+valent
+renatus
+moira
+donny
+lucretia
+donna
+vesta
+cadoc
+reetta
+erma
+markku
+rosamond
+gracia
+tuyet
+sieffre
+gracie
+kodey
+debra
+photine
+jacek
+yanick
+isiah
+khordad
+rui
+stef
+rub
+foma
+sten
+kassy
+rue
+nelly
+merrick
+ayn
+macy
+vincente
+anargyros
+rut
+lenox
+jenessa
+faith
+barnaby
+manny
+jyotsana
+hasan
+iakopa
+edvard
+narcisa
+loredana
+ida
+torborg
+rollo
+stamatios
+pero
+natalya
+maudie
+carlton
+paulina
+aliyah
+lanty
+tadg
+deiniol
+dwayne
+alison
+fabius
+þórbj
+latasha
+maarit
+roxanna
+katinka
+publius
+augustijn
+ferdy
+khadiga
+akosua
+rees
+quetzalcoatl
+kristian
+larry
+reed
+krystal
+micheil
+paolo
+chelsey
+ute
+paola
+hamilcar
+malin
+deangelo
+munir
+velma
+malik
+utz
+malie
+govad
+chelsea
+malia
+willem
+seetha
+andrina
+rupert
+myrrine
+theodoros
+tito
+ivonne
+nan
+beryl
+nat
+tawnie
+korn
+marzena
+tinek
+hermine
+kora
+frances
+william
+tianna
+evan
+kory
+merletta
+kort
+nevan
+naheed
+heath
+tyreek
+shona
+amyas
+urjasz
+katy
+gu
+gr
+hilde
+mehmud
+gy
+hilda
+psyche
+olive
+nuno
+vinnie
+ga
+kato
+kata
+jeunesse
+kate
+chandrakant
+caoilainn
+arik
+rhonda
+leocadio
+euan
+aric
+leocadia
+aria
+bronwen
+marcellin
+vladislav
+ferapont
+nichole
+kizzy
+duilio
+jafet
+maas
+tue
+felicity
+mansoor
+órfhlaith
+brigitta
+fishke
+akua
+izabela
+olaf
+vittore
+michael
+óskar
+ryan
+gretta
+alvena
+olav
+brigitte
+euterpe
+barbara
+aiolos
+carter
+khalifa
+tziporah
+honora
+feich
+marilena
+onesime
+theo
+gunvor
+sa'id
+katlyn
+nicholas
+preeti
+etzel
+ekewaka
+vinal
+jubal
+ramsey
+rowley
+jocelin
+alfsigr
+kalliope
+micah
+frantisek
+holger
+alysha
+chant
+derry
+corin
+janus
+morcant
+chang
+corie
+gena
+randa
+joost
+vasile
+clark
+clare
+wim
+wil
+clara
+danika
+jory
+eleonoora
+ayelet
+caligula
+zakiah
+kilie
+meliora
+ottavio
+idoya
+ninette
+hudson
+deon
+gawdat
+frida
+jonathan
+reynold
+laocadia
+cerise
+cosmo
+hezekiah
+winston
+isak
+allyn
+noelene
+trajan
+vijaya
+cosma
+tresha
+astrithr
+priya
+astrophel
+pocahontas
+eliphalet
+stafford
+salah
+salal
+pauliina
+lazer
+feidhlim
+jackalyn
+kenny
+alayna
+wilfried
+wasim
+blaine
+femke
+jehu
+kenna
+lenore
+nkechi
+letizia
+kian
+kayleigh
+spartacus
+manuela
+leyton
+lesley
+georg
+ferdinand
+cuauhtemoc
+aeron
+lavrenti
+nyx
+ronald
+yoshiko
+gundula
+eluf
+toma
+riccardo
+ruadh
+matylda
+winter
+mayson
+llew
+clytia
+jamila
+fariha
+aegle
+octavio
+steafan
+jacqui
+mikelo
+dovid
+modestus
+blake
+jeanna
+alessa
+conway
+brook
+sunday
+kizzie
+hande
+catherine
+eckhard
+þórr
+gwyneth
+aukusti
+placid
+rufino
+kyleigh
+helah
+benoite
+eluned
+sanaz
+cnaeus
+ettie
+benaiah
+brendan
+wenonah
+nye
+candela
+dragan
+sanda
+naveen
+margar
+naveed
+austen
+sandu
+britta
+brodie
+morton
+kamilla
+sandy
+guilherme
+dorothea
+calix
+braxton
+wigburg
+tryphena
+ricky
+may
+sylwia
+libor
+marek
+ece
+trinity
+katsuro
+tercero
+'ismat
+mared
+jill
+amato
+achim
+princess
+jaquelyn
+eustathios
+tapio
+aglea
+kees
+evstathios
+edwyna
+austin
+cristian
+jouko
+nikandros
+leonora
+kaitlynn
+christoph
+mai
+parthalan
+tancredo
+rosaleen
+lynnette
+yasamin
+encarnacion
+gerolt
+ionut
+harmon
+ailbhe
+islwyn
+muirenn
+nyah
+mariana
+viktor
+greta
+kreszentia
+grete
+hormazd
+foka
+poseidon
+kazimir
+ultan
+ben
+sudhir
+bea
+bee
+saburo
+elnora
+ber
+michelyne
+clytemnestra
+yardena
+gavrel
+michelangelo
+wystan
+odhiambo
+miquel
+bertha
+su
+berthe
+alisia
+kelley
+leonhard
+rodger
+ewald
+oluwaseyi
+celandine
+kunegunda
+luisa
+khayyam
+iisakki
+luise
+ligia
+zaina
+tatiana
+siarl
+jorge
+bronislaw
+bronislav
+montana
+edric
+miloslava
+achilles
+donaldina
+wilfredo
+laurens
+haifa
+stelian
+glenice
+calvino
+rodica
+hulda
+indy
+uri
+laurena
+tzeitel
+laurene
+urs
+danita
+platon
+parker
+chadwick
+lorne
+narinder
+theodoric
+florentina
+ambrosine
+nikephoros
+kapel
+aeolus
+cenek
+hadi
+perle
+alyona
+cyril
+perla
+cicely
+darby
+madhav
+hector
+ethan
+aretha
+ilker
+avdotya
+boris
+sassa
+misty
+bonaventure
+kiefer
+emmet
+arkadios
+farrah
+tivoli
+pietari
+mohammed
+shoshana
+felipe
+felipa
+maurene
+tancred
+raymonde
+sho
+faron
+arundhati
+esteri
+silvanus
+nuha
+aloisia
+baris
+tammie
+fabricio
+lux
+luz
+driskoll
+tyra
+luc
+marsha
+luk
+aron
+joye
+ken
+gethsemane
+kelan
+yuko
+merry
+proserpine
+precious
+suibne
+mindy
+vitus
+olga
+jia
+kalysta
+angharad
+ciera
+careen
+inglebert
+apphia
+muadhnait
+christen
+rebekah
+dominique
+gita
+tori
+harmonie
+anatolius
+harmonia
+denise
+johann
+johano
+denisa
+viktoria
+padmini
+johana
+christer
+barakat
+willy
+sari
+fitzroy
+yaw
+sara
+yan
+quim
+quin
+yaa
+katelin
+pontus
+raelene
+alexus
+gwandoya
+venceslav
+ott
+artemidoros
+zaynab
+folant
+salman
+ealdgy
+randal
+macey
+heriberto
+kimball
+ekin
+dema
+evelyn
+demi
+pip
+simona
+daniil
+emmerson
+kausalya
+kortney
+gavriil
+yered
+parth
+fido
+solange
+oona
+anka
+renie
+anke
+habakkuk
+linwood
+teofilo
+grazyna
+enitan
+bhaskar
+finnian
+perseus
+mordechai
+fyodor
+ashley
+philo
+i
+hecate
+phile
+theodor
+kiaran
+ashlee
+dollie
+savannah
+upton
+sofia
+noak
+sofie
+laurel
+lauren
+dubaku
+zacharjasz
+patricio
+trudi
+sophus
+vida
+patricia
+trudy
+tapani
+mavreena
+jesper
+sandrine
+sonia
+livio
+mikolaj
+laurine
+livia
+finnegan
+oprah
+waheed
+lavonne
+perdita
+liviu
+imen
+attila
+lincoln
+fernanda
+evrard
+fernande
+jaana
+artair
+fernando
+candy
+cande
+kazimierz
+kaija
+shamgar
+laxmi
+martie
+page
+candi
+brody
+piaras
+shea
+herbie
+shem
+kristaps
+sher
+cleveland
+carreen
+margaid
+phinehas
+justina
+wendi
+linus
+wenda
+matrona
+christiane
+wendy
+kerensa
+roch
+fergal
+fanny
+kamila
+oswin
+camilo
+everette
+katashi
+myron
+ridley
+shavonne
+blythe
+nader
+marlowe
+miha
+carolyn
+glenn
+gadar
+rainard
+sybella
+raquel
+rozabela
+serhat
+bashemath
+jing
+gobnet
+yentl
+sylvana
+dolores
+sanjit
+tamsin
+sanjiv
+innes
+daniela
+daniele
+margr
+keysha
+rogelio
+ean
+hj
+philipp
+valerian
+marge
+gail
+margh
+gaia
+engelbert
+kathie
+artemisia
+margo
+stefan
+pansy
+swanhilda
+swanhilde
+alessio
+beata
+beate
+babur
+beatrice
+eris
+erin
+maura
+camryn
+conan
+erik
+krysia
+nigelia
+mauri
+averill
+draco
+eric
+sophronius
+mauro
+diego
+simcha
+malachy
+barth
+maoilios
+germaine
+malachi
+katariina
+lianne
+ferdinando
+donagh
+kelemen
+taletta
+star
+gilah
+faustus
+ælfwine
+rayna
+gotthard
+sa'd
+stan
+klemen
+pranay
+howie
+dewey
+tiarnan
+katherina
+uzma
+jabril
+hakan
+martin
+elsie
+cleve
+imani
+moshe
+padma
+inmaculada
+augustine
+trenton
+ghislain
+aiden
+alfhild
+ireneus
+gottschalk
+andra
+jahzeel
+andro
+fredrik
+wynter
+kohar
+tobin
+giustino
+buddy
+marcos
+mieszko
+giustina
+khalil
+aur
+helladius
+riccarda
+elettra
+glykeria
+yeva
+trahaearn
+ulisse
+wilfred
+sorrel
+saara
+ekwueme
+sarita
+finella
+waldo
+herbert
+elissa
+bevan
+lavern
+till
+ruxandra
+lavender
+ghalib
+eldon
+masterman
+tameka
+mihajlo
+mahin
+neo
+asim
+jordon
+pace
+ned
+giampiero
+asia
+nea
+haze
+bearach
+cheng
+pieter
+yonah
+chikako
+maverick
+fonsie
+ozzy
+meg
+mitxel
+filbert
+mel
+neves
+henrik
+mei
+hilaire
+drew
+deemer
+liborio
+dubhghlas
+bogdan
+dipak
+rapha
+golda
+maighread
+masha
+pranciskis
+mitchell
+titilayo
+aydin
+ippolit
+toiba
+omar
+cindy
+alexandrina
+lyubov
+hiltraud
+joshua
+moray
+baptiste
+bahiyya
+marquita
+benedicta
+reagan
+latifah
+scevola
+ardashir
+pakpao
+topaz
+janine
+omolara
+janina
+morag
+euripides
+lennart
+orb
+helmuth
+armo
+diederik
+lennard
+raeburn
+oscar
+odell
+ualan
+noemi
+melba
+berlin
+lazarus
+merla
+meera
+anastas
+rhamantus
+yussel
+meshullam
+esdras
+kumar
+flora
+norwood
+rio
+apollinaris
+oleg
+rim
+nadzeija
+akio
+akim
+efisio
+jayda
+olek
+rowanne
+honey
+karola
+chetana
+candelas
+friede
+phaedrus
+frieda
+joann
+braidy
+hitomi
+kieron
+dakarai
+teofil
+dervila
+ria
+pietrina
+becky
+alechjo
+santos
+egon
+olwin
+ove
+balthazar
+reeta
+becka
+tillo
+royce
+peninnah
+earnestine
+janis
+jakab
+janie
+rosalba
+hosanna
+aharon
+fife
+zacharias
+fifi
+aleesha
+murray
+helena
+helene
+rashmi
+afia
+oswald
+zachariah
+shawnee
+pius
+zdenek
+kichiro
+melchiorre
+erland
+yaroslava
+anushka
+cree
+iser
+rachel
+anik
+fabiola
+ania
+aneurin
+hernando
+ernesto
+ernesta
+astor
+manasseh
+naphtali
+shai
+lorena
+lazar
+luce
+lorenz
+luca
+briana
+rosemary
+dawid
+nava
+payton
+linos
+aida
+gunne
+milan
+tuomas
+sahar
+doug
+mikala
+dawn
+vincenza
+saturninus
+channah
+mandy
+reuven
+cormag
+cormac
+mandi
+sachie
+ladonna
+phuong
+tasha
+ramon
+hashim
+fachtna
+euphemia
+tisha
+jozafat
+horatius
+imke
+venus
+rodolf
+binyamin
+cosmin
+oluwafunmilayo
+nekane
+loup
+kohinoor
+teuvo
+xue
+innokenti
+vincenzo
+kiley
+isa
+hannibal
+vijay
+kornelia
+afanasy
+vittorio
+tuor
+adalia
+damayanti
+afanasi
+grady
+evangelos
+ermete
+brock
+bonita
+arisha
+pelagia
+solvej
+parthenope
+peggie
+kierra
+jozefa
+garry
+giuditta
+ladislas
+jozefo
+swietoslaw
+yildiz
+nasira
+eshe
+helen
+gretchen
+shekhar
+daren
+lenuta
+dymphna
+daina
+matteo
+berjouhi
+jerusha
+solomon
+gernot
+murtagh
+meaveen
+godwin
+ladislao
+minh
+hachiro
+farquhar
+ichabod
+mina
+caleb
+veera
+ginger
+ming
+jaynie
+sharyn
+seonag
+ferdie
+ilana
+gabriela
+gabriele
+lloren
+hooda
+mabelle
+timeus
+teagan
+gorka
+ulrich
+philadelphia
+razvan
+lamprecht
+marit
+kean
+marin
+mario
+rhonwen
+vilko
+konstantin
+tyr
+maria
+fastred
+kazuki
+krister
+don
+dom
+iekika
+ruben
+m
+calanthe
+luchjo
+vicki
+sheryl
+afanen
+kirabo
+dov
+kristel
+dot
+kristen
+pavao
+donelle
+antti
+donella
+katerina
+liza
+wladyslaw
+gerlach
+hrodohaidis
+samnang
+ashok
+raelyn
+tipene
+kallias
+kun
+gebhard
+folke
+katica
+lennie
+rupinder
+maryann
+adolphus
+lachtna
+petri
+monica
+kyriakos
+brannon
+deforest
+shankara
+hourig
+haniyya
+christopher
+griogair
+saturn
+tola
+earl
+decebal
+bas
+petra
+adelia
+cleto
+bao
+bal
+bai
+julien
+clarette
+dimitar
+fioralba
+tommie
+domhnall
+ragnhei
+gunnar
+ailill
+juliet
+pete
+vasya
+peta
+duff
+imaculada
+peti
+manola
+kolab
+petr
+neriah
+manolo
+edoardo
+onora
+elisud
+graciano
+fayza
+as'ad
+romola
+vernon
+pluto
+genevra
+yahweh
+mukesh
+fiacre
+sudarshana
+shahriar
+athanasius
+una
+casimir
+derval
+ernst
+sherilyn
+taranis
+enzo
+bedelia
+winnie
+kalyan
+jinan
+plamen
+quinn
+monat
+alcaeus
+mathieu
+aindri
+raffaella
+armin
+lovell
+cyrus
+chelo
+sidonius
+basia
+tina
+basil
+basim
+fuad
+riley
+tracee
+chun
+talia
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/devtools/test/data/schema/Bug.sql	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,5 @@
+title		ivarchar(64) not null
+state		CHOICE('open', 'rejected', 'validation pending', 'resolved') default 'open'
+severity	CHOICE('important', 'normal', 'minor') default 'normal'
+cost 		integer
+description	ivarchar(4096)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/devtools/test/data/schema/Project.sql	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,6 @@
+name		ivarchar(64) not null
+summary		ivarchar(128)	
+vcsurl		varchar(256)
+reporturl	varchar(256)
+description	ivarchar(1024)
+url		varchar(128)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/devtools/test/data/schema/Story.sql	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,5 @@
+title		ivarchar(64) not null
+state		CHOICE('open', 'rejected', 'validation pending', 'resolved') default 'open'
+priority	CHOICE('minor', 'normal', 'important') default 'normal'
+cost	        integer
+description	ivarchar(4096)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/devtools/test/data/schema/Version.sql	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,3 @@
+num	varchar(16) not null
+diem	date	
+status 	CHOICE('planned', 'dev', 'published') default 'planned'
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/devtools/test/data/schema/custom.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,2 @@
+Person = import_erschema('Person')
+Person.add_relation(Date(), 'birthday')
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/devtools/test/data/schema/relations.rel	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,33 @@
+Bug concerns Project inline
+Story concerns Project inline
+
+Bug corrected_in Version inline CONSTRAINT E concerns P, X version_of P
+Story done_in Version inline CONSTRAINT E concerns P, X version_of P
+
+Bug   identical_to Bug   symetric
+Bug   identical_to Story symetric
+Story identical_to Story symetric
+
+Story depends_on Story
+Story depends_on Bug
+Bug   depends_on Story
+Bug   depends_on Bug
+
+Bug     see_also Bug	 symetric
+Bug	see_also Story	 symetric
+Bug	see_also Project symetric
+Story	see_also Story	 symetric
+Story	see_also Project symetric
+Project see_also Project symetric
+
+Project uses Project
+
+Version version_of Project inline
+Version todo_by EUser
+
+Comment about Bug inline
+Comment about Story inline
+Comment about Comment inline
+
+EUser interested_in Project
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/devtools/test/data/views/bug.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,27 @@
+"""only for unit tests !"""
+
+from cubicweb.common.view import EntityView
+
+HTML_PAGE = u"""<html>
+  <body>
+    <h1>Hello World !</h1>
+  </body>
+</html>
+"""
+
+class SimpleView(EntityView):
+    id = 'simple'
+    accepts = ('Bug',)
+
+    def call(self, **kwargs):
+        self.cell_call(0, 0)
+
+    def cell_call(self, row, col):
+        self.w(HTML_PAGE)
+
+class RaisingView(EntityView):
+    id = 'raising'
+    accepts = ('Bug',)
+
+    def cell_call(self, row, col):
+        raise ValueError()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/devtools/test/runtests.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,5 @@
+from logilab.common.testlib import main
+
+if __name__ == '__main__':
+    import sys, os
+    main(os.path.dirname(sys.argv[0]) or '.')
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/devtools/test/unittest_dbfill.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,115 @@
+# -*- coding: iso-8859-1 -*-
+"""unit tests for database value generator"""
+
+import os.path as osp
+import re
+
+from logilab.common.testlib import TestCase, unittest_main
+
+from cubicweb.schema import Schema, EntitySchema
+from cubicweb.devtools.fill import ValueGenerator, make_tel
+from cubicweb.devtools import ApptestConfiguration
+
+DATADIR = osp.join(osp.abspath(osp.dirname(__file__)), 'data')
+ISODATE_SRE = re.compile('(?P<year>\d{4})-(?P<month>\d{2})-(?P<day>\d{2})$')
+
+
+class MyValueGenerator(ValueGenerator):
+
+    def generate_Bug_severity(self, index):
+        return u'dangerous'
+
+    def generate_Any_description(self, index, format=None):
+        return u'yo'
+
+
+class ValueGeneratorTC(TestCase):
+    """test case for ValueGenerator"""
+
+    def _choice_func(self, etype, attrname):
+        try:
+            return getattr(self, '_available_%s_%s' % (etype, attrname))(etype, attrname)
+        except AttributeError:
+            return None
+    
+    def _available_Person_firstname(self, etype, attrname):
+        return [f.strip() for f in file(osp.join(DATADIR, 'firstnames.txt'))]
+
+
+    def setUp(self):
+        config = ApptestConfiguration('data')
+        config.bootstrap_cubes()
+        schema = config.load_schema()
+        e_schema = schema.entity_schema('Person')
+        self.person_valgen = ValueGenerator(e_schema, self._choice_func)
+        e_schema = schema.entity_schema('Bug')
+        self.bug_valgen = MyValueGenerator(e_schema)
+        self.config = config
+
+    def _check_date(self, date):
+        """checks that 'date' is well-formed"""
+        year = date.year
+        month = date.month
+        day = date.day
+        self.failUnless(day in range(1, 29), '%s not in [1;28]' % day) 
+        self.failUnless(month in range(1, 13), '%s not in [1;12]' % month)
+        self.failUnless(year in range(2000, 2005),
+                        '%s not in [2000;2004]' % year)
+        
+
+    def test_string(self):
+        """test string generation"""
+        surname = self.person_valgen._generate_value('surname', 12)
+        self.assertEquals(surname, u'é&surname12')
+
+    def test_domain_value(self):
+        """test value generation from a given domain value"""
+        firstname = self.person_valgen._generate_value('firstname', 12)
+        possible_choices = self._choice_func('Person', 'firstname')
+        self.failUnless(firstname in possible_choices,
+                        '%s not in %s' % (firstname, possible_choices))
+
+    def test_choice(self):
+        """test choice generation"""
+        # Test for random index
+        for index in range(5):
+            sx_value = self.person_valgen._generate_value('civility', index)
+            self.failUnless(sx_value in ('Mr', 'Mrs', 'Ms'))
+
+    def test_integer(self):
+        """test integer generation"""
+        # Test for random index
+        for index in range(5):
+            cost_value = self.bug_valgen._generate_value('cost', index)
+            self.failUnless(cost_value in range(index+1))
+
+    def test_date(self):
+        """test date generation"""
+        # Test for random index
+        for index in range(5):
+            date_value = self.person_valgen._generate_value('birthday', index)
+            self._check_date(date_value)
+        
+    def test_phone(self):
+        """tests make_tel utility"""
+        self.assertEquals(make_tel(22030405), '22 03 04 05')
+
+
+    def test_customized_generation(self):
+        self.assertEquals(self.bug_valgen._generate_value('severity', 12),
+                          u'dangerous')
+        self.assertEquals(self.bug_valgen._generate_value('description', 12),
+                          u'yo')
+        self.assertEquals(self.person_valgen._generate_value('description', 12),
+                          u'yo')
+                          
+        
+
+class ConstraintInsertionTC(TestCase):
+
+    def test_writeme(self):
+        self.skip('Test automatic insertion / Schema Constraints')
+    
+
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/devtools/test/unittest_fill.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,53 @@
+"""unit tests for cubicweb.devtools.fill module"""
+
+from logilab.common.testlib import TestCase, unittest_main
+
+from cubicweb.schema import Schema, EntitySchema
+
+import re
+from cubicweb.devtools.fill import ValueGenerator, _ValueGenerator
+
+ISODATE_SRE = re.compile('(?P<year>\d{4})-(?P<month>\d{2})-(?P<day>\d{2})$')
+
+
+class AutoExtendableTC(TestCase):
+
+    def setUp(self):
+        self.attrvalues = dir(_ValueGenerator)
+
+    def tearDown(self):
+        attrvalues = set(dir(_ValueGenerator))
+        for attrname in attrvalues - set(self.attrvalues):
+            delattr(_ValueGenerator, attrname)
+
+        
+    def test_autoextend(self):
+        self.failIf('generate_server' in dir(ValueGenerator))
+        class MyValueGenerator(ValueGenerator):
+            def generate_server(self, index):
+                return attrname
+        self.failUnless('generate_server' in dir(ValueGenerator))
+
+
+    def test_bad_signature_detection(self):
+        self.failIf('generate_server' in dir(ValueGenerator))
+        try:
+            class MyValueGenerator(ValueGenerator):
+                def generate_server(self):
+                    pass
+        except TypeError:
+            self.failIf('generate_server' in dir(ValueGenerator))
+        else:
+            self.fail('TypeError not raised')
+
+
+    def test_signature_extension(self):
+        self.failIf('generate_server' in dir(ValueGenerator))
+        class MyValueGenerator(ValueGenerator):
+            def generate_server(self, index, foo):
+                pass
+        self.failUnless('generate_server' in dir(ValueGenerator))
+
+
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/devtools/test/unittest_testlib.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,154 @@
+"""unittests for cubicweb.devtools.testlib module"""
+
+from cStringIO import StringIO
+from unittest import TestSuite
+
+
+from logilab.common.testlib import (TestCase, unittest_main, mock_object,
+                                    SkipAwareTextTestRunner)
+from cubicweb.devtools import htmlparser
+
+from cubicweb.devtools.testlib import WebTest, EnvBasedTC
+
+class WebTestTC(TestCase):
+
+    def setUp(self):
+        output = StringIO()
+        self.runner = SkipAwareTextTestRunner(stream=output)
+
+    def test_error_raised(self):
+        class MyWebTest(WebTest):
+
+            def test_error_view(self):
+                self.add_entity('Bug', title=u"bt")
+                self.view('raising', self.execute('Bug B'), template=None)
+            
+            def test_correct_view(self):
+                self.view('primary', self.execute('EUser U'), template=None)
+            
+        tests = [MyWebTest('test_error_view'), MyWebTest('test_correct_view')]
+        result = self.runner.run(TestSuite(tests))
+        self.assertEquals(result.testsRun, 2)
+        self.assertEquals(len(result.errors), 0)        
+        self.assertEquals(len(result.failures), 1)
+
+
+class TestLibTC(EnvBasedTC):
+    def test_add_entity_with_relation(self):
+        bug = self.add_entity(u'Bug', title=u"toto")
+        self.add_entity(u'Bug', title=u"tata", identical_to=bug)
+
+        rset = self.execute('Any BA WHERE BA is Bug, BA title "toto"')
+        self.assertEquals(len(rset), 1)
+        bug = tuple(rset.entities())[0]
+        self.assertEquals(bug.identical_to[0].title, "tata")
+
+
+
+HTML_PAGE = u"""<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+<html>
+  <head><title>need a title</title></head>
+  <body>
+    <h1>Hello World !</h1>
+  </body>
+</html>
+"""
+
+HTML_PAGE2 = u"""<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+<html>
+ <head><title>need a title</title></head>
+ <body>
+   <h1>Test</h1>
+   <h1>Hello <a href="http://www.google.com">world</a> !</h1>
+   <h2>h2 title</h2>
+   <h3>h3 title</h3>
+   <h2>antoher h2 title</h2>
+   <h4>h4 title</h4>
+   <p><a href="http://www.logilab.org">Logilab</a> introduces CW !</p>
+ </body>
+</html>
+"""
+
+HTML_PAGE_ERROR = u"""<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+<html>
+ <head><title>need a title</title></head>
+ <body>
+   Logilab</a> introduces CW !
+ </body>
+</html>
+"""
+
+HTML_NON_STRICT = u"""<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+<html>
+  <head><title>need a title</title></head>
+  <body>
+    <h1><a href="something.com">title</h1>
+  </body>
+</html>
+"""
+
+
+class HTMLPageInfoTC(TestCase):
+    """test cases for PageInfo"""
+    def setUp(self):
+        parser = htmlparser.DTDValidator()
+        self.page_info = parser.parse_string(HTML_PAGE2)
+
+    def test_source1(self):
+        """make sure source is stored correctly"""
+        self.assertEquals(self.page_info.source, HTML_PAGE2)
+        
+    def test_source2(self):
+        """make sure source is stored correctly - raise exception"""
+        parser = htmlparser.DTDValidator()
+        self.assertRaises(AssertionError, parser.parse_string, HTML_PAGE_ERROR)
+
+        
+    def test_has_title_no_level(self):
+        """tests h? tags information"""
+        self.assertEquals(self.page_info.has_title('Test'), True)
+        self.assertEquals(self.page_info.has_title('Test '), False)
+        self.assertEquals(self.page_info.has_title('Tes'), False)
+        self.assertEquals(self.page_info.has_title('Hello world !'), True)
+
+    def test_has_title_level(self):
+        """tests h? tags information"""
+        self.assertEquals(self.page_info.has_title('Test', level = 1), True)
+        self.assertEquals(self.page_info.has_title('Test', level = 2), False)
+        self.assertEquals(self.page_info.has_title('Test', level = 3), False)
+        self.assertEquals(self.page_info.has_title('Test', level = 4), False)
+        self.assertRaises(IndexError, self.page_info.has_title, 'Test', level = 5)
+
+    def test_has_title_regexp_no_level(self):
+        """tests has_title_regexp() with no particular level specified"""
+        self.assertEquals(self.page_info.has_title_regexp('h[23] title'), True)
+
+    def test_has_title_regexp_level(self):
+        """tests has_title_regexp() with a particular level specified"""
+        self.assertEquals(self.page_info.has_title_regexp('h[23] title', 2), True)
+        self.assertEquals(self.page_info.has_title_regexp('h[23] title', 3), True)
+        self.assertEquals(self.page_info.has_title_regexp('h[23] title', 4), False)
+    
+    def test_appears(self):
+        """tests PageInfo.appears()"""
+        self.assertEquals(self.page_info.appears('CW'), True)
+        self.assertEquals(self.page_info.appears('Logilab'), True)
+        self.assertEquals(self.page_info.appears('Logilab introduces'), True)
+        self.assertEquals(self.page_info.appears('H2 title'), False)
+
+    def test_has_link(self):
+        """tests has_link()"""
+        self.assertEquals(self.page_info.has_link('Logilab'), True)
+        self.assertEquals(self.page_info.has_link('logilab'), False)
+        self.assertEquals(self.page_info.has_link('Logilab', 'http://www.logilab.org'), True)
+        self.assertEquals(self.page_info.has_link('Logilab', 'http://www.google.com'), False)
+
+    def test_has_link_regexp(self):
+        """test has_link_regexp()"""
+        self.assertEquals(self.page_info.has_link_regexp('L[oi]gilab'), True)
+        self.assertEquals(self.page_info.has_link_regexp('L[ai]gilab'), False)
+
+
+if __name__ == '__main__':
+    unittest_main()
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/devtools/testlib.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,393 @@
+"""this module contains base classes for web tests
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+import sys
+from math import log
+
+from logilab.common.debugger import Debugger
+from logilab.common.testlib import InnerTest
+from logilab.common.pytest import nocoverage
+
+from rql import parse
+
+from cubicweb.devtools import VIEW_VALIDATORS
+from cubicweb.devtools.apptest import EnvBasedTC
+from cubicweb.devtools._apptest import unprotected_entities, SYSTEM_RELATIONS
+from cubicweb.devtools.htmlparser import DTDValidator, SaxOnlyValidator, HTMLValidator
+from cubicweb.devtools.fill import insert_entity_queries, make_relations_queries
+
+from cubicweb.sobjects.notification import NotificationView
+
+from cubicweb.vregistry import NoSelectableObject
+from cubicweb.web.action import Action
+from cubicweb.web.views.basetemplates import TheMainTemplate
+
+
+## TODO ###############
+# creation tests: make sure an entity was actually created
+# Existing Test Environment
+
+class CubicWebDebugger(Debugger):
+
+    def do_view(self, arg):
+        import webbrowser
+        data = self._getval(arg)
+        file('/tmp/toto.html', 'w').write(data)
+        webbrowser.open('file:///tmp/toto.html')
+
+def how_many_dict(schema, cursor, how_many, skip):
+    """compute how many entities by type we need to be able to satisfy relations
+    cardinality
+    """
+    # compute how many entities by type we need to be able to satisfy relation constraint
+    relmap = {}
+    for rschema in schema.relations():
+        if rschema.meta or rschema.is_final(): # skip meta relations
+            continue
+        for subj, obj in rschema.iter_rdefs():
+            card = rschema.rproperty(subj, obj, 'cardinality')
+            if card[0] in '1?' and len(rschema.subjects(obj)) == 1:
+                relmap.setdefault((rschema, subj), []).append(str(obj))
+            if card[1] in '1?' and len(rschema.objects(subj)) == 1:
+                relmap.setdefault((rschema, obj), []).append(str(subj))
+    unprotected = unprotected_entities(schema)
+    for etype in skip:
+        unprotected.add(etype)
+    howmanydict = {}
+    for etype in unprotected_entities(schema, strict=True):
+        howmanydict[str(etype)] = cursor.execute('Any COUNT(X) WHERE X is %s' % etype)[0][0]
+        if etype in unprotected:
+            howmanydict[str(etype)] += how_many
+    for (rschema, etype), targets in relmap.iteritems():
+        # XXX should 1. check no cycle 2. propagate changes
+        relfactor = sum(howmanydict[e] for e in targets)
+        howmanydict[str(etype)] = max(relfactor, howmanydict[etype])
+    return howmanydict
+
+
+def line_context_filter(line_no, center, before=3, after=None):
+    """return true if line are in context
+    if after is None: after = before"""
+    if after is None:
+        after = before
+    return center - before <= line_no <= center + after
+
+## base webtest class #########################################################
+class WebTest(EnvBasedTC):
+    """base class for web tests"""
+    __abstract__ = True
+
+    pdbclass = CubicWebDebugger
+    # this is a hook to be able to define a list of rql queries
+    # that are application dependent and cannot be guessed automatically
+    application_rql = []
+
+    # validators are used to validate (XML, DTD, whatever) view's content
+    # available validators are:
+    #  DTDValidator : validates XML + declared DTD
+    #  SaxOnlyValidator : guarantees XML is well formed
+    #  None : do not try to validate anything
+    # validators used must be imported from cubicweb.devtools.htmlparser
+    validators = {
+        # maps vid : validator name
+        'hcal' : SaxOnlyValidator,
+        'rss' : SaxOnlyValidator,
+        'rssitem' : None,
+        'xml' : SaxOnlyValidator,
+        'xmlitem' : None,
+        'xbel' : SaxOnlyValidator,
+        'xbelitem' : None,
+        'vcard' : None,
+        'fulltext': None,
+        'fullthreadtext': None,
+        'fullthreadtext_descending': None,
+        'text' : None,
+        'treeitemview': None,
+        'textincontext' : None,
+        'textoutofcontext' : None,
+        'combobox' : None,
+        'csvexport' : None,
+        'ecsvexport' : None,
+        }
+    valmap = {None: None, 'dtd': DTDValidator, 'xml': SaxOnlyValidator}
+    no_auto_populate = ()
+    ignored_relations = ()
+    
+    def __init__(self, *args, **kwargs):
+        EnvBasedTC.__init__(self, *args, **kwargs)
+        for view, valkey in VIEW_VALIDATORS.iteritems():
+            self.validators[view] = self.valmap[valkey]
+        
+    def custom_populate(self, how_many, cursor):
+        pass
+        
+    def post_populate(self, cursor):
+        pass
+    
+    @nocoverage
+    def auto_populate(self, how_many):
+        """this method populates the database with `how_many` entities
+        of each possible type. It also inserts random relations between them
+        """
+        cu = self.cursor()
+        self.custom_populate(how_many, cu)
+        vreg = self.vreg
+        howmanydict = how_many_dict(self.schema, cu, how_many, self.no_auto_populate)
+        for etype in unprotected_entities(self.schema):
+            if etype in self.no_auto_populate:
+                continue
+            nb = howmanydict.get(etype, how_many)
+            for rql, args in insert_entity_queries(etype, self.schema, vreg, nb):
+                cu.execute(rql, args)
+        edict = {}
+        for etype in unprotected_entities(self.schema, strict=True):
+            rset = cu.execute('%s X' % etype)
+            edict[str(etype)] = set(row[0] for row in rset.rows)
+        existingrels = {}
+        ignored_relations = SYSTEM_RELATIONS + self.ignored_relations
+        for rschema in self.schema.relations():
+            if rschema.is_final() or rschema in ignored_relations:
+                continue
+            rset = cu.execute('DISTINCT Any X,Y WHERE X %s Y' % rschema)
+            existingrels.setdefault(rschema.type, set()).update((x,y) for x, y in rset)
+        q = make_relations_queries(self.schema, edict, cu, ignored_relations,
+                                   existingrels=existingrels)
+        for rql, args in q:
+            cu.execute(rql, args)
+        self.post_populate(cu)
+        self.commit()
+
+    @nocoverage
+    def _check_html(self, output, vid, template='main'):
+        """raises an exception if the HTML is invalid"""
+        if template is None:
+            default_validator = HTMLValidator
+        else:
+            default_validator = DTDValidator
+        validatorclass = self.validators.get(vid, default_validator)
+        if validatorclass is None:
+            return None
+        validator = validatorclass()
+        output = output.strip()
+        return validator.parse_string(output)
+
+
+    def view(self, vid, rset, req=None, template='main', htmlcheck=True, **kwargs):
+        """This method tests the view `vid` on `rset` using `template`
+
+        If no error occurred while rendering the view, the HTML is analyzed
+        and parsed.
+
+        :returns: an instance of `cubicweb.devtools.htmlparser.PageInfo`
+                  encapsulating the generated HTML
+        """
+        req = req or rset.req
+        # print "testing ", vid,
+        # if rset:
+        #     print rset, len(rset), id(rset)
+        # else:
+        #     print 
+        req.form['vid'] = vid
+        view = self.vreg.select_view(vid, req, rset, **kwargs)
+        if view.content_type not in ('application/xml', 'application/xhtml+xml', 'text/html'):
+            htmlcheck = False
+        # set explicit test description
+        if rset is not None:
+            self.set_description("testing %s, mod=%s (%s)" % (vid, view.__module__, rset.printable_rql()))
+        else:
+            self.set_description("testing %s, mod=%s (no rset)" % (vid, view.__module__))
+        viewfunc = lambda **k: self.vreg.main_template(req, template, **kwargs)
+        if template is None: # raw view testing, no template
+            viewfunc = view.dispatch
+        elif template == 'main':
+            _select_view_and_rset = TheMainTemplate._select_view_and_rset
+            # patch TheMainTemplate._select_view_and_rset to avoid recomputing resultset
+            TheMainTemplate._select_view_and_rset = lambda *a, **k: (view, rset)
+        try:
+            return self._test_view(viewfunc, vid, htmlcheck, template, **kwargs)
+        finally:
+            if template == 'main':
+                TheMainTemplate._select_view_and_rset = _select_view_and_rset
+
+
+    def _test_view(self, viewfunc, vid, htmlcheck=True, template='main', **kwargs):
+        """this method does the actual call to the view
+
+        If no error occurred while rendering the view, the HTML is analyzed
+        and parsed.
+
+        :returns: an instance of `cubicweb.devtools.htmlparser.PageInfo`
+                  encapsulation the generated HTML
+        """
+        output = None
+        try:
+            output = viewfunc(**kwargs)
+            if htmlcheck:
+                return self._check_html(output, vid, template)
+            else:
+                return output
+        except (SystemExit, KeyboardInterrupt):
+            raise
+        except:
+            # hijack exception: generative tests stop when the exception
+            # is not an AssertionError
+            klass, exc, tcbk = sys.exc_info()
+            try:
+                msg = '[%s in %s] %s' % (klass, vid, exc)
+            except:
+                msg = '[%s in %s] undisplayable exception' % (klass, vid)
+            if output is not None:
+                position = getattr(exc, "position", (0,))[0]
+                if position:
+                    # define filter
+                    
+                    
+                    output = output.splitlines()
+                    width = int(log(len(output), 10)) + 1
+                    line_template = " %" + ("%i" % width) + "i: %s"
+
+                    # XXX no need to iterate the whole file except to get
+                    # the line number
+                    output = '\n'.join(line_template % (idx + 1, line)
+                                for idx, line in enumerate(output)
+                                if line_context_filter(idx+1, position))
+                    msg+= '\nfor output:\n%s' % output
+            raise AssertionError, msg, tcbk
+
+        
+    def iter_automatic_rsets(self):
+        """generates basic resultsets for each entity type"""
+        etypes = unprotected_entities(self.schema, strict=True)
+        for etype in etypes:
+            yield self.execute('Any X WHERE X is %s' % etype)
+
+        etype1 = etypes.pop()
+        etype2 = etypes.pop()
+        # test a mixed query (DISTINCT/GROUP to avoid getting duplicate
+        # X which makes the muledit view fail, for instance (html validation fails
+        # because of some duplicate "id" attributes)
+        yield self.execute('DISTINCT Any X, MAX(Y) GROUPBY X WHERE X is %s, Y is %s' % (etype1, etype2))
+        # test some application-specific queries if defined
+        for rql in self.application_rql:
+            yield self.execute(rql)
+
+                
+    def list_views_for(self, rset):
+        """returns the list of views that can be applied on `rset`"""
+        req = rset.req
+        only_once_vids = ('primary', 'secondary', 'text')
+        skipped = ('restriction', 'cell')
+        req.data['ex'] = ValueError("whatever")
+        for vid, views in self.vreg.registry('views').items():
+            if vid[0] == '_':
+                continue
+            try:
+                view = self.vreg.select(views, req, rset)
+                if view.id in skipped:
+                    continue
+                if view.category == 'startupview':
+                    continue
+                if rset.rowcount > 1 and view.id in only_once_vids:
+                    continue
+                if not isinstance(view, NotificationView):
+                    yield view
+            except NoSelectableObject:
+                continue
+
+    def list_actions_for(self, rset):
+        """returns the list of actions that can be applied on `rset`"""
+        req = rset.req
+        for action in self.vreg.possible_objects('actions', req, rset):
+            yield action
+
+        
+    def list_boxes_for(self, rset):
+        """returns the list of boxes that can be applied on `rset`"""
+        req = rset.req
+        for box in self.vreg.possible_objects('boxes', req, rset):
+            yield box
+            
+        
+    def list_startup_views(self):
+        """returns the list of startup views"""
+        req = self.request()
+        for view in self.vreg.possible_views(req, None):
+            if view.category != 'startupview':
+                continue
+            yield view.id
+
+    def _test_everything_for(self, rset):
+        """this method tries to find everything that can be tested
+        for `rset` and yields a callable test (as needed in generative tests)
+        """
+        rqlst = parse(rset.rql)
+        propdefs = self.vreg['propertydefs']
+        # make all components visible
+        for k, v in propdefs.items():
+            if k.endswith('visible') and not v['default']:
+                propdefs[k]['default'] = True
+        for view in self.list_views_for(rset):
+            backup_rset = rset._prepare_copy(rset.rows, rset.description)
+            yield InnerTest(self._testname(rset, view.id, 'view'),
+                            self.view, view.id, rset,
+                            rset.req.reset_headers(), 'main', not view.binary)
+            # We have to do this because some views modify the
+            # resultset's syntax tree
+            rset = backup_rset
+        for action in self.list_actions_for(rset):
+            # XXX this seems a bit dummy
+            #yield InnerTest(self._testname(rset, action.id, 'action'),
+            #                self.failUnless,
+            #                isinstance(action, Action))
+            yield InnerTest(self._testname(rset, action.id, 'action'), action.url)
+        for box in self.list_boxes_for(rset):
+            yield InnerTest(self._testname(rset, box.id, 'box'), box.dispatch)
+
+
+
+    @staticmethod
+    def _testname(rset, objid, objtype):
+        return '%s_%s_%s' % ('_'.join(rset.column_types(0)), objid, objtype)
+            
+
+class AutomaticWebTest(WebTest):
+    """import this if you want automatic tests to be run"""
+    ## one each
+    def test_one_each_config(self):
+        self.auto_populate(1)
+        for rset in self.iter_automatic_rsets():
+            for testargs in self._test_everything_for(rset):
+                yield testargs
+
+    ## ten each
+    def test_ten_each_config(self):
+        self.auto_populate(10)
+        for rset in self.iter_automatic_rsets():
+            for testargs in self._test_everything_for(rset):
+                yield testargs
+                
+    ## startup views
+    def test_startup_views(self):
+        for vid in self.list_startup_views():
+            req = self.request()
+            yield self.view, vid, None, req
+
+
+class RealDBTest(WebTest):
+
+    def iter_individual_rsets(self, etypes=None, limit=None):
+        etypes = etypes or unprotected_entities(self.schema, strict=True)
+        for etype in etypes:
+            rset = self.execute('Any X WHERE X is %s' % etype)
+            for row in xrange(len(rset)):
+                if limit and row > limit:
+                    break
+                rset2 = rset.limit(limit=1, offset=row)
+                yield rset2
+
+        
Binary file doc/.static/logilab.png has changed
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/.static/sphinx-default.css	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,860 @@
+/**
+ * Sphinx Doc Design
+ */
+
+html, body {
+    background: white;	
+}
+
+body {
+    font-family: Verdana, sans-serif;
+    font-size: 100%;
+    background-color: white;
+    color: black;
+    margin: 0;
+    padding: 0;
+}
+
+/* :::: LAYOUT :::: */
+
+div.logilablogo {
+    padding: 10px 10px 10px 10px;
+    height:75;
+}
+
+
+div.document {
+    background-color: white;
+}
+
+div.documentwrapper {
+    float: left;
+    width: 100%;
+}
+
+div.bodywrapper {
+    margin: 0 0 0 230px;
+}
+
+div.body {
+    background-color: white;
+    padding: 0 20px 30px 20px;
+    border-left:solid;
+    border-left-color:#e2e2e2;
+    border-left-width:thin;
+}
+
+div.sphinxsidebarwrapper {
+    padding: 10px 5px 0 10px;
+}
+
+div.sphinxsidebar {
+    float: left;
+    width: 230px;
+    margin-left: -100%;
+    font-size: 90%;
+}
+
+div.clearer {
+    clear: both;
+}
+
+div.footer {
+    color: #ff4500;
+    width: 100%;
+    padding: 9px 0 9px 0;
+    text-align: center;
+    font-size: 75%;
+}
+
+div.footer a {
+    color: #ff4500;
+    text-decoration: underline;
+}
+
+div.related {
+    background-color: #ff7700;
+    color: white;
+    width: 100%;
+    height: 30px;
+    line-height: 30px;
+    font-size: 90%;
+}
+
+div.related h3 {
+    display: none;
+}
+
+div.related ul {
+    margin: 0;
+    padding: 0 0 0 10px;
+    list-style: none;
+}
+
+div.related li {
+    display: inline;
+}
+
+div.related li.right {
+    float: right;
+    margin-right: 5px;
+}
+
+div.related a {
+    color: white;
+    font-weight:bold;
+}
+
+/* ::: TOC :::: */
+
+div.sphinxsidebar {
+    border-style:solid;
+    border-color: white;
+/*    background-color:#e2e2e2;*/
+    padding-bottom:5px;
+}
+
+div.sphinxsidebar h3 {
+    font-family: 'Verdanda', sans-serif;
+    color: black;
+    font-size: 1.2em;
+    font-weight: normal;
+    margin: 0;
+    padding: 0;
+    font-weight:bold;
+    font-style:italic;
+}
+
+div.sphinxsidebar h4 {
+    font-family: 'Verdana', sans-serif;
+    color: black;
+    font-size: 1.1em;
+    font-weight: normal;
+    margin: 5px 0 0 0;
+    padding: 0;
+    font-weight:bold;
+    font-style:italic;
+}
+
+div.sphinxsidebar p {
+    color: black;
+}
+
+div.sphinxsidebar p.topless {
+    margin: 5px 10px 10px 10px;
+}
+
+div.sphinxsidebar ul {
+    margin: 10px;
+    padding: 0;
+    list-style: none;
+    color: black;
+}
+
+div.sphinxsidebar ul ul,
+div.sphinxsidebar ul.want-points {
+    margin-left: 20px;
+    list-style: square;
+}
+
+div.sphinxsidebar ul ul {
+    margin-top: 0;
+    margin-bottom: 0;
+}
+
+div.sphinxsidebar a {
+    color: black;
+}
+
+div.sphinxsidebar form {
+    margin-top: 10px;
+}
+
+div.sphinxsidebar input {
+    border: 1px solid #e2e2e2;
+    font-family: sans-serif;
+    font-size: 1em;
+    padding-bottom: 5px;
+}
+
+/* :::: MODULE CLOUD :::: */
+div.modulecloud {
+    margin: -5px 10px 5px 10px;
+    padding: 10px;
+    line-height: 160%;
+    border: 1px solid #cbe7e5;
+    background-color: #f2fbfd;
+}
+
+div.modulecloud a {
+    padding: 0 5px 0 5px;
+}
+
+/* :::: SEARCH :::: */
+ul.search {
+    margin: 10px 0 0 20px;
+    padding: 0;
+}
+
+ul.search li {
+    padding: 5px 0 5px 20px;
+    background-image: url(file.png);
+    background-repeat: no-repeat;
+    background-position: 0 7px;
+}
+
+ul.search li a {
+    font-weight: bold;
+}
+
+ul.search li div.context {
+    color: #888;
+    margin: 2px 0 0 30px;
+    text-align: left;
+}
+
+ul.keywordmatches li.goodmatch a {
+    font-weight: bold;
+}
+
+/* :::: COMMON FORM STYLES :::: */
+
+div.actions {
+    padding: 5px 10px 5px 10px;
+    border-top: 1px solid #cbe7e5;
+    border-bottom: 1px solid #cbe7e5;
+    background-color: #e0f6f4;
+}
+
+form dl {
+    color: #333;
+}
+
+form dt {
+    clear: both;
+    float: left;
+    min-width: 110px;
+    margin-right: 10px;
+    padding-top: 2px;
+}
+
+input#homepage {
+    display: none;
+}
+
+div.error {
+    margin: 5px 20px 0 0;
+    padding: 5px;
+    border: 1px solid #d00;
+    font-weight: bold;
+}
+
+/* :::: INLINE COMMENTS :::: */
+
+div.inlinecomments {
+    position: absolute;
+    right: 20px;
+}
+
+div.inlinecomments a.bubble {
+    display: block;
+    float: right;
+    background-image: url(style/comment.png);
+    background-repeat: no-repeat;
+    width: 25px;
+    height: 25px;
+    text-align: center;
+    padding-top: 3px;
+    font-size: 0.9em;
+    line-height: 14px;
+    font-weight: bold;
+    color: black;
+}
+
+div.inlinecomments a.bubble span {
+    display: none;
+}
+
+div.inlinecomments a.emptybubble {
+    background-image: url(style/nocomment.png);
+}
+
+div.inlinecomments a.bubble:hover {
+    background-image: url(style/hovercomment.png);
+    text-decoration: none;
+    color: #3ca0a4;
+}
+
+div.inlinecomments div.comments {
+    float: right;
+    margin: 25px 5px 0 0;
+    max-width: 50em;
+    min-width: 30em;
+    border: 1px solid #2eabb0;
+    background-color: #f2fbfd;
+    z-index: 150;
+}
+
+div#comments {
+    border: 1px solid #2eabb0;
+    margin-top: 20px;
+}
+
+div#comments div.nocomments {
+    padding: 10px;
+    font-weight: bold;
+}
+
+div.inlinecomments div.comments h3,
+div#comments h3 {
+    margin: 0;
+    padding: 0;
+    background-color: #2eabb0;
+    color: white;
+    border: none;
+    padding: 3px;
+}
+
+div.inlinecomments div.comments div.actions {
+    padding: 4px;
+    margin: 0;
+    border-top: none;
+}
+
+div#comments div.comment {
+    margin: 10px;
+    border: 1px solid #2eabb0;
+}
+
+div.inlinecomments div.comment h4,
+div.commentwindow div.comment h4,
+div#comments div.comment h4 {
+    margin: 10px 0 0 0;
+    background-color: #2eabb0;
+    color: white;
+    border: none;
+    padding: 1px 4px 1px 4px;
+}
+
+div#comments div.comment h4 {
+    margin: 0;
+}
+
+div#comments div.comment h4 a {
+    color: #d5f4f4;
+}
+
+div.inlinecomments div.comment div.text,
+div.commentwindow div.comment div.text,
+div#comments div.comment div.text {
+    margin: -5px 0 -5px 0;
+    padding: 0 10px 0 10px;
+}
+
+div.inlinecomments div.comment div.meta,
+div.commentwindow div.comment div.meta,
+div#comments div.comment div.meta {
+    text-align: right;
+    padding: 2px 10px 2px 0;
+    font-size: 95%;
+    color: #538893;
+    border-top: 1px solid #cbe7e5;
+    background-color: #e0f6f4;
+}
+
+div.commentwindow {
+    position: absolute;
+    width: 500px;
+    border: 1px solid #cbe7e5;
+    background-color: #f2fbfd;
+    display: none;
+    z-index: 130;
+}
+
+div.commentwindow h3 {
+    margin: 0;
+    background-color: #2eabb0;
+    color: white;
+    border: none;
+    padding: 5px;
+    font-size: 1.5em;
+    cursor: pointer;
+}
+
+div.commentwindow div.actions {
+    margin: 10px -10px 0 -10px;
+    padding: 4px 10px 4px 10px;
+    color: #538893;
+}
+
+div.commentwindow div.actions input {
+    border: 1px solid #2eabb0;
+    background-color: white;
+    color: #135355;
+    cursor: pointer;
+}
+
+div.commentwindow div.form {
+    padding: 0 10px 0 10px;
+}
+
+div.commentwindow div.form input,
+div.commentwindow div.form textarea {
+    border: 1px solid #3c9ea2;
+    background-color: white;
+    color: black;
+}
+
+div.commentwindow div.error {
+    margin: 10px 5px 10px 5px;
+    background-color: #fbe5dc;
+    display: none;
+}
+
+div.commentwindow div.form textarea {
+    width: 99%;
+}
+
+div.commentwindow div.preview {
+    margin: 10px 0 10px 0;
+    background-color: #70d0d4;
+    padding: 0 1px 1px 25px;
+}
+
+div.commentwindow div.preview h4 {
+    margin: 0 0 -5px -20px;
+    padding: 4px 0 0 4px;
+    color: white;
+    font-size: 1.3em;
+}
+
+div.commentwindow div.preview div.comment {
+    background-color: #f2fbfd;
+}
+
+div.commentwindow div.preview div.comment h4 {
+    margin: 10px 0 0 0!important;
+    padding: 1px 4px 1px 4px!important;
+    font-size: 1.2em;
+}
+
+/* :::: SUGGEST CHANGES :::: */
+div#suggest-changes-box input, div#suggest-changes-box textarea {
+    border: 1px solid #ccc;
+    background-color: white;
+    color: black;
+}
+
+div#suggest-changes-box textarea {
+    width: 99%;
+    height: 400px;
+}
+
+
+/* :::: PREVIEW :::: */
+div.preview {
+    background-image: url(style/preview.png);
+    padding: 0 20px 20px 20px;
+    margin-bottom: 30px;
+}
+
+
+/* :::: INDEX PAGE :::: */
+
+table.contentstable {
+    width: 90%;
+}
+
+table.contentstable p.biglink {
+    line-height: 150%;
+}
+
+a.biglink {
+    font-size: 1.3em;
+}
+
+span.linkdescr {
+    font-style: italic;
+    padding-top: 5px;
+    font-size: 90%;
+}
+
+/* :::: INDEX STYLES :::: */
+
+table.indextable td {
+    text-align: left;
+    vertical-align: top;
+}
+
+table.indextable dl, table.indextable dd {
+    margin-top: 0;
+    margin-bottom: 0;
+}
+
+table.indextable tr.pcap {
+    height: 10px;
+}
+
+table.indextable tr.cap {
+    margin-top: 10px;
+    background-color: #f2f2f2;
+}
+
+img.toggler {
+    margin-right: 3px;
+    margin-top: 3px;
+    cursor: pointer;
+}
+
+form.pfform {
+    margin: 10px 0 20px 0;
+}
+
+/* :::: GLOBAL STYLES :::: */
+
+.docwarning {
+    background-color: #ffe4e4;
+    padding: 10px;
+    margin: 0 -20px 0 -20px;
+    border-bottom: 1px solid #f66;
+}
+
+p.subhead {
+    font-weight: bold;
+    margin-top: 20px;
+}
+
+a {
+    color: black;
+    text-decoration: none;
+}
+
+a:hover {
+    text-decoration: underline;
+}
+
+div.body h1,
+div.body h2,
+div.body h3,
+div.body h4,
+div.body h5,
+div.body h6 {
+    font-family: 'Verdana', sans-serif;
+    background-color: white;
+    font-weight: bold;
+    color: black;
+    border-bottom: 1px solid #ccc;
+    margin: 20px -20px 10px -20px;
+    padding: 3px 0 3px 10px;
+}
+
+div.body h1 { margin-top: 0; font-size: 200%; }
+div.body h2 { font-size: 160%; }
+div.body h3 { font-size: 120%; }
+div.body h4 { font-size: 100%; }
+div.body h5 { font-size: 90%; }
+div.body h6 { font-size: 70%; }
+
+a.headerlink {
+    color: #c60f0f;
+    font-size: 0.8em;
+    padding: 0 4px 0 4px;
+    text-decoration: none;
+    visibility: hidden;
+}
+
+h1:hover > a.headerlink,
+h2:hover > a.headerlink,
+h3:hover > a.headerlink,
+h4:hover > a.headerlink,
+h5:hover > a.headerlink,
+h6:hover > a.headerlink,
+dt:hover > a.headerlink {
+    visibility: visible;
+}
+
+a.headerlink:hover {
+    background-color: #c60f0f;
+    color: white;
+}
+
+div.body p, div.body dd, div.body li {
+    text-align: justify;
+    line-height: 130%;
+}
+
+div.body p.caption {
+    text-align: inherit;
+}
+
+div.body td {
+    text-align: left;
+}
+
+ul.fakelist {
+    list-style: none;
+    margin: 10px 0 10px 20px;
+    padding: 0;
+}
+
+.field-list ul {
+    padding-left: 1em;
+}
+
+.first {
+    margin-top: 0 !important;
+}
+
+/* "Footnotes" heading */
+p.rubric {
+    margin-top: 30px;
+    font-weight: bold;
+}
+
+/* "Topics" */
+
+div.topic {
+    background-color: #eee;
+    border: 1px solid #ccc;
+    padding: 0 7px 0 7px;
+    margin: 10px 0 10px 0;
+}
+
+p.topic-title {
+    font-size: 1.1em;
+    font-weight: bold;
+    margin-top: 10px;
+}
+
+/* Admonitions */
+
+div.admonition {
+    margin-top: 10px;
+    margin-bottom: 10px;
+    padding: 7px;
+}
+
+div.admonition dt {
+    font-weight: bold;
+}
+
+div.admonition dl {
+    margin-bottom: 0;
+}
+
+div.admonition p {
+    display: inline;
+}
+
+div.seealso {
+    background-color: #ffc;
+    border: 1px solid #ff6;
+}
+
+div.warning {
+    background-color: #ffe4e4;
+    border: 1px solid #f66;
+}
+
+div.note {
+    background-color: #eee;
+    border: 1px solid #ccc;
+}
+
+p.admonition-title {
+    margin: 0px 10px 5px 0px;
+    font-weight: bold;
+    display: inline;
+}
+
+p.admonition-title:after {
+    content: ":";
+}
+
+div.body p.centered {
+    text-align: center;
+    margin-top: 25px;
+}
+
+table.docutils {
+    border: 0;
+}
+
+table.docutils td, table.docutils th {
+    padding: 1px 8px 1px 0;
+    border-top: 0;
+    border-left: 0;
+    border-right: 0;
+    border-bottom: 1px solid #aaa;
+}
+
+table.field-list td, table.field-list th {
+    border: 0 !important;
+}
+
+table.footnote td, table.footnote th {
+    border: 0 !important;
+}
+
+.field-list ul {
+    margin: 0;
+    padding-left: 1em;
+}
+
+.field-list p {
+    margin: 0;
+}
+
+dl {
+    margin-bottom: 15px;
+    clear: both;
+}
+
+dd p {
+    margin-top: 0px;
+}
+
+dd ul, dd table {
+    margin-bottom: 10px;
+}
+
+dd {
+    margin-top: 3px;
+    margin-bottom: 10px;
+    margin-left: 30px;
+}
+
+.refcount {
+    color: #060;
+}
+
+dt:target,
+.highlight {
+    background-color: #fbe54e;
+}
+
+dl.glossary dt {
+    font-weight: bold;
+    font-size: 1.1em;
+}
+
+th {
+    text-align: left;
+    padding-right: 5px;
+}
+
+pre {
+    padding: 5px;
+    background-color: #efc;
+    color: #333;
+    border: 1px solid #ac9;
+    border-left: none;
+    border-right: none;
+    overflow: auto;
+}
+
+td.linenos pre {
+    padding: 5px 0px;
+    border: 0;
+    background-color: transparent;
+    color: #aaa;
+}
+
+table.highlighttable {
+    margin-left: 0.5em;
+}
+
+table.highlighttable td {
+    padding: 0 0.5em 0 0.5em;
+}
+
+tt {
+    background-color: #ecf0f3;
+    padding: 0 1px 0 1px;
+    font-size: 0.95em;
+}
+
+tt.descname {
+    background-color: transparent;
+    font-weight: bold;
+    font-size: 1.2em;
+}
+
+tt.descclassname {
+    background-color: transparent;
+}
+
+tt.xref, a tt {
+    background-color: transparent;
+    font-weight: bold;
+}
+
+.footnote:target  { background-color: #ffa }
+
+h1 tt, h2 tt, h3 tt, h4 tt, h5 tt, h6 tt {
+    background-color: transparent;
+}
+
+.optional {
+    font-size: 1.3em;
+}
+
+.versionmodified {
+    font-style: italic;
+}
+
+form.comment {
+    margin: 0;
+    padding: 10px 30px 10px 30px;
+    background-color: #eee;
+}
+
+form.comment h3 {
+    background-color: #326591;
+    color: white;
+    margin: -10px -30px 10px -30px;
+    padding: 5px;
+    font-size: 1.4em;
+}
+
+form.comment input,
+form.comment textarea {
+    border: 1px solid #ccc;
+    padding: 2px;
+    font-family: sans-serif;
+    font-size: 100%;
+}
+
+form.comment input[type="text"] {
+    width: 240px;
+}
+
+form.comment textarea {
+    width: 100%;
+    height: 200px;
+    margin-bottom: 10px;
+}
+
+.system-message {
+    background-color: #fda;
+    padding: 5px;
+    border: 3px solid red;
+}
+
+/* :::: PRINT :::: */
+@media print {
+    div.document,
+    div.documentwrapper,
+    div.bodywrapper {
+        margin: 0;
+        width : 100%;
+    }
+
+    div.sphinxsidebar,
+    div.related,
+    div.footer,
+    div#comments div.new-comment-box,
+    #top-link {
+        display: none;
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/.templates/layout.html	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,195 @@
+{%- block doctype -%}
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+  "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+{%- endblock %}
+{%- set reldelim1 = reldelim1 is not defined and ' &raquo;' or reldelim1 %}
+{%- set reldelim2 = reldelim2 is not defined and ' |' or reldelim2 %}
+{%- macro relbar %}
+    <div class="related">
+      <h3>Navigation</h3>
+      <ul>
+        {%- for rellink in rellinks %}
+        <li class="right" {% if loop.first %}style="margin-right: 10px"{% endif %}>
+          <a href="{{ pathto(rellink[0]) }}" title="{{ rellink[1]|striptags }}"
+             accesskey="{{ rellink[2] }}">{{ rellink[3] }}</a>
+          {%- if not loop.first %}{{ reldelim2 }}{% endif %}</li>
+        {%- endfor %}
+        {%- block rootrellink %}
+        <li><a href="{{ pathto('index') }}">{{ shorttitle }}</a>{{ reldelim1 }}</li>
+        {%- endblock %}
+        {%- for parent in parents %}
+          <li><a href="{{ parent.link|e }}" accesskey="U">{{ parent.title }}</a>{{ reldelim1 }}</li>
+        {%- endfor %}
+        {%- block relbaritems %}{% endblock %}
+      </ul>
+    </div>
+{%- endmacro %}
+{%- macro sidebar %}
+      {%- if builder != 'htmlhelp' %}
+      <div class="sphinxsidebar">
+        <div class="sphinxsidebarwrapper">
+          {%- block sidebarlogo %}
+          {%- if logo %}
+            <p class="logo"><img class="logo" src="{{ pathto('_static/' + logo, 1) }}" alt="Logo"/></p>
+          {%- endif %}
+          {%- endblock %}
+          {%- block sidebartoc %}
+          {%- if display_toc %}
+            <h3>Table Of Contents</h3>
+            {{ toc }}
+          {%- endif %}
+          {%- endblock %}
+          {%- block sidebarrel %}
+          {%- if prev %}
+            <h4>Previous topic</h4>
+            <p class="topless"><a href="{{ prev.link|e }}" title="previous chapter">{{ prev.title }}</a></p>
+          {%- endif %}
+          {%- if next %}
+            <h4>Next topic</h4>
+            <p class="topless"><a href="{{ next.link|e }}" title="next chapter">{{ next.title }}</a></p>
+          {%- endif %}
+          {%- endblock %}
+          {%- if sourcename %}
+            <h3>This Page</h3>
+            <ul class="this-page-menu">
+            {%- if builder == 'web' %}
+              <li><a href="#comments">Comments ({{ comments|length }} so far)</a></li>
+              <li><a href="{{ pathto('@edit/' + sourcename)|e }}">Suggest Change</a></li>
+              <li><a href="{{ pathto('@source/' + sourcename)|e }}">Show Source</a></li>
+            {%- elif builder == 'html' %}
+              <li><a href="{{ pathto('_sources/' + sourcename, true)|e }}">Show Source</a></li>
+            {%- endif %}
+            </ul>
+          {%- endif %}
+          {%- if customsidebar %}
+          {{ rendertemplate(customsidebar) }}
+          {%- endif %}
+          {%- block sidebarsearch %}
+          {%- if pagename != "search" %}
+            <h3>{{ builder == 'web' and 'Keyword' or 'Quick' }} search</h3>
+            <form class="search" action="{{ pathto('search') }}" method="get">
+              <input type="text" name="q" size="18" /> <input type="submit" value="Go" />
+              <input type="hidden" name="check_keywords" value="yes" />
+              <input type="hidden" name="area" value="default" />
+            </form>
+            {%- if builder == 'web' %}
+            <p style="font-size: 90%">Enter a module, class or function name.</p>
+            {%- endif %}
+          {%- endif %}
+          {%- endblock %}
+        </div>
+      </div>
+      {%- endif %}
+{%- endmacro -%}
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+  <head>
+    <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+    {%- if builder != 'htmlhelp' %}
+      {%- set titlesuffix = " &mdash; " + docstitle %}
+    {%- endif %}
+    <title>{{ title|striptags }}{{ titlesuffix }}</title>
+    {%- if builder == 'web' %}
+    <link rel="stylesheet" href="{{ pathto('index') }}?do=stylesheet{%
+      if in_admin_panel %}&admin=yes{% endif %}" type="text/css" />
+    {%- for link, type, title in page_links %}
+    <link rel="alternate" type="{{ type|e(true) }}" title="{{ title|e(true) }}" href="{{ link|e(true) }}" />
+    {%- endfor %}
+    {%- else %}
+    <link rel="stylesheet" href="{{ pathto('_static/' + style, 1) }}" type="text/css" />
+    <link rel="stylesheet" href="{{ pathto('_static/pygments.css', 1) }}" type="text/css" />
+    {%- endif %}
+    {%- if builder != 'htmlhelp' %}
+    <script type="text/javascript">
+      var DOCUMENTATION_OPTIONS = {
+          URL_ROOT:    '{{ pathto("", 1) }}',
+          VERSION:     '{{ release }}',
+          COLLAPSE_MODINDEX: false,
+          FILE_SUFFIX: '{{ file_suffix }}'
+      };
+    </script>
+    <script type="text/javascript" src="{{ pathto('_static/jquery.js', 1) }}"></script>
+    <script type="text/javascript" src="{{ pathto('_static/interface.js', 1) }}"></script>
+    <script type="text/javascript" src="{{ pathto('_static/doctools.js', 1) }}"></script>
+    {%- if use_opensearch %}
+    <link rel="search" type="application/opensearchdescription+xml"
+          title="Search within {{ docstitle }}"
+          href="{{ pathto('_static/opensearch.xml', 1) }}"/>
+    {%- endif %}
+    {%- if favicon %}
+    <link rel="shortcut icon" href="{{ pathto('_static/' + favicon, 1) }}"/>
+    {%- endif %}
+    {%- endif %}
+{%- block rellinks %}
+    {%- if hasdoc('about') %}
+    <link rel="author" title="About these documents" href="{{ pathto('about') }}" />
+    {%- endif %}
+    <link rel="contents" title="Global table of contents" href="{{ pathto('contents') }}" />
+    <link rel="index" title="Global index" href="{{ pathto('genindex') }}" />
+    <link rel="search" title="Search" href="{{ pathto('search') }}" />
+    {%- if hasdoc('copyright') %}
+    <link rel="copyright" title="Copyright" href="{{ pathto('copyright') }}" />
+    {%- endif %}
+    <link rel="top" title="{{ docstitle }}" href="{{ pathto('index') }}" />
+    {%- if parents %}
+    <link rel="up" title="{{ parents[-1].title|striptags }}" href="{{ parents[-1].link|e }}" />
+    {%- endif %}
+    {%- if next %}
+    <link rel="next" title="{{ next.title|striptags }}" href="{{ next.link|e }}" />
+    {%- endif %}
+    {%- if prev %}
+    <link rel="prev" title="{{ prev.title|striptags }}" href="{{ prev.link|e }}" />
+    {%- endif %}
+{%- endblock %}
+{%- block extrahead %}{% endblock %}
+  </head>
+  <body>
+
+{% block logilablogo %}
+<div class="logilablogo">
+	<a class="logogo" href="http://www.logilab.org"><img border="0" src="{{ pathto('_static/logilab.png', 1) }}"/></a>
+  </div>
+{% endblock %}
+
+{%- block relbar1 %}{{ relbar() }}{% endblock %}
+
+{%- block sidebar1 %}{# possible location for sidebar #}{% endblock %}
+
+{%- block document %}
+    <div class="document">
+      <div class="documentwrapper">
+      {%- if builder != 'htmlhelp' %}
+        <div class="bodywrapper">
+      {%- endif %}
+          <div class="body">
+            {% block body %}{% endblock %}
+          </div>
+      {%- if builder != 'htmlhelp' %}
+        </div>
+      {%- endif %}
+      </div>
+{%- endblock %}
+
+{%- block sidebar2 %}{{ sidebar() }}{% endblock %}
+      <div class="clearer"></div>
+    </div>
+
+{%- block relbar2 %}{{ relbar() }}{% endblock %}
+
+{%- block footer %}
+    <div class="footer">
+    {%- if hasdoc('copyright') %}
+      &copy; <a href="{{ pathto('copyright') }}">Copyright</a> {{ copyright }}.
+    {%- else %}
+      &copy; Copyright {{ copyright }}.
+    {%- endif %}
+    {%- if last_updated %}
+      Last updated on {{ last_updated }}.
+    {%- endif %}
+    {%- if show_sphinx %}
+      Created using <a href="http://sphinx.pocoo.org/">Sphinx</a>.
+    {%- endif %}
+    </div>
+{%- endblock %}
+  </body>
+</html>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/Makefile	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,70 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS    =
+SPHINXBUILD   = sphinx-build
+PAPER         =
+
+# Internal variables.
+PAPEROPT_a4     = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS   = -d build/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help clean html web pickle htmlhelp latex changes linkcheck
+
+help:
+	@echo "Please use \`make <target>' where <target> is one of"
+	@echo "  html      to make standalone HTML files"
+	@echo "  pickle    to make pickle files (usable by e.g. sphinx-web)"
+	@echo "  htmlhelp  to make HTML files and a HTML help project"
+	@echo "  latex     to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+	@echo "  changes   to make an overview over all changed/added/deprecated items"
+	@echo "  linkcheck to check all external links for integrity"
+
+clean:
+	-rm -rf build/*
+
+html:
+	mkdir -p build/html build/doctrees
+	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) build/html
+	@echo
+	@echo "Build finished. The HTML pages are in build/html."
+
+pickle:
+	mkdir -p build/pickle build/doctrees
+	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) build/pickle
+	@echo
+	@echo "Build finished; now you can process the pickle files or run"
+	@echo "  sphinx-web build/pickle"
+	@echo "to start the sphinx-web server."
+
+web: pickle
+
+htmlhelp:
+	mkdir -p build/htmlhelp build/doctrees
+	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) build/htmlhelp
+	@echo
+	@echo "Build finished; now you can run HTML Help Workshop with the" \
+	      ".hhp project file in build/htmlhelp."
+
+latex:
+	mkdir -p build/latex build/doctrees
+	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) build/latex
+	@echo
+	@echo "Build finished; the LaTeX files are in build/latex."
+	@echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \
+	      "run these through (pdf)latex."
+
+changes:
+	mkdir -p build/changes build/doctrees
+	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) build/changes
+	@echo
+	@echo "The overview file is in build/changes."
+
+linkcheck:
+	mkdir -p build/linkcheck build/doctrees
+	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) build/linkcheck
+	@echo
+	@echo "Link check complete; look for any errors in the above output " \
+	      "or in build/linkcheck/output.txt."
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/argouml.log	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+2008-11-03 17:30:53,473  WARN: Unable to load configuration /home/adim/argo.user.properties (?:?)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/conf.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,179 @@
+# -*- coding: utf-8 -*-
+#
+# Cubicweb documentation build configuration file, created by
+# sphinx-quickstart on Fri Oct 31 09:10:36 2008.
+#
+# This file is execfile()d with the current directory set to its containing dir.
+#
+# The contents of this file are pickled, so don't put values in the namespace
+# that aren't pickleable (module imports are okay, they're removed automatically).
+#
+# All configuration values have a default value; values that are commented out
+# serve to show the default value.
+
+import sys, os
+
+# If your extensions are in another directory, add it here. If the directory
+# is relative to the documentation root, use os.path.abspath to make it
+# absolute, like shown here.
+#sys.path.append(os.path.abspath('some/directory'))
+
+# General configuration
+# ---------------------
+
+# Add any Sphinx extension module names here, as strings. They can be extensions
+# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+extensions = []
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['.templates']
+
+# The suffix of source filenames.
+source_suffix = '.txt'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General substitutions.
+project = 'Cubicweb'
+copyright = '2008, Logilab Inc.'
+
+# The default replacements for |version| and |release|, also used in various
+# other places throughout the built documents.
+#
+# The short X.Y version.
+version = '0.54'
+# The full version, including alpha/beta/rc tags.
+release = '2'
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+today_fmt = '%B %d, %Y'
+
+# List of documents that shouldn't be included in the build.
+#unused_docs = []
+
+# List of directories, relative to source directories, that shouldn't be searched
+# for source files.
+#exclude_dirs = []
+
+# The reST default role (used for this markup: `text`) to use for all documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+
+# Options for HTML output
+# -----------------------
+
+# The style sheet to use for HTML and HTML Help pages. A file of that name
+# must exist either in Sphinx' static/ path, or in one of the custom paths
+# given in html_static_path.
+html_style = 'sphinx-default.css'
+
+# The name for this set of Sphinx documents.  If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = None
+
+# A shorter title for the navigation bar.  Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (within the static path) to place at the top of
+# the sidebar.
+#html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['.static']
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_use_modindex = True
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, the reST sources are included in the HTML build as _sources/<name>.
+#html_copy_source = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it.  The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = ''
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'Cubicwebdoc'
+
+
+# Options for LaTeX output
+# ------------------------
+
+# The paper size ('letter' or 'a4').
+#latex_paper_size = 'letter'
+
+# The font size ('10pt', '11pt' or '12pt').
+#latex_font_size = '10pt'
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, document class [howto/manual]).
+latex_documents = [
+  ('index', 'Cubicweb.tex', 'Cubicweb Documentation',
+   'Logilab Inc.', 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# Additional stuff for the LaTeX preamble.
+#latex_preamble = ''
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_use_modindex = True
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/cubicweb-uml.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,14 @@
+.. -*- coding: utf-8 -*-
+
+
+Diagramme UML
+=============
+
+.. image:: cubicweb.png
+
+`Diagramme ArgoUML`_
+
+[FIXME]
+Make a downloadable source of zargo file.
+
+.. _`Diagramme ArgoUML`: cubicweb.zargo
Binary file doc/cubicweb.png has changed
Binary file doc/cubicweb.zargo has changed
Binary file doc/cubicweb.zargo~0.14.1 has changed
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/devmanual_fr/advanced_notes.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,6 @@
+.. -*- coding: utf-8 -*-
+
+La différence entre la classe `AppRsetObject` et la classe `AppObject` est que
+les instances de la première sont sélectionnées pour une requête et un "result
+set" alors que les secondes ne sont sélectionnées qu'en fonction de leur
+identifiant.
Binary file doc/devmanual_fr/archi_globale.dia has changed
Binary file doc/devmanual_fr/archi_globale.png has changed
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/devmanual_fr/chap_autres_composants_ui.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,14 @@
+Autres composants de l'interface web
+====================================
+
+Actions
+-------
+XXXFILLME
+
+Component, VComponent
+---------------------
+XXXFILLME
+
+EProperty
+---------
+XXXFILLME
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/devmanual_fr/chap_bases_framework_cubicweb.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,228 @@
+.. -*- coding: utf-8 -*-
+
+Fondements du framework CubicWeb
+================================
+
+Le moteur web de cubicweb consiste en quelques classes gérant un ensemble d'objets
+chargés dynamiquement au lancement de cubicweb. Ce sont ces objets dynamiques, issus
+du modèle ou de la librairie, qui construisent le site web final. Les différents
+composants dynamiques sont par exemple : 
+
+* coté client et serveur
+
+ - les définitions d'entités, contenant la logique permettant la manipulation des
+   données de l'application
+
+* coté client
+
+  - les *vues* , ou encore plus spécifiquement 
+
+    - les boites
+    - l'en-tête et le pied de page
+    - les formulaires
+    - les gabarits de pages
+
+  - les *actions*
+  - les *contrôleurs*
+
+* coté serveur
+
+  - les crochets de notification
+  - les vues de notification
+
+Les différents composants du moteur sont :
+
+* un frontal web (seul twisted disponible pour le moment), transparent du point
+  de vue des objets dynamiques
+* un objet encapsulant la configuration
+* un `vregistry` (`cubicweb.cwvreg`) contenant les objets chargés dynamiquements
+
+
+Détail de la procédure d'enregistrement
+---------------------------------------
+Au démarrage le `vregistry` ou base de registres inspecte un certain nombre de
+répertoires à la recherche de définitions de classes "compatibles". Après une
+procédure d'enregistrement les objets sont affectés dans différents registres
+afin d'être ensuite sélectionnés dynamiquement pendant le fonctionnement de
+l'application.
+
+La classe de base de tout ces objets est la classe `AppRsetObject` (module
+`cubicweb.common.appobject`). 
+
+
+API Python/RQL
+--------------
+
+Inspiré de la db-api standard, avec un object Connection possédant les méthodes
+cursor, rollback et commit principalement. La méthode importante est la méthode
+`execute` du curseur :
+
+`execute(rqlstring, args=None, eid_key=None, build_descr=True)`
+
+:rqlstring: la requête rql à exécuter (unicode)
+:args: si la requête contient des substitutions, un dictionnaire contenant les
+       valeurs à utiliser
+:eid_key: 
+   un détail d'implémentation du cache de requêtes RQL fait que si une substitution est
+   utilisée pour introduire un eid *levant des ambiguités dans la résolution de
+   type de la requête*, il faut spécifier par cet argument la clé correspondante
+   dans le dictionnaire
+
+C'est l'objet Connection qui possède les méthodes classiques `commit` et
+`rollback`. Vous ne *devriez jamais avoir à les utiliser* lors du développement
+d'interface web sur la base du framework CubicWeb étant donné que la fin de la
+transaction est déterminée par celui-ci en fonction du succès d'éxécution de la
+requête. 
+
+NOTE : lors de l'exécution de requêtes de modification (SET,INSERT,DELETE), si une
+requête génère une erreur liée à la sécurité, un rollback est systématiquement
+effectué sur la transaction courante.
+
+
+La classe `Request` (`cubicweb.web`)
+------------------------------------
+Une instance de requête est créée lorsque une requête HTTP est transmise au
+serveur web. Elle contient des informations telles que les paramètres de
+formulaires, l'utilisateur connecté, etc. 
+
+**De manière plus générale une requête représente une demande d'un utilisateur,
+que ce soit par HTTP ou non (on parle également de requête rql côté serveur par
+exemple)**
+
+Une instance de la classe `Request` possède les attributs :
+
+* `user`, instance de`cubicweb.common.utils.User` correspondant à l'utilisateur
+  connecté 
+* `form`, dictionnaire contenant les valeurs de formulaire web
+* `encoding`, l'encodage de caractère à utiliser dans la réponse
+
+Mais encore :
+
+:Gestion des données de session:        
+  * `session_data()`, retourne un dictionnaire contenant l'intégralité des
+    données de la session
+  * `get_session_data(key, default=None)`, retourne la valeur associée à
+    la clé ou la valeur `default` si la clé n'est pas définie
+  * `set_session_data(key, value)`, associe une valeur à une clé
+  * `del_session_data(key)`,  supprime la valeur associée à une clé
+    
+
+:Gestion de cookie:
+  * `get_cookie()`, retourne un dictionnaire contenant la valeur de l'entête
+    HTTP 'Cookie'
+  * `set_cookie(cookie, key, maxage=300)`, ajoute un en-tête HTTP `Set-Cookie`,
+    avec une durée de vie de 5 minutes par défaut (`maxage` = None donne un cookie
+    *de session* expirant quand l'utilisateur ferme son navigateur)
+  * `remove_cookie(cookie, key)`, fait expirer une valeur
+
+:Gestion d'URL:
+  * `url()`, retourne l'url complète de la requête HTTP
+  * `base_url()`, retourne l'url de la racine de l'application
+  * `relative_path()`, retourne chemin relatif de la requête
+
+:Et encore...:
+  * `set_content_type(content_type, filename=None)`, place l'en-tête HTTP
+    'Content-Type'
+  * `get_header(header)`, retourne la valeur associée à un en-tête HTTP
+    arbitraire de la requête
+  * `set_header(header, value)`, ajoute un en-tête HTTP arbitraire dans la
+    réponse 
+  * `cursor()` retourne un curseur RQL sur la session
+  * `execute(*args, **kwargs)`, raccourci vers .cursor().execute()
+  * `property_value(key)`, gestion des propriétés (`EProperty`)
+  * le dictionnaire `data` pour stocker des données pour partager de
+    l'information entre les composants *durant l'exécution de la requête*.
+
+A noter que cette classe est en réalité abstraite et qu'une implémentation
+concrète sera fournie par le *frontend* web utilisé (en l'occurrence *twisted*
+aujourd'hui). Enfin pour les vues ou autres qui sont exécutés côté serveur,
+la majeure partie de l'interface de `Request` est définie sur la session
+associée au client. 
+
+
+La classe `AppObject`
+---------------------
+
+En général :
+
+* on n'hérite pas directement des cette classe mais plutôt d'une classe
+  plus spécifique comme par exemple `AnyEntity`, `EntityView`, `AnyRsetView`,
+  `Action`...
+
+* pour être enregistrable, une classe fille doit définir son registre (attribut
+  `__registry__`) et son identifiant (attribut `id`). Généralement on n'a pas à
+  s'occuper du registre, uniquement de l'identifiant `id` :) 
+
+On trouve un certain nombre d'attributs et de méthodes définis dans cette classe
+et donc commune à tous les objets de l'application :
+
+A l'enregistrement, les attributs suivants sont ajoutés dynamiquement aux
+*classes* filles:
+
+* `vreg`, le `vregistry` de l'application
+* `schema`, le schéma de l'application
+* `config`, la configuration de l'application
+
+On trouve également sur les instances les attributs :
+
+* `req`, instance de `Request`
+* `rset`, le "result set" associé à l'objet le cas échéant
+* `cursor`, curseur rql sur la session
+
+
+:Gestion d'URL:
+  * `build_url(method=None, **kwargs)`, retourne une URL absolue construite à
+    partir des arguments donnés. Le *contrôleur* devant gérer la réponse
+    peut-être spécifié via l'argument spécial `method` (le branchement est
+    théoriquement bien effectué automatiquement :).
+
+  * `datadir_url()`, retourne l'url du répertoire de données de l'application
+    (contenant les fichiers statiques tels que les images, css, js...)
+
+  * `base_url()`, raccourci sur `req.base_url()`
+
+  * `url_quote(value)`, version *unicode safe* de la fonction `urllib.quote`
+
+:Manipulation de données:
+
+  * `etype_rset(etype, size=1)`, raccourci vers `vreg.etype_rset()`
+
+  * `eid_rset(eid, rql=None, descr=True)`, retourne un objet result set pour
+    l'eid donné
+  * `entity(row, col=0)`, retourne l'entité correspondant à la position donnée
+    du "result set" associé à l'objet
+
+  * `complete_entity(row, col=0, skip_bytes=True)`, équivalent à `entity` mais
+    appelle également la méthode `complete()` sur l'entité avant de la retourner
+
+:Formattage de données:
+  * `format_date(date, date_format=None, time=False)`
+  * `format_time(time)`,
+
+:Et encore...:
+
+  * `external_resource(rid, default=_MARKER)`, accède à une valeur définie dans
+    le fichier de configuration `external_resource`
+    
+  * `tal_render(template, variables)`, 
+
+
+**NOTE IMPORTANTE**
+Lorsqu'on hérite d'`AppObject` (même indirectement), il faut **toujours**
+utiliser **super()** pour récupérer les méthodes et attributs des classes
+parentes, et pas passer par l'identifiant de classe parente directement.
+(sous peine de tomber sur des bugs bizarres lors du rechargement automatique
+des vues). Par exemple, plutôt que d'écrire::
+
+      class Truc(PrimaryView):
+          def f(self, arg1):
+              PrimaryView.f(self, arg1)
+
+Il faut écrire::
+      
+      class Truc(PrimaryView):
+          def f(self, arg1):
+              super(Truc, self).f(arg1)
+
+
+XXX FILLME diagramme interaction application/controller/template/view
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/devmanual_fr/chap_configuration_instance.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,162 @@
+.. -*- coding: utf-8 -*-
+
+Configuration d'une instance
+============================
+
+À la création d'une instance, un fichier de configuration est généré dans ::
+
+   $(CW_REGISTRY)/<instance>/<nom configuration>.conf
+
+par exemple ::
+
+   /etc/cubicweb.d/jpl/all-in-one.conf
+
+C'est un simple fichier texte au format INI. Dans la description suivante,
+chaque nom d'option est préfixé de sa section et suivi de sa valeur par défaut
+le cas échéant, e.g. "`<section>.<option>` [valeur]".
+
+
+Configuration du serveur web
+----------------------------
+:`web.auth-mode` [cookie]: 
+   mode d'authentification, cookie ou http
+:`web.realm`: 
+   realm de l'application en mode d'authentification http
+:`web.http-session-time` [0]:
+   délai d'inactivité d'une session HTTP avant sa fermeture automatique. Durée
+   en secondes, 0 signifiant pas d'expiration (ou plus exactement lors de la
+   fermeture du navigateur du client)
+
+:`main.anonymous-user`, `main.anonymous-password`:
+   login et mot de passe à utiliser pour se connecter au serveur RQL lors des
+   connexions HTTP anonymes. Il faut que le compte EUser associé existe.
+
+:`main.base-url`:
+   url de base du site, à utiliser pour générer les urls des pages web
+
+Configuration https
+```````````````````
+Il est possible de rendre un site accessible en http pour les connexions
+anonymes et en https pour les utilisateurs authentifiés. Il faut pour cela
+utiliser apache (par ex.) pour la redirection et la variable `main.https-url` du
+fichier de configuration.
+
+:Exemple:
+
+  pour une redirection apache d'un site accessible via `http://localhost/demo`
+  et `https://localhost/demo` et qui tourne en réalité sur le port 8080, il 
+  faut avoir pour la version http : ::
+
+    RewriteCond %{REQUEST_URI} ^/demo
+    RewriteRule ^/demo$ /demo/
+    RewriteRule ^/demo/(.*) http://127.0.0.1:8080/$1 [L,P]
+  
+  et pour la version https : ::
+
+    RewriteCond %{REQUEST_URI} ^/demo
+    RewriteRule ^/demo$ /demo/
+    RewriteRule ^/demo/(.*) http://127.0.0.1:8080/https/$1 [L,P]
+
+
+  et on aura dans le fichier all-in-one.conf de l'instance : ::
+
+    base-url = http://localhost/demo
+    https-url = https://localhost/demo
+
+Configuration de l'interface web
+--------------------------------
+:`web.embed-allowed`:
+   expression régulière correspondant aux sites pouvant être "incorporé" dans
+   le site (controleur 'embed')
+:`web.submit-url`:
+   url à laquelle les bugs rencontrés dans l'application peuvent être postés
+
+
+Configuration du serveur RQL
+----------------------------
+:`main.host`:
+   nom de l'hôte s'il ne peut être détecté correctement
+:`main.pid-file`:
+   fichier où sera écrit le pid du serveur
+:`main.uid`:
+   compte utilisateur à utiliser pour le lancement du serveur quand il est
+   lancé en root par init
+:`main.session-time [30*60]`:
+   temps d'expiration d'une session RQL
+:`main.query-log-file`:
+   fichier dans lequel écrire toutes les requêtes RQL éxécutées par le serveur
+
+
+Configuration Pyro pour l'instance
+-----------------------------------
+Coté serveur web :
+
+:`pyro-client.pyro-application-id`: 
+   identifiant pyro du serveur RQL (e.g. le nom de l'instance)
+
+Coté serveur RQL :
+
+:`pyro-server.pyro-port`:
+   numéro de port pyro. Si aucune valeur n'est spécifiée, un port est attribué
+   automatiquement.
+
+Coté serveur RQL et serveur web :
+
+:`pyro-name-server.pyro-ns-host`:
+   nom de l'hôte hébergeant le serveur de nom pyro. Si aucune valeur n'est
+   spécifiée, il est localisé par une requête de broadcast
+:`pyro-name-server.pyro-ns-group` [cubicweb]:
+   groupe pyro sous lequel enregistrer l'application
+
+
+Configuration courriel
+----------------------
+Coté serveur RQL et serveur web :
+
+:`email.mangle-emails [no]`:
+   indique si les adresses email doivent être affichées telles quelles ou
+   transformées
+
+Coté serveur RQL :
+
+:`email.smtp-host [mail]`:
+   nom de l'hôte hébergeant le serveur SMTP à utiliser pour le courriel sortant
+:`email.smtp-port [25]`:
+   port du serveur SMTP à utiliser pour le courriel sortant
+:`email.sender-name`:
+   nom à utiliser pour les courriels sortant de l'application
+:`email.sender-addr`:
+   adresse à utiliser pour les courriels sortant de l'application
+:`email.default-dest-addrs`:
+   adresses de destination par défaut, si utilisé par la configuration de la 
+   diffusion du modèle (séparées par des virgules)
+:`email.supervising-addrs`:
+   adresses de destination des courriels de supervision (séparées par des 
+   virgules)
+
+
+Configuration journalisation
+----------------------------
+:`main.log-threshold`:
+   niveau de filtrage des messages (DEBUG, INFO, WARNING, ERROR)
+:`main.log-file`:
+   fichier dans lequel écrire les messages
+
+
+Configuration Eproperties
+-------------------------
+D'autres paramètres de configuration sont sous la forme d'entités `EProperty`
+dans la base de données. Il faut donc les éditer via l'interface web ou par des
+requêtes rql.
+
+:`ui.encoding`:
+   encodage de caractères à utiliser pour l'interface web
+:`navigation.short-line-size`: # XXX should be in ui
+   nombre de caractères maximum pour les affichages "courts"
+:`navigation.page-size`:
+   nombre d'entités maximum à afficher par page de résultat
+:`navigation.related-limit`:
+   nombre d'entités liées maximum à afficher sur la vue primaire d'une entité
+:`navigation.combobox-limit`:
+   nombre d'entités non liées maximum à afficher sur les listes déroulantes de
+   la vue d'édition d'une entité
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/devmanual_fr/chap_creation_instance.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,13 @@
+.. -*- coding: utf-8 -*-
+
+
+=======================
+Création d'une instance
+=======================
+
+.. toctree::
+   :maxdepth: 1
+
+   sect_installation.txt
+   sect_cubicweb-ctl.txt
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/devmanual_fr/chap_definition_schema.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,21 @@
+.. -*- coding: utf-8 -*-
+
+Définition du modèle de données (*schéma*)
+==========================================
+
+Le schéma est l'élément central d'une application CubicWeb, définissant le modèle
+de données manipulé. Il est généralement défini à partir de type d'entités
+existants dans la librairie et d'autres spécifiques, généralement décrites dans
+un ou plusieurs fichiers python dans le sous-répertoire `schema` du modèle.
+
+A ce niveau il est important de noter la différence entre type de relation et
+définition de relation : un type de relation est uniquement un nom de relation
+avec éventuellement quelques propriétés supplémentaires (voir plus bas), alors
+qu'une définition de relation est un triplet complet "<type d'entité sujet>
+<type de relation> <type d'entité objet>". Eventuellement un type de relation
+sera créé implicitement si aucun n'est associé à une définition de relation du
+schema.
+
+.. include:: sect_stdlib_schemas.txt
+.. include:: sect_definition_schema.txt
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/devmanual_fr/chap_definition_workflows.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,18 @@
+.. -*- coding: utf-8 -*-
+
+Définition de workflow
+======================
+On peut mettre une condition rql ou/et un groupe auquel doit appartenir l'utilisateur.
+
+Si on met à la fois un(ou plusieurs) groupe et une condition RQL, il faut que les deux soient respectés.
+
+Si on met plusieurs groupes, il faut que l'utilisateur soit dans un des groupes.
+
+Pour la condition RQL sur une transition, on peut y mettre les substitutions suivantes :
+
+* `%(eid)s`, eid de l'objet
+* `%(ueid)s`, eid de l'utilisateur qui fait la requête
+* `%(seid)s`, eid de l'état courant de l'objet
+
+Dans le script de création d'un workflow, penser à mettre `_()` autour des noms d'états et de transitions
+pour que ceux-ci soient pris en compte par les scripts de gestion des catalogues i18n.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/devmanual_fr/chap_fondements_cubicweb.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,169 @@
+.. -*- coding: utf-8 -*-
+
+Fondements `CubicWeb`
+=====================
+
+Architecture globale
+--------------------
+.. image:: archi_globale.png
+
+**Note**: en pratique la partie cliente et la partie serveur sont
+généralement intégrées dans le même processus et communiquent donc
+directement, sans nécessiter des appels distants via Pyro. Il est
+cependant important de retenir que ces deux parties sont disjointes
+et qu'il est même possible d'en exécuter plusieurs exemplaires dans
+des processus distincts pour répartir la charge globale d'un site
+sur une ou plusieurs machines.
+
+Concepts et vocabulaire
+-----------------------
+
+*schéma*
+  le schéma définit le modèle de données d'une application sous forme
+  d'entités et de relations, grâce à la bibliothèque `yams`_. C'est
+  l'élément central d'une application. Il est initialement défini sur
+  le système de fichiers et est stocké dans la base de données lors de
+  la création d'une instance. `CubicWeb` fournit un certain nombre de
+  types d'entités inclus systématiquement car nécessaires au noyau
+  `CubicWeb` et une librairie de composants devant être inclus
+  explicitement le cas échéant.
+
+*type d'entité* 
+  une entité est un ensemble d'attributs ; l'attribut de
+  base de toute entité, qui est sa clef, est l'eid
+
+*type de relation*
+  les entités sont liées entre elles par des relations. Dans cubicweb les
+  relations sont binaires : par convention on nomme le premier terme
+  d'une relation son 'sujet' et le second son 'objet'.
+
+*type d'entité final*
+  les types finaux correspondent aux types de bases comme les chaînes
+  de caractères, les nombres entiers... Une propriété de ces types est
+  qu'ils ne peuvent être utilisés qu'uniquement comme objet d'une
+  relation. Les attributs d'une entité (non finale) sont des entités
+  (finales).
+
+*type de relation finale*
+  une relation est dite finale si son objet est un type final. Cela revient à
+  un attribut d'une entité.
+
+*entrepôt*
+  ou *repository*, c'est la partie serveur RQL de `CubicWeb`. Attention à ne pas
+  confondre avec un entrepôt mercurial ou encore un entrepôt debian.
+
+*source*
+  une source de données est un conteneur de données quelconque (SGBD, annuaire
+  LDAP...) intégré par l'entrepôt `CubicWeb`. Un entrepôt possède au moins une source
+  dite "system" contenant le schéma de l'application, l'index plein-texte et
+  d'autres informations vitales au système.
+
+*configuration*
+  il est possible de créer différentes configurations pour une instance :
+
+  - ``repository`` : entrepôt uniquement, accessible pour les clients via Pyro
+  - ``twisted`` : interface web uniquement, communiquant avec un entrepôt via Pyro
+  - ``all-in-one`` : interface web et entrepôt dans un seul processus. L'entrepôt
+     peut ou non être accessible via Pyro
+
+*composant*
+  un composant est un modèle regroupant un ou plusieurs types de données et/ou
+  des vues afin de fournir une fonctionalité précise, ou une application `CubicWeb`
+  complète utilisant éventuellement d'autres composants. Les différents
+  composants disponibles sur une machine sont installés dans
+  `/usr/share/cubicweb/cubes`
+
+*instance*
+  une instance est une installation spécifique d'un template. Par exemple 
+  intranet/jpl et logilab.org sont deux instances du composant jpl. Les
+  instances sont définies dans le répertoire `/etc/cubicweb.d`.
+
+*application*
+  le mot application est utilisé parfois pour parler d'une instance et parfois
+  d'un composant, en fonction du contexte... Mieux vaut donc éviter de
+  l'utiliser et utiliser plutôt *composant* et *instance*.
+
+*result set*
+  objet encapsulant les résultats d'une requête RQL et des informations sur
+  cette requête.
+
+*Pyro*
+  `Python Remote Object`_, système d'objets distribués pur Python similaire à
+  Java's RMI (Remote Method Invocation), pouvant être utilisé pour la
+  communication entre la partie web du framework et l'entrepôt RQL.
+
+.. _`Python Remote Object`: http://pyro.sourceforge.net/
+.. _`yams`: http://www.logilab.org/project/name/yams/
+
+
+Structure standard d'un composant
+---------------------------------
+
+Un composant complexe est structuré selon le modèle suivant :
+
+::
+  
+  moncomposant/
+  |
+  |-- schema.py
+  |
+  |-- entities/
+  |
+  |-- sobjects/
+  |
+  |-- views/
+  |
+  |-- test/
+  |
+  |-- i18n/
+  |
+  |-- data/
+  |
+  |-- migration/
+  | |- postcreate.py
+  | \- depends.map
+  |
+  |-- debian/
+  |
+  \-- __pkginfo__.py
+    
+On peut utiliser de simple module python plutôt que des répertoires (packages),
+par ex.:
+
+::
+  
+  moncomposant/
+  |
+  |-- entities.py
+  |-- hooks.py
+  \-- views.py
+    
+
+où :
+
+* ``schema`` contient la définition du schéma (coté serveur uniquement)
+* ``entities`` contient les définitions d'entités (coté serveur et interface web)
+* ``sobjects`` contient les crochets et/ou vues de notification (coté serveur
+  uniquement) 
+* ``views`` contient les différents composants de l'interface web (coté interface
+  web uniquement)  
+* ``test`` contient les tests spécifiques à l'application (non installé)
+* ``i18n`` contient les catalogues de messages pour les langues supportées (coté
+  serveur et interface web) 
+* ``data`` contient des fichiers de données arbitraires servis statiquement
+  (images, css, fichiers javascripts)... (coté interface web uniquement)
+* ``migration`` contient le fichier d'initialisation de nouvelles instances
+  (``postcreate.py``) et généralement un fichier donnant les dépendances `CubicWeb` du
+  composant en fonction de la version de celui-ci (``depends.map``)
+* ``debian`` contient les fichiers contrôlant le packaging debian (vous y
+  trouverez les fichiers classiques ``control``, ``rules``, ``changelog``... (non
+  installé) 
+* le fichier ``__pkginfo__.py`` donne un certain nombre de méta-données sur le
+  composant, notamment le nom de la distribution et la version courante (coté
+  serveur et interface web) ou encore les sous-composants utilisés par ce
+  composant. 
+
+Le strict minimum étant :
+
+* le fichier ``__pkginfo__.py``
+* la définition du schéma
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/devmanual_fr/chap_i18n.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,66 @@
+Internationalisation
+====================
+
+Le système d'internationalisation de l'interface web de cubicweb est basé sur le
+système `GNU gettext`_.
+
+.. _`GNU gettext`: http://www.gnu.org/software/gettext/
+
+Messages à internationaliser
+----------------------------
+
+Marquage des messages à internaliser
+````````````````````````````````````
+Les chaines de caractères à internationaliser sont marquées par l'appel à la
+fonction `_` *OU* par la méthode équivalente de la requête dans le code python ou
+dans les expressions python de template TAL. 
+
+Dans les templates cubicweb-tal, il est également possible d'insérer une chaine à
+traduire via les balises `i18n:content` et  `i18n:replace`.
+
+De plus des messages correspondant aux entités/relations utilisés par le schéma
+de l'application seront automatiquement ajoutés.
+
+Renvoi d'un message internationalisé lors de la construction d'une page
+```````````````````````````````````````````````````````````````````````
+La fonction *built-in* `_` ne doit servir qu'**à marquer les messages à
+traduire**, non pas à récupérer une traduction. Il faut pour cela utiliser la
+méthode `_` de l'objet requête, sans quoi vous récupérerez l'identifiant de
+message au lieu de sa traduction dans la langue propre à la requête.
+
+
+Gestion des catalogues de traduction
+------------------------------------
+Une fois l'application rendu internationalisable coté code, reste à gérer les
+catalogues de traductions. cubicweb-ctl intègre pour cela les commandes suivantes : 
+
+* `i18nlibupdate`, met à jour les catalogues de messages *de la librairie
+  cubicweb*. Sauf si vous développez sur le framework (et non votre propre
+  application), vous ne devriez pas avoir à utiliser cette commande
+
+* `i18nupdate`, met à jour les catalogues de messages *du composant* (ou de tous
+  les composants). A la suite de cette commande, vous devez mettre à jour les
+  fichiers de traduction *.po* dans le sous-répertoire "i18n" de votre
+  template. Évidemment les traductions précédentes toujours utilisées ont été
+  conservées.
+
+* `i18ncompile`, recompile les catalogues de messages *d'une instance* (ou de
+  toutes les instances) après mise à jour des catalogues de son composant. Cela
+  est effectué automatiquement lors d'une création ou d'une mise à jour. Les
+  catalogues de messages compilés se trouvent dans le répertoire
+  "i18n/<lang>/LC_MESSAGES/cubicweb.mo" de l'application où `lang` est
+  l'identifiant de la langue sur 2 lettres ('en' ou 'fr' par exemple)
+
+
+Le cas classique
+````````````````
+Vous avez ajouté et/ou modifié des messages d'un composant utilisé par votre
+application (en ajoutant une nouvelle vue ou en ayant modifié le schéma par
+exemple) :
+
+1. `cubicweb-ctl i18nupdate <composant>`
+2. éditer les fichiers <composant>/xxx.po pour y rajouter les traductions
+   manquantes (`msgstr` vide) 
+3. `hg ci -m "updated i18n catalogs"`
+4. `cubicweb-ctl i18ncompile <monapplication>`
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/devmanual_fr/chap_manipulation_donnees.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,142 @@
+.. -*- coding: utf-8 -*-
+
+
+Manipulation des données stockées
+=================================
+
+Les classes `Entity` et `AnyEntity`
+-----------------------------------
+Pour fournir un comportement spécifique à un type d'entité, il suffit de définir
+une classe héritant de la classe `ginco.entities.AnyEntity`. En général il faut
+définir ces classes dans un module du package `entities` d'une application pour 
+qu'elle soit disponible à la fois coté serveur et coté client.
+
+La classe `AnyEntity` est une classe chargée dynamiquement héritant de la classe
+de base `Entity` (`ginco.common.entity`). On définit une sous-classe pour
+ajouter des méthodes ou spécialiser les comportements d'un type d'entité donné.
+
+Des descripteurs sont ajoutés à l'enregistrement pour initialiser la classe en
+fonction du schéma :
+
+* on peut accéder aux attributs définis dans le schéma via les attributs de même
+  nom sur les instances (valeur typée)
+
+* on peut accéder aux relations définies dans le schéma via les attributs de même
+  nom sur les instances (liste d'instances d'entité)
+
+Les méthodes définies sur la classe `AnyEntity` ou `Entity` sont les suivantes :
+
+* `has_eid()`, retourne vrai si l'entité a un eid affecté (i.e. pas en cours de
+  création) 
+        
+* `check_perm(action)`, vérifie que l'utilisateur a le droit d'effectuer
+  l'action demandée sur l'entité
+
+:Formatage et génération de la sortie:
+
+  * `view(vid, **kwargs)`, applique la vue donnée à l'entité
+
+  * `absolute_url(**kwargs)`, retourne une URL absolue permettant d'accéder à la
+    vue primaire d'une entité
+
+  * `rest_path()`, renvoie une l'URL REST relative permettant d'obtenir l'entité
+
+  * `format(attr)`, retourne le format (type MIME) du champ passé en argument
+
+  * `printable_value(attr, value=_marker, attrtype=None, format='text/html')`, 
+    retourne une chaine permettant l'affichage dans un format donné de la valeur
+    d'un attribut (la valeur est automatiquement récupérée au besoin)
+
+  * `display_name(form='')`, retourne une chaîne pour afficher le type de
+    l'entité, en spécifiant éventuellement la forme désirée ('plural' pour la
+    forme plurielle)
+
+:Gestion de données:
+
+  * `as_rset()`, transforme l'entité en un resultset équivalent simulant
+     le résultat de la requête `Any X WHERE X eid _eid_`
+
+  * `complete(skip_bytes=True)`, effectue une requête permettant de récupérer d'un
+    coup toutes les valeurs d'attributs manquant sur l'entité
+
+  * `get_value(name)`, récupere la valeur associée à l'attribut passé en argument
+
+  * `related(rtype, x='subject', limit=None, entities=False)`, retourne une liste
+    des entités liées à l'entité courant par la relation donnée en argument
+
+  * `unrelated(rtype, targettype, x='subject', limit=None)`, retourne un result set
+    des entités non liées à l'entité courante par la relation donnée en argument
+    et satisfaisants les contraintes de celle-ci
+
+  * `set_attributes(**kwargs)`, met à jour la liste des attributs avec
+    les valeurs correspondantes passées sous forme d'arguments nommés
+
+  * `copy_relations(ceid)`, copie les relations de l'entité ayant l'eid passé en
+    argument sur l'entité courante
+
+  * `last_modified(view)`, retourne la date à laquelle on doit considérer
+    l'objet comme modifié (utiliser par la gestion de cache HTTP)
+
+  * `delete()` permet de supprimer l'entité représentée
+  
+:Meta-données standard (Dublin Core):
+
+  * `dc_title()`, retourne une chaine unicode correspondant à la méta-donnée
+    'Title' (utilise par défaut le premier attribut non 'meta' du schéma de
+    l'entité) 
+
+  * `dc_long_title()`, comme dc_title mais peut retourner un titre plus détaillé
+
+  * `dc_description(format='text/plain')`, retourne une chaine unicode
+     correspondant à la méta-donnée 'Description' (cherche un attribut
+     'description' par défaut)
+
+  * `dc_authors()`, retourne une chaine unicode correspondant à la méta-donnée
+    'Authors' (propriétaires par défaut)
+
+  * `dc_date(date_format=None)`, retourne une chaine unicode
+     correspondant à la méta-donnée 'Date' (date de modification par défaut)
+            
+:Contrôle du vocabulaire pour les relations:
+
+  * `vocabulary(rtype, x='subject', limit=None)`, appelée notamment
+    par les vues d'édition d'erudi, elle renvoie une liste de couple
+    (label, eid) des entités qui pourraient être liées à l'entité
+    via la relation `rtype`
+  * `subject_relation_vocabulary(rtype, limit=None)`, appelée
+    en interne par `vocabulary` dans le cas d'une relation sujet
+  * `object_relation_vocabulary(rtype, limit=None)`, appelée
+    en interne par `vocabulary` dans le cas d'une relation objet
+  * `relation_vocabulary(rtype, targettype, x, limit=None)`, appelé
+    en interne par `subject_relation_vocabulary` et `object_relation_vocabulary`
+
+
+Les *rtags*
+-----------
+Les *rtags* permettent de spécifier certains comportements propres aux relations
+d'un type d'entité donné (voir plus loin). Ils sont définis sur la classe 
+d'entité via l'attribut `rtags` qui est un dictionnaire dont les clés sont un 
+triplet ::
+
+  <type de relation>, <type d'entité cible>, <position du contexte ("subject" ou "object")>
+
+et les valeurs un `set` ou un tuple de marqueurs définissant des propriétés 
+s'appliquant à cette relation. 
+
+Il est possible de simplifier ce dictionnaire :
+
+* si l'on veut spécifier un seul marqueur, il n'est pas nécessaire d'utiliser
+  un tuple comme valeur, le marqueur seul (chaine de caractères) suffit
+* si l'on s'intéresse uniquement à un type de relation et non à la cible et à la
+  position du contexte (ou que celui-ci n'est pas ambigüe), on peut simplement
+  utiliser le nom du type de relation comme clé
+* si l'on veut qu'un marqueur s'applique quelque soit le type d'entité cible, il
+  faut utiliser la chaine `*` comme type d'entité cible
+
+A noter également que ce dictionnaire est *traité à la création de la classe*. 
+Il est automatiquement fusionné avec celui de la ou des classe(s) parentes (pas
+besoin de copier celui de la classe parent pour le modifier). De même modifier
+celui-ci après création de la classe n'aura aucun effet...
+
+
+.. include:: sect_definition_entites.txt
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/devmanual_fr/chap_migration.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,218 @@
+.. -*- coding: utf-8 -*-
+
+
+Migration
+=========
+
+Une des idées de base d'Erudi est la création incrémentale d'application, et
+pour cela de nombreuses actions sont fournies afin de facilement faire évoluer
+une application et tout particulièrement le modèle de données manipulé sans
+perdre les données des instances existantes.
+
+La version courante d'un modèle d'application est donnée dans le fichier
+`__pkginfo__.py` sous forme d'un tuple de 3 entiers.
+
+
+Gestion des scripts de migrations
+---------------------------------
+Les scripts des migrations doivent être placés dans le répertoire `migration` de
+l'application, et nommé de la manière suivante :
+
+::
+
+  <n° de version X.Y.Z>[_<description>]_<mode>.py
+
+dans lequel : 
+
+* X.Y.Z correspond au n° de version du modèle vers lequel le script permet de
+  migrer,
+
+* le *mode* (entre le dernier "_" et l'extension ".py") indique à quelle partie
+  de l'application (serveur RQL, serveur web) le script s'applique en cas
+  d'installation distribuée. Il peut valoir : 
+
+  * `common`, s'applique aussi bien sur le serveur RQL que sur le serveur web,
+    et met à jour des fichiers sur le disque (migration de fichier de
+    configuration par exemple).
+
+  * `web`, s'applique uniquement sur le serveur web, et met à jour des fichiers
+    sur le disque 
+
+  * `repository`, s'applique uniquement sur le serveur RQL, et met à jour des
+    fichiers sur le disque 
+
+  * `Any`, s'applique uniquement sur le serveur RQL, et met à jour des
+    données en base (migrations de schéma et de données par ex.)
+
+
+Toujours dans le répertoire `migration`, le fichier spécial `depends.map` permet
+d'indiquer que pour migrer vers une version spécifique du modèle, il faut tout
+d'abord avoir migré vers une version donnée de erudi. Ce fichier peut contenir
+des commentaires (lignes commençant par un "#"), et une dépendance est notée sur
+une ligne de la manière suivante : ::
+
+  <n° de version du modèle X.Y.Z> : <n° de version erudi X.Y.Z>
+
+Par exemple ::
+
+  0.12.0: 2.26.0
+  0.13.0: 2.27.0
+  # 0.14 works with 2.27 <= erudi <= 2.28 at least
+  0.15.0: 2.28.0
+
+
+Contexte de base
+----------------
+Les identifiants suivants sont prédéfinis dans les scripts de migration : 
+
+* `config`, configuration de l'instance
+
+* `interactive_mode`, booléen indiquant si le script est éxécuté en mode
+  interactif ou non
+
+* `appltemplversion`, version du modèle d'application de l'instance
+
+* `applerudiversion`, version erudi de l'instance
+
+* `templversion`, version du modéle d'application installée
+
+* `erudiversion`, version erudi installée
+
+* `confirm(question)`, fonction posant une question et retournant vrai si
+  l'utilisateur a répondu oui, faux sinon (retourne toujours vrai en mode non
+  interactif) 
+
+* `_`, fonction équivalente à `unicode` permettant de marquer des chaines à
+  internationaliser dans les scripts de migration
+
+Dans les scripts "repository", les identifiants suivant sont également définis :
+
+* `checkpoint`, demande confirmation et effectue un "commit" au point d'appel
+
+* `repo_schema`, schéma persistent de l'instance (i.e. schéma de l'instance en
+  cours de migration)
+
+* `newschema`, schéma installé sur le système de fichier (i.e. schéma de la
+  version à jour du modèle et de erudi)
+
+* `sqlcursor`, un curseur SQL pour les très rares cas où il est réellement
+  nécessaire ou avantageux de passer par du sql
+
+* `repo`, l'objet repository
+
+                        
+Migration de schéma
+-------------------
+Les fonctions de migration de schéma suivantes sont disponibles dans les scripts
+"repository" : 
+
+* `add_attribute(etype, attrname, attrtype=None, commit=True)`, ajoute un
+  nouvel attribut à un type d'entité existante. Si le type de celui-ci n'est pas
+  spécifié il est extrait du schéma à jour.
+        
+* `drop_attribute(etype, attrname, commit=True)`, supprime un
+  attribut à un type d'entité existante.
+
+* `rename_attribute(etype, oldname, newname, commit=True)`, renomme un attribut
+            
+* `add_entity_type(etype, auto=True, commit=True)`, ajoute un nouveau type
+  d'entité. Si `auto` est vrai, toutes les relations utilisant ce type d'entité
+  et ayant un type d'entité connu à l'autre extrémité vont également être
+  ajoutées.
+
+* `drop_entity_type(etype, commit=True)`, supprime un type d'entité et toutes
+  les relations l'utilisant.
+
+* `rename_entity_type(oldname, newname, commit=True)`, renomme un type d'entité
+            
+* `add_relation_type(rtype, addrdef=True, commit=True)`, ajoute un nouveau type
+  de relation. Si `addrdef` est vrai, toutes les définitions de relation de ce
+  type seront également ajoutées.
+
+* `drop_relation_type(rtype, commit=True)`, supprime un type de relation et
+  toutes les définitions de ce type.
+
+* `rename_relation(oldname, newname, commit=True)`, renomme une relation.
+
+* `add_relation_definition(subjtype, rtype, objtype, commit=True)`, ajoute une
+  définition de relation.
+
+* `drop_relation_definition(subjtype, rtype, objtype, commit=True)`, supprime
+  une définition de relation.
+
+* `synchronize_permissions(ertype, commit=True)`, synchronise les permissions
+  d'un type d'entité ou de relation
+        
+* `synchronize_rschema(rtype, commit=True)`, synchronise les propriétés et
+  permissions d'un type de relation.
+                
+* `synchronize_eschema(etype, commit=True)`, synchronise les propriétés et
+  permissions d'un type d'entité.
+    
+* `synchronize_schema(commit=True)`, synchronise le schéma persistent avec le
+  schéma à jour (mais sans ajouter ni supprimer de nouveaux types d'entités ou
+  de relations ni de définitions de relation).
+        
+* `change_relation_props(subjtype, rtype, objtype, commit=True, **kwargs)`, change
+  les propriétés d'une definition de relation en utilisant les arguments nommés
+  pour les propriétés à changer.
+
+* `set_widget(etype, rtype, widget, commit=True)`, change le widget à utiliser
+  pour la relation <rtype> du type d'entité <etype>
+
+* `set_size_constraint(etype, rtype, size, commit=True)`, change la contrainte
+  de taille pour la relation <rtype> du type d'entité <etype>
+
+
+Migration de données
+--------------------
+Les fonctions de migration de données suivantes sont disponibles dans les scripts
+"repository" : 
+
+* `rql(rql, kwargs=None, cachekey=None, ask_confirm=True)`, éxécute une
+  requête rql arbitraire, d'interrogation ou de modification. Un objet result
+  set est retourné.
+
+* `add_entity(etype, *args, **kwargs)`, ajoute une nouvelle entité du type
+  données. La valeur des attributs et relations est spécifiée en utilisant les
+  arguments nommés et positionnels.
+
+  
+Création de workflow
+--------------------
+Les fonctions de création de workflow suivantes sont disponibles dans les scripts
+"repository" : 
+
+* `add_state(name, stateof, initial=False, commit=False, **kwargs)`, ajoute un
+  nouvel état de workflow
+    
+* `add_transition(name, transitionof, fromstates, tostate, requiredgroups=(), commit=False, **kwargs)`, 
+  ajoute une nouvelle transition de workflow
+
+Migration de configuration
+--------------------------
+Les fonctions de migration de configuration suivantes sont disponibles dans tout
+les scripts : 
+
+* `option_renamed(oldname, newname)`, indique qu'une option a été renommée
+
+* `option_group_change(option, oldgroup, newgroup)`, indique qu'une option a
+  changé de groupe
+
+* `option_added(oldname, newname)`, indique qu'une option a été ajoutée
+
+* `option_removed(oldname, newname)`, indique qu'une option a été supprimée
+
+
+Autres fonctions de migration
+-----------------------------
+Ces fonctions ne sont utilisés que pour des opérations de bas niveau
+irréalisables autrement ou pour réparer des bases cassées lors de session
+interactive. Elles sont disponibles dans les scripts "repository".
+
+* `sqlexec(sql, args=None, ask_confirm=True)`, éxécute une requête sql
+  arbitraire, à n'utiliser 
+
+* `add_entity_type_table(etype, commit=True)`
+* `add_relation_type_table(rtype, commit=True)`
+* `uninline_relation(rtype, commit=True)`
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/devmanual_fr/chap_mise_en_place_environnement.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,218 @@
+.. -*- coding: utf-8 -*-
+
+
+Migration
+=========
+
+Une des idées de base d'Erudi est la création incrémentale d'application, et
+pour cela de nombreuses actions sont fournies afin de facilement faire évoluer
+une application et tout particulièrement le modèle de données manipulé sans
+perdre les données des instances existantes.
+
+La version courante d'un modèle d'application est donnée dans le fichier
+`__pkginfo__.py` sous forme d'un tuple de 3 entiers.
+
+
+Gestion des scripts de migrations
+---------------------------------
+Les scripts des migrations doivent être placés dans le répertoire `migration` de
+l'application, et nommé de la manière suivante :
+
+::
+
+  <n° de version X.Y.Z>[_<description>]_<mode>.py
+
+dans lequel : 
+
+* X.Y.Z correspond au n° de version du modèle vers lequel le script permet de
+  migrer,
+
+* le *mode* (entre le dernier "_" et l'extension ".py") indique à quelle partie
+  de l'application (serveur RQL, serveur web) le script s'applique en cas
+  d'installation distribuée. Il peut valoir : 
+
+  * `common`, s'applique aussi bien sur le serveur RQL que sur le serveur web,
+    et met à jour des fichiers sur le disque (migration de fichier de
+    configuration par exemple).
+
+  * `web`, s'applique uniquement sur le serveur web, et met à jour des fichiers
+    sur le disque 
+
+  * `repository`, s'applique uniquement sur le serveur RQL, et met à jour des
+    fichiers sur le disque 
+
+  * `Any`, s'applique uniquement sur le serveur RQL, et met à jour des
+    données en base (migrations de schéma et de données par ex.)
+
+
+Toujours dans le répertoire `migration`, le fichier spécial `depends.map` permet
+d'indiquer que pour migrer vers une version spécifique du modèle, il faut tout
+d'abord avoir migré vers une version donnée d'erudi. Ce fichier peut contenir
+des commentaires (lignes commençant par un "#"), et une dépendance est notée sur
+une ligne de la manière suivante : ::
+
+  <n° de version du modèle X.Y.Z> : <n° de version erudi X.Y.Z>
+
+Par exemple ::
+
+  0.12.0: 2.26.0
+  0.13.0: 2.27.0
+  # 0.14 works with 2.27 <= erudi <= 2.28 at least
+  0.15.0: 2.28.0
+
+
+Contexte de base
+----------------
+Les identifiants suivants sont prédéfinis dans les scripts de migration : 
+
+* `config`, configuration de l'instance
+
+* `interactive_mode`, booléen indiquant si le script est éxécuté en mode
+  interactif ou non
+
+* `appltemplversion`, version du modèle d'application de l'instance
+
+* `applerudiversion`, version erudi de l'instance
+
+* `templversion`, version du modéle d'application installée
+
+* `erudiversion`, version erudi installée
+
+* `confirm(question)`, fonction posant une question et retournant vrai si
+  l'utilisateur a répondu oui, faux sinon (retourne toujours vrai en mode non
+  interactif) 
+
+* `_`, fonction équivalente à `unicode` permettant de marquer des chaines à
+  internationaliser dans les scripts de migration
+
+Dans les scripts "repository", les identifiants suivant sont également définis :
+
+* `checkpoint`, demande confirmation et effectue un "commit" au point d'appel
+
+* `repo_schema`, schéma persistent de l'instance (i.e. schéma de l'instance en
+  cours de migration)
+
+* `newschema`, schéma installé sur le système de fichier (i.e. schéma de la
+  version à jour du modèle et de erudi)
+
+* `sqlcursor`, un curseur SQL pour les très rares cas où il est réellement
+  nécessaire ou avantageux de passer par du sql
+
+* `repo`, l'objet repository
+
+                        
+Migration de schéma
+-------------------
+Les fonctions de migration de schéma suivantes sont disponibles dans les scripts
+"repository" : 
+
+* `add_attribute(etype, attrname, attrtype=None, commit=True)`, ajoute un
+  nouvel attribut à un type d'entité existante. Si le type de celui-ci n'est pas
+  spécifié il est extrait du schéma à jour.
+        
+* `drop_attribute(etype, attrname, commit=True)`, supprime un
+  attribut à un type d'entité existante.
+
+* `rename_attribute(etype, oldname, newname, commit=True)`, renomme un attribut
+            
+* `add_entity_type(etype, auto=True, commit=True)`, ajoute un nouveau type
+  d'entité. Si `auto` est vrai, toutes les relations utilisant ce type d'entité
+  et ayant un type d'entité connu à l'autre extrémité vont également être
+  ajoutées.
+
+* `drop_entity_type(etype, commit=True)`, supprime un type d'entité et toutes
+  les relations l'utilisant.
+
+* `rename_entity_type(oldname, newname, commit=True)`, renomme un type d'entité
+            
+* `add_relation_type(rtype, addrdef=True, commit=True)`, ajoute un nouveau type
+  de relation. Si `addrdef` est vrai, toutes les définitions de relation de ce
+  type seront également ajoutées.
+
+* `drop_relation_type(rtype, commit=True)`, supprime un type de relation et
+  toutes les définitions de ce type.
+
+* `rename_relation(oldname, newname, commit=True)`, renomme une relation.
+
+* `add_relation_definition(subjtype, rtype, objtype, commit=True)`, ajoute une
+  définition de relation.
+
+* `drop_relation_definition(subjtype, rtype, objtype, commit=True)`, supprime
+  une définition de relation.
+
+* `synchronize_permissions(ertype, commit=True)`, synchronise les permissions
+  d'un type d'entité ou de relation
+        
+* `synchronize_rschema(rtype, commit=True)`, synchronise les propriétés et
+  permissions d'un type de relation.
+                
+* `synchronize_eschema(etype, commit=True)`, synchronise les propriétés et
+  permissions d'un type d'entité.
+    
+* `synchronize_schema(commit=True)`, synchronise le schéma persistent avec le
+  schéma à jour (mais sans ajouter ni supprimer de nouveaux types d'entités ou
+  de relations ni de définitions de relation).
+        
+* `change_relation_props(subjtype, rtype, objtype, commit=True, **kwargs)`, change
+  les propriétés d'une definition de relation en utilisant les arguments nommés
+  pour les propriétés à changer.
+
+* `set_widget(etype, rtype, widget, commit=True)`, change le widget à utiliser
+  pour la relation <rtype> du type d'entité <etype>
+
+* `set_size_constraint(etype, rtype, size, commit=True)`, change la contrainte
+  de taille pour la relation <rtype> du type d'entité <etype>
+
+
+Migration de données
+--------------------
+Les fonctions de migration de données suivantes sont disponibles dans les scripts
+"repository" : 
+
+* `rql(rql, kwargs=None, cachekey=None, ask_confirm=True)`, éxécute une
+  requête rql arbitraire, d'interrogation ou de modification. Un objet result
+  set est retourné.
+
+* `add_entity(etype, *args, **kwargs)`, ajoute une nouvelle entité du type
+  donné. La valeur des attributs et relations est spécifiée en utilisant les
+  arguments nommés et positionnels.
+
+  
+Création de workflow
+--------------------
+Les fonctions de création de workflow suivantes sont disponibles dans les scripts
+"repository" : 
+
+* `add_state(name, stateof, initial=False, commit=False, **kwargs)`, ajoute un
+  nouvel état de workflow
+    
+* `add_transition(name, transitionof, fromstates, tostate, requiredgroups=(), commit=False, **kwargs)`, 
+  ajoute une nouvelle transition de workflow
+
+Migration de configuration
+--------------------------
+Les fonctions de migration de configuration suivantes sont disponibles dans tout
+les scripts : 
+
+* `option_renamed(oldname, newname)`, indique qu'une option a été renommée
+
+* `option_group_change(option, oldgroup, newgroup)`, indique qu'une option a
+  changé de groupe
+
+* `option_added(oldname, newname)`, indique qu'une option a été ajoutée
+
+* `option_removed(oldname, newname)`, indique qu'une option a été supprimée
+
+
+Autres fonctions de migration
+-----------------------------
+Ces fonctions ne sont utilisées que pour des opérations de bas niveau
+irréalisables autrement ou pour réparer des bases cassées lors de session
+interactive. Elles sont disponibles dans les scripts "repository".
+
+* `sqlexec(sql, args=None, ask_confirm=True)`, éxécute une requête sql
+  arbitraire, à n'utiliser qu'en dernier recours
+
+* `add_entity_type_table(etype, commit=True)`
+* `add_relation_type_table(rtype, commit=True)`
+* `uninline_relation(rtype, commit=True)`
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/devmanual_fr/chap_rql.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,11 @@
+.. -*- coding: utf-8 -*-
+
+Le langage RQL (Relation Query Language)
+========================================
+
+Voir la `documentation de RQL <file:///home/sandrine/src/fcubicweb/rql/doc/build/html/index.html>`_ .
+
+
+[TODO]
+Specific link to RQL complete documentation to remove duplicated content.
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/devmanual_fr/chap_serveur_crochets.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,31 @@
+.. -*- coding: utf-8 -*-
+
+Les crochets (*hooks*)
+======================
+
+XXX FILLME
+
+Les crochets sont appelés avant ou après la mise à jour d'une entité ou d'une
+relations dans le dépot
+
+Leur prototypes sont les suivants
+
+
+    * after_add_entity     (session, entity)
+    * after_update_entity  (session, entity)
+    * after_delete_entity  (session, eid)
+    * before_add_entity    (session, entity)
+    * before_update_entity (session, entity)
+    * before_delete_entity (session, eid)
+
+    * after_add_relation     (session, fromeid, rtype, toeid)
+    * after_delete_relation  (session, fromeid, rtype, toeid)
+    * before_add_relation    (session, fromeid, rtype, toeid)
+    * before_delete_relation (session, fromeid, rtype, toeid)
+    
+    * server_startup
+    * server_shutdown
+    
+    * session_open
+    * session_close
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/devmanual_fr/chap_serveur_notification.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,6 @@
+.. -*- coding: utf-8 -*-
+
+Gestion de notifications
+========================
+
+XXX FILLME
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/devmanual_fr/chap_tests.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,38 @@
+.. -*- coding: utf-8 -*-
+
+Tests
+=====
+
+Écriture de tests unitaires
+---------------------------
+Le framework de test fournit principalement deux classes de tests dans le module
+`ginco.devtools.apptest`:
+
+* `EnvBasedTC`, pour simuler un environnement complet (web + repository)
+* `RepositoryBasedTC`, pour simuler un environnement de repository uniquement
+
+Ces deux classes ont quasiment la même interface et proposent un certain nombre de méthodes
+rendant l'écriture de test puissante et rapide.
+
+XXXFILLME describe API
+
+Dans la plupart des cas, vous allez vouloir hériter de `EnvBasedTC` pour écrire des tests
+unitaires ou fonctionnels pour vos entités, vues, crochets...
+
+
+Test des courriels de notifications
+```````````````````````````````````
+Lors de l'éxécution de tests les courriels potentiellement générés ne sont pas réellement
+envoyé mais se retrouve dans la liste `MAILBOX` du module `ginco.devtools.apptest`. Cette
+liste est remise à zéro au *setUp* de chaque test (par le setUp des classes `EnvBasedTC`
+et `RepositoryBasedTC`).
+
+Vous pouvez donc tester vos notifications en analysant le contenu de cette liste, qui
+contient des objets ayant deux attributs :
+* `recipients`, la liste des destinataires
+* `msg`, l'objet email.Message
+
+
+Tests automatiques
+------------------
+XXXFILLME
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/devmanual_fr/chap_ui_gestion_formulaire.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,133 @@
+.. -*- coding: utf-8 -*-
+
+Gestion de formulaires
+======================
+
+Contrôle de la génération automatique de formulaire pour les entités manipulées
+------------------------------------------------------------------------------
+XXX FILLME
+
+* les formulaires 'edition' et 'creation'
+
+Le formulaire généré par défaut ne vous convient pas ? Vous n'êtes peut-être pas
+obligé de le refaire à la main ! :)
+
+* rtags primary, secondary, generated, generic,
+  `Entity.relation_category(rtype, x='subject')`
+* inline_view (now a rtag?)
+* spécification widget
+
+
+Fonctionnement du contrôleur d'édition par défaut (id: 'edit')
+--------------------------------------------------------------
+
+Contrôle de l'édition
+`````````````````````
+Prérequis: les paramètres liés aux entités à éditer sont spécifiés de la forme ::
+
+  <nom de champ>:<eid de l'entité>
+
+où l'eid de l'entité pourra être une lettre dans le cas d'une entité à créer. On
+dénommera ces paramètres comme *qualifié*.
+
+1. récupération des entités à éditer en cherchant les paramètres de formulaire
+   commençant par 'eid:' ayant également un paramètre '__type' associé
+   (également *qualifié* par l'eid évidemment)
+
+2. pour tous les attributs et relations de chaque entité à éditer
+
+   1. recherche d'un paramètre 'edits-<nom relation>' ou 'edito-<nom relation>'
+      qualifié dans le cas d'une relation dont l'entité est objet
+   2. si trouvé, la valeur récupérée est considérée comme la valeur originale
+      pour cette relation, et on cherche la (ou les) nouvelle(s) valeur(s) dans
+      le paramètre <nom relation> (qualifié)
+   3. si la valeur est différente de l'originale, une requête de modification en
+      base est effectuée
+
+3. pour chaque entité à éditer
+
+   1. si un paramètre `__linkto` qualifié est spécifié, sa valeur doit être une
+      chaine (ou une liste de chaine) de la forme : ::
+
+        <relation type>:<eids>:<target>
+
+      où <target> vaut 'subject' ou 'object' et chaque eid peut-être séparé d'un
+      autre par un '_'. Target spécifie *l'entité éditée* est sujet ou objet de la
+      relation et chaque relation ainsi spécifiée sera insérée.
+
+   2. si un paramètre `__cloned_eid` qualifié est spécifié pour une entité, les
+      relations de l'entité spécifiée en valeur de cette argument sont copiées sur
+      l'entité éditée
+
+
+   3. si un paramètre `__delete` qualifié est spécifié, sa valeur doit être une
+      chaine (ou une liste de chaine) de la forme : ::
+
+	<subject eids>:<relation type>:<object eids>
+
+      où chaque eid sujet ou objet peut-être séparé d'un autre par un '_'. Chaque
+      relation ainsi spécifiée sera supprimée.
+
+   4. si un paramètre `__insert` qualifié est spécifié, sa valeur doit être de
+      même format que pour `__delete`, mais chaque relation ainsi spécifiée sera 
+      insérée.
+
+4. si les paramètres `__insert` et/ou  `__delete` sont trouvés non qualifiés,
+   ils sont interprétés comme décrit ci-dessus (quelque soit le nombre d'entité
+   édité)
+
+5. si aucune entité n'est éditée mais que le formulaire contient les paramètres
+   `__linkto` et `eid`, celui-ci est interprété en prenant la valeur spécifié
+   par le paramètre `eid` pour désigner l'entité sur laquelle ajouter les
+   relations
+
+
+A noter que :
+
+* si le paramètre `__action_delete` est trouvé, toutes les entités comme
+  spécifiées à éditer seront supprimées
+
+* si le paramètre `__action_cancel` est trouvé, aucune action n'est effectuée
+
+* si le paramètre `__action_apply` est trouvé, l'édition est effectuée
+  normalement mais la redirection sera effectuée sur le formulaire (cf `Contrôle
+  de la redirection`_)
+
+* le paramètre `__method` est également supporté comme sur le template principal
+  (XXX not very consistent, maybe __method should be dealed in the view controller) 
+
+* si aucune entité à éditer n'est trouvée et qu'il n'y a pas de paramètre
+  `__action_delete`, `__action_cancel`, `__linkto`, `__delete` ou `__insert`,
+  une erreur est levée
+
+* placer dans le formulaire le paramètre `__message` permettra d'utiliser la
+  valeur de ce paramètre comme message d'information à l'utilisateur une fois
+  l'édition effectuée.
+
+
+Contrôle de la redirection
+``````````````````````````
+Une fois que l'édition s'est bien passée, reste un problème : c'est bien beau
+tout ça, mais où qu'on va maintenant ?? Si rien n'est spécifié, le controlleur
+se débrouille, mais comme il fait pas toujours ce qu'on voudrait, on peut
+controller ça en utilisant les paramètres suivant :
+
+* `__redirectpath`: chemin de l'url (relatif à la racine du site, sans paramètre
+  de formulaire
+  
+* `__redirectparams`: paramètres de formulaires à ajouter au chemin
+  
+* `__redirectrql`: requête RQL de redirection
+
+* `__redirectvid`: identifiant de vue de redirection
+
+* `__errorurl`: url du formulaire original, utilisé pour la redirection en cas
+  d'erreur de validation pendant l'édition. Si celui-ci n'est pas spécifié, une
+  page d'erreur sera présentée plutot qu'un retour sur le formulaire (qui est le
+  cas échéant responsable d'afficher les erreurs)
+
+* `__form_id`: identifiant de vue du formulaire original, utilisée si
+  `__action_apply` est trouvé
+
+En général on utilise soit `__redirectpath` et `__redirectparams` soit
+`__redirectrql` et `__redirectvid`.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/devmanual_fr/chap_ui_js_json.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,16 @@
+.. -*- coding: utf-8 -*-
+
+AJAX
+====
+JSON bla  bla
+XXX FILLME
+
+
+Le contrôleur 'json'
+--------------------
+XXX FILLME
+
+
+API Javascript
+--------------
+XXX FILLME
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/devmanual_fr/chap_visualisation_donnees.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,120 @@
+.. -*- coding: utf-8 -*-
+
+
+Définition de vues
+==================
+
+Les classes de base des vues
+----------------------------
+
+La class `View` (`ginco.common.view`)
+`````````````````````````````````````
+Un vue écrit dans son flux de sortie via son attribut `w` (`UStreamIO`).
+
+L'interface de base des vues est la suivante :
+
+* `dispatch(**context)`, appelle ("rend") la vue en appelant `call` ou
+  `cell_call` en fonction des arguments passé
+* `call(**kwargs)`, appelle la vue pour un result set complet ou nul
+* `cell_call(row, col, **kwargs)`, appelle la vue pour une cellule donnée d'un
+  result set
+* `url()`, retourne l'url permettant d'obtenir cette vue avec le result set en
+  cours 
+* `view(__vid, rset, __fallback_vid=None, **kwargs)`, appelle la vue
+  d'identifiant `__vid` sur le result set donné. Il est possible de donner un
+  identifiant de vue de "fallback" qui sera utilisé si la vue demandée n'est
+  pas applicable au result set
+  
+* `wview(__vid, rset, __fallback_vid=None, **kwargs)`, pareil que `view` mais
+  passe automatiquement le flux en argument
+  
+* `html_headers()`, retourne une liste d'en-tête HTML à placer par le template
+  principal 
+
+* `page_title()`, retourne le titre à utiliser dans l'en tête HTML `title`
+
+* `creator(eid)`, retourne l'eid et le login du créateur de l'entité ayant
+  l'eid passé en argument
+
+Autres classes de base :
+
+* `EntityView`, vue s'appliquant aux lignes ou cellules contenant une entité
+  (eg un eid)
+* `StartupView`, vue de départ n'ayant pas besoin de result set
+* `AnyRsetView`, vue s'appliquant à n'importe quelle result set
+
+
+Les templates ou patron
+-----------------------
+
+Les patrons (ou *template*) sont des cas particulier de vue ne dépendant a
+priori pas d'un result set. La classe de base `Template` (`ginco.common.view`)
+est une classe dérivée de la classe `View`.
+
+Pour construire une page HTML, un *template principal* est utilisé. Généralement
+celui possédant l'identifiant 'main' est utilisé (ce n'est pas le cas lors
+d'erreur dans celui-ci ou pour le formulaire de login par exemple). Ce patron
+utilise d'autres patrons en plus des vues dépendants du contenu pour générer la
+page à renvoyer.
+
+C'est ce template qui est chargé :
+
+1. d'éxécuter la requête RQL des données à afficher le cas échéant
+2. éventuellement de déterminer la vue à utiliser pour l'afficher si non
+   spécifiée
+3. de composer la page à retourner
+
+
+Le patron principal par défaut (`ginco.web.views.basetemplates.TheMainTemplate`)
+--------------------------------------------------------------------------------
+
+Le template principal par défaut construit la page selon la décomposition
+suivante :
+
+.. image:: main_template_layout.png
+
+Le rectangle contenant le `view.dispatch()` représente l'emplacement où est
+inséré la vue de contenu à afficher. Les autres représentent des sous-templates
+appelé pour construire la page. Les implémentations par défaut de tout ces
+templates sont dans le module `ginco.web.views.basetemplates`. Vous pouvez
+évidemment surcharger l'un des sous-templates pour modifier l'aspect visuel
+d'une partie désirée de la page.
+
+On peut également contrôler certains comportements du template principal à
+l'aide des paramètres de formulaire suivante :
+
+* `__notemplate`, si présente (quelque soit la valeur associée), seule la vue de
+  contenu est renvoyée
+* `__force_display`, si présente et contient une valeur non nulle, pas de
+  navigation quelque soit le nombre d'entités à afficher
+* `__method`, si le result set à afficher ne contient qu'une entité et que ce
+  paramètre est spécifié, celui-ci désigne une méthode à appeler sur l'entité
+  en lui donnant en argument le dictionnaire des paramètres de formulaire, avant
+  de reprendre le comportement classique (s'insère entre les étapes 1. et
+  2. décrites ci-dessus)
+
+
+.. include:: sect_stdlib_vues.txt
+
+
+Vues xml, binaires...
+---------------------
+Pour les vues générant autre chose que du html (une image générée dynamiquement par
+exemple), et qui ne peuvent donc généralement pas être incluse dans la page
+HTML générée par le template principal (voir ci-dessus), il faut :
+
+* placer l'attribut `templatable` de la classe à `False`
+* indiquer via l'attribut `content_type` de la classe le type MIME généré par la
+  vue 'application/octet-stream'
+
+Pour les vues générant un contenu binaire (une image générée dynamiquement par
+exemple), il faut également placer l'attribut `binary` de la classe à `True` (ce
+qui implique `templatable == False` afin que l'attribut `w` de la vue soit
+remplacé par un flux binaire plutôt que unicode.
+
+
+Quelques trucs (X)HTML à respecter
+----------------------------------
+Certains navigateurs (dont firefox) n'aime pas les `<div>` vides (par vide
+j'entend sans contenu dans la balise, il peut y avoir des attributs), faut
+toujours mettre `<div></div>` même s'il n'y a rien dedans, et non `<div/>`. 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/devmanual_fr/gae.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,17 @@
+.. -*- coding: utf-8 -*-
+
+.. _contents:
+
+==================================
+LAX - Logilab App engine eXtension
+==================================
+:authors: Nicolas Chauvat, Sylvain Thénault, Adrien Di Mascio
+:date: 2008-07-12
+:version: 0.4
+:organisation: Logilab
+:copyright: © 2008 Logilab
+:contact: contact@logilab.fr
+
+
+Voir la `documentation de LAX <file:///home/sandrine/src/lax/doc/html-en/lax-book.en.html>`_ .
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/devmanual_fr/index.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,37 @@
+.. -*- coding: utf-8 -*-
+
+.. _contents:
+
+==========================================
+Développement d'applications avec CubicWeb
+==========================================
+
+
+:Author: Sylvain Thénault
+:Organization: Logilab
+
+.. toctree::
+   :maxdepth: 1
+   
+   chap_fondements_cubicweb.txt
+   chap_mise_en_place_environnement.txt
+   chap_rql.txt
+   chap_definition_schema.txt
+   chap_definition_workflows.txt
+   chap_bases_framework_cubicweb.txt
+   chap_visualisation_donnees.txt
+   chap_manipulation_donnees.txt
+   chap_ui_gestion_formulaire.txt
+   chap_ui_js_json.txt
+   chap_autres_composants_ui.txt
+   chap_serveur_crochets.txt
+   chap_serveur_notification.txt
+   
+   chap_tests.txt
+   chap_i18n.txt
+   chap_migration.txt
+   
+   chap_creation_instance.txt
+   chap_configuration_instance.txt
+
+XXX: XXX FILLME, CSS, API sécurité
Binary file doc/devmanual_fr/main_template_layout.dia has changed
Binary file doc/devmanual_fr/main_template_layout.png has changed
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/devmanual_fr/makefile	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,21 @@
+MKHTMLOPTS=--doctype book --param toc.section.depth=1  --target html --stylesheet standard 
+SRC=.
+
+MKPDFOPTS=--doctype book --param toc.section.depth=2  --target pdf --stylesheet standard
+
+TXTFILES:= $(wildcard *.txt)
+TARGET := $(TXTFILES:.txt=.html)
+
+all: index.html
+
+index.html: *.txt
+	mkdoc ${MKHTMLOPTS} index.txt
+
+index.pdf: *.txt
+	mkdoc ${MKPDFOPTS} index.txt
+
+%.html: %.txt
+	mkdoc ${MKHTMLOPTS} $<
+
+clean:
+	rm -f *.html
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/devmanual_fr/sect_cubicweb-ctl.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,83 @@
+.. -*- coding: utf-8 -*-
+
+L'outil `cubicweb-ctl`
+----------------------
+`cubicweb-ctl` est le couteau suisse pour la gestion d'instances CubicWeb.
+La syntaxe générale est ::
+
+  cubicweb-ctl <commande> [options commande] <arguments commandes>
+
+Pour voir les commandes disponibles ::
+
+  cubicweb-ctl
+  cubicweb-ctl --help
+
+A noter que les commandes disponibles varient en fonction des parties de CubicWeb
+qui sont installées.
+
+Pour voir l'aide pour une commande spécifiques ::
+
+  cubicweb-ctl <commande> --help
+
+Commandes pour la création d'un composant
+`````````````````````````````````````````
+* ``newtemplate``, crée un nouveau composant sur le système de fichiers
+  à partir du nom passé en paramètre. Cette commande crée le composant à partir
+  d'une squelette d'application, incluant également les fichiers pour le
+  packaging debian)
+  
+Commandes pour la création d'une instance
+`````````````````````````````````````````
+* ``create``, crée les fichiers de configuration d'une instance
+* ``db-create``, crée la base de données système d'une instance (tables et
+  extensions uniquement)
+* ``db-init``, initialise la base de données système d'une instance (schéma,
+  groupes, utilisateurs, workflows...)
+
+Par défaut ces trois commandes sont enchainées.
+
+Commande pour la création d'une instance pour Google App Engine
+```````````````````````````````````````````````````````````````
+* ``newgapp``, crée les fichiers de configuration d'une instance
+
+Cette commande doit être suivie de l'exécution de commandes
+permettant l'initialisation de la base de données spécifique à  
+Google App Engine, appellée ``datastore``.
+
+Pour plus de détails veuillez vous référer à `LAX <>`_
+
+
+Commandes pour le lancement des instances
+`````````````````````````````````````````
+* ``start``, démarre une, plusieurs, ou toutes les instances
+* ``stop``, arrête une, plusieurs, ou toutes les instances
+* ``restart``, redémarre une, plusieurs, ou toutes les instances
+* ``status``, donne l'état des instances
+
+Commandes pour la maintenance des instances
+```````````````````````````````````````````
+* ``upgrade``, lance la migration d'instance(s) existante(s) lorsqu'une nouvelle
+  version de CubicWeb ou du composant est installée
+* ``shell``, ouvre un shell de migration pour la maintenance manuelle d'une instance
+* ``db-dump``, crée un dump de la base de données système
+* ``db-restore``, restaure un dump de la base de données système
+* ``db-check``, vérifie l'intégrité des données d'une instance. Si la correction
+  automatique est activée, il est conseillé de faire un dump avant cette
+  opération
+* ``schema-sync``, synchronise le schéma persistent d'une instance avec le schéma
+  de l'application. Il est conseillé de faire un dump avant cette opération
+
+Commandes pour la maintenance des catalogues i18n
+`````````````````````````````````````````````````
+* ``i18nlibupdate``, regénère les catalogues de messages de la librairie CubicWeb
+* ``i18nupdate``, regénère les catalogues de messages d'un composant
+* ``i18ncompile``, recompile les catalogues de messages d'une instance. Cela est
+  effectué automatiquement lors d'une upgrade
+
+Cf Internationalisation_.
+
+Autres commandes
+````````````````
+* ``list``, donne la liste des configurations, des composants et des instances
+  disponibles
+* ``delete``, supprime une instance (fichiers de configuration et base de données)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/devmanual_fr/sect_definition_entites.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,168 @@
+.. -*- coding: utf-8 -*-
+
+Paramétrages et extensions spécifiques
+--------------------------------------
+
+Valeurs par défaut dynamiques
+`````````````````````````````
+Il est possible de définir dans le schéma des valeurs par défaut *statiques*.
+Il est également possible de définir des valeurs par défaut *dynamiques* en 
+définissant sur la classe d'entité une méthode `default_<nom attribut>` pour
+un attribut donné.
+
+
+Contrôle des attributs chargés et du tri par défaut
+```````````````````````````````````````````````````
+* l'attribut de classe `fetch_attrs` permet de définir sur une classe d'entité
+  la liste des noms des attributs ou relations devant être chargés 
+  automatiquement lors de la récupération d'entité(s) de ce type. Dans le cas 
+  des relations, on est limité aux relations *sujets de cardinalité `?` ou `1`*.
+
+* la méthode de classe `fetch_order(attr, var)` prend en argument un nom 
+  d'attribut (ou de relation) et un nom de variable et doit retourner une chaine
+  à utiliser dans la clause "ORDERBY" d'une requête RQL pour trier 
+  automatiquement les listes d'entités de ce type selon cet attribut, ou `None`
+  si l'on ne veut pas de tri sur l'attribut passé en argument. Par défaut les 
+  entités sont triées selon leur date de création
+
+* la méthode de classe `fetch_unrelated_order(attr, var)` est similaire à la 
+  méthode `fetch_order` mais est utilisée essentiellement pour contrôler le tri
+  des listes déroulantes permettant de créer des relations dans la vue d'édition
+  d'une entité
+
+La fonction `fetch_config(fetchattrs, mainattr=None)` permet de simplifier la 
+définition des attributs à précharger et du tri en retournant une liste des 
+attributs à précharger (en considérant ceux de la classe  `AnyEntity`
+automatiquement) et une fonction de tri sur l'attribut "principal" (le 2eme 
+argument si spécifié ou sinon le premier attribut de la liste `fetchattrs`).
+Cette fonction est définie dans le package `ginco.entities`.
+
+Par exemple : ::
+
+  class Transition(AnyEntity):
+    """..."""
+    id = 'Transition'
+    fetch_attrs, fetch_order = fetch_config(['name'])
+
+Indique que pour le type d'entité "Transition" il faut précharger l'attribut
+"name" et trier par défaut selon cet attribut.
+
+
+Contrôle des formulaires d'édition
+``````````````````````````````````
+Il est possible de contrôler les attributs/relations dans la vue d'édition
+simple ou multiple à l'aide des *rtags* suivants :
+
+* `primary`, indique qu'un attribut ou une relation doit être incorporé dans
+  les formulaires d'édition simple et multiple. Dans le cas d'une relation,
+  le formulaire d'édition de l'entité liée sera inclus dans le formulaire
+
+* `secondary`, indique qu'un attribut ou une relation doit être incorporé dans
+  le formulaire d'édition simple uniquement. Dans le cas d'une relation,
+  le formulaire d'édition de l'entité liée sera inclus dans le formulaire
+
+* `generic`, indique qu'une relation doit être incorporée dans le formulaire 
+  d'édition simple dans la boite générique d'ajout de relation
+
+* `generated`, indique qu'un attribut est calculé dynamiquement ou autre, et 
+  qu'il ne doit donc pas être présent dans les formulaires d'édition
+
+Au besoin il est possible de surcharger la méthode 
+`relation_category(rtype, x='subject')` pour calculer dynamiquement la catégorie
+d'édition d'une relation.
+
+
+Contrôle de la boîte "add_related"
+``````````````````````````````````
+La boite `add related` est une boite automatique proposant de créer une entité
+qui sera automatiquement liée à l'entité de départ (le contexte dans lequel 
+s'affiche la boite). Par défaut, les liens présents dans cette boite sont 
+calculés en fonction des propriétés du schéma de l'entité visualisée, mais il
+est possible de les spécifier explicitement à l'aide des *rtags* suivants :
+
+* `link`, indique qu'une relation est généralement créée vers une entité
+  existante et qu'il ne faut donc pas faire apparaitre de lien pour cette 
+  relation
+
+* `create`, indique qu'une relation est généralement créée vers de nouvelles
+  entités et qu'il faut donc faire apparaitre un lien pour créer une nouvelle
+  entité et la lier automatiquement
+
+Au besoin il est possible de surcharger la méthode  
+`relation_mode(rtype, targettype, x='subject')` pour calculer dynamiquement la
+catégorie de création d'une relation.
+
+A noter également que si au moins une action dans la catégorie "addrelated" est
+trouvée pour le contexte courant, le fonctionnement automatique est désactivé
+en faveur du fonctionnement explicite (i.e. affichage des actions de la
+catégorie "addrelated" uniquement).
+
+Contrôle des formulaires de filtrage de table
+`````````````````````````````````````````````
+La vue "table" par défaut gère dynamiquement un formulaire de filtrage du
+contenu de celle-ci. L'algorithme est le suivant : 
+
+1. on considère que la première colonne contient les entités à restreindre
+2. on récupère la première entité de la table (ligne 0) pour "représenter"
+   toutes les autres
+3. pour toutes les autres variables définies dans la requête originale :
+
+   1. si la variable est liée à la variable principale par au moins une
+      relation, quelle qu'elle soit
+   2. on appelle la méthode `filterform_vocabulary(rtype, x)` sur l'entité
+      et si rien n'est retourné (ou plus exactement un tuple de valeur `None`,
+      voir ci-dessous) on passe à la variable suivante, sinon un élément de
+      formulaire de filtrage sera créé avec les valeurs de vocabulaire
+      retournées
+
+4. il n'y a pas d'autres limitations sur le rql, il peut comporter des clauses
+   de tris, de groupes... Des fonctions javascripts sont utilisées pour
+   regénérer une requête à partir de la requête de départ et des valeurs
+   sélectionnées dans les filtres de formulaire.
+
+   
+La méthode `filterform_vocabulary(rtype, x, var, rqlst, args, cachekey)` prend
+en argument le nom d'une relation et la "cible", qui indique si l'entité sur
+laquelle la méthode est appellée est sujet ou objet de la relation. Elle doit
+retourner :
+
+* un 2-uple de None si elle ne sait pas gérer cette relation
+
+* un type et une liste contenant le vocabulaire
+
+  * la liste doit contenir des couples (valeur, label)
+  * le type indique si la valeur désigne un nombre entier (`type == 'int'`), une
+    chaîne de  caractères (`type == 'string'`) ou une entité non finale (`type
+    == 'eid'`)
+
+Par exemple dans notre application de gestion de tickets, on veut pouvoir
+filtrer ceux-ci par : 
+
+* type
+* priorité
+* état (in_state)
+* étiquette (tags)
+* version (done_in)
+
+On définit donc la méthode suivante : ::
+
+
+    class Ticket(AnyEntity):
+
+	...
+
+	def filterform_vocabulary(self, rtype, x, var, rqlst, args, cachekey):
+	    _ = self.req._
+	    if rtype == 'type':
+		return 'string', [(x, _(x)) for x in ('bug', 'story')]
+	    if rtype == 'priority':
+		return 'string', [(x, _(x)) for x in ('minor', 'normal', 'important')]
+	    if rtype == 'done_in':
+		rql = insert_attr_select_relation(rqlst, var, rtype, 'num')
+		return 'eid', self.req.execute(rql, args, cachekey)
+	    return super(Ticket, self).filterform_vocabulary(rtype, x, var, rqlst,
+							     args, cachekey)
+
+							     
+NOTE: Le support du filtrage sur les étiquettes et l'état est installé
+automatiquement, pas besoin de le gérer ici.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/devmanual_fr/sect_definition_schema.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,348 @@
+.. -*- coding: utf-8 -*-
+
+Définition d'un type d'entité
+-----------------------------
+
+Un type d'entité est défini par une classe python héritant de `EntityType`. Le
+nom de la classe correspond au nom du type. Ensuite le corps de la classe
+contient la description des attributs et des relations pour ce type d'entité,
+par exemple ::
+
+  class Personne(EntityType):
+    """une personne avec les propriétés et relations nécessaires à mon
+    application"""
+
+    nom = String(required=True, fulltextindexed=True)
+    prenom = String(required=True, fulltextindexed=True)
+    civilite = String(vocabulary=('M', 'Mme', 'Mlle'))
+    date_naiss = Date()
+    travaille_pour = SubjectRelation('Company', cardinality='?*')
+
+* le nom de l'attribut python correspond au nom de l'attribut ou de la relation
+  dans cubicweb.
+
+* tous les types de base sont disponibles nativement : `String`, `Int`, `Float`,
+  `Boolean`, `Date`, `Datetime`, `Time`, `Byte`.
+
+* Chaque type d'entité a au moins les méta-relations suivantes :
+
+  - `eid` (`Int`)
+  
+  - `creation_date` (`Datetime`)
+  
+  - `modification_date` (`Datetime`)
+  
+  - `created_by` (`EUser`) (quel utilisateur a créé l'entité)
+  
+  - `owned_by` (`EUser`) (à qui appartient l'entité, par défaut le
+     créateur mais pas forcément et il peut exister plusieurs propriétaires)
+     
+  - `is` (`EEType`)
+
+  
+* il est également possible de définir des relations dont le type d'entité est
+  l'objet en utilisant `ObjectRelation` plutôt que `SubjectRelation`
+
+* le premier argument de `SubjectRelation` et `ObjectRelation` donne
+  respectivement le type d'entité objet /sujet de la relation. Cela
+  peut être : 
+
+  * une chaine de caractères correspondant à un type d'entité
+
+  * un tuple de chaines de caractères correspondant à plusieurs types d'entité
+
+  * les chaînes de caractères spéciales suivantes :
+
+    - "**" : tout les types d'entité
+    - "*" : tout les types d'entité non méta
+    - "@" : tout les types d'entité méta mais non "système" (i.e. servant à la
+      description du schema en base)
+
+* il est possible d'utiliser l'attribut possible `meta` pour marquer un type
+  d'entité comme étant "méta" (i.e. servant à décrire / classifier d'autre
+  entités) 
+
+* propriétés optionnelles des attributs et relations : 
+
+  - `description` : chaine de caractères décrivant un attribut ou une
+    relation. Par défaut cette chaine sera utilisée dans le formulaire de saisie
+    de l'entité, elle est donc destinée à aider l'utilisateur final et doit être
+    marquée par la fonction `_` pour être correctement internationalisée.
+
+  - `constraints` : liste de contraintes devant être respectées par la relation
+    (c.f. `Contraintes`_)
+
+  - `cardinality` : chaine de 2 caractères spécifiant la cardinalité de la
+    relation. Le premier caractère donne la cardinalité de la relation sur le
+    sujet, le 2eme sur l'objet. Quand une relation possède plusieurs sujets ou
+    objets possibles, la cardinalité s'applique sur l'ensemble et non un à un (et
+    doit donc à priori être cohérente...). Les valeurs possibles sont inspirées
+    des expressions régulières :
+
+    * `1`: 1..1
+    * `?`: 0..1
+    * `+`: 1..n
+    * `*`: 0..n
+
+  - `meta` : booléen indiquant que la relation est une méta relation (faux par
+    défaut)
+
+* propriétés optionnelles des attributs : 
+
+  - `required` : booléen indiquant si l'attribut est obligatoire (faux par
+    défaut)
+
+  - `unique` : booléen indiquant si la valeur de l'attribut doit être unique
+    parmi toutes les entités de ce type (faux par défaut)
+
+  - `indexed` : booléen indiquant si un index doit être créé dans la base de
+    données sur cet attribut (faux par défaut). C'est utile uniquement si vous
+    savez que vous allez faire de nombreuses recherches sur la valeur de cet
+    attribut. 
+
+  - `default` : valeur par défaut de l'attribut. A noter que dans le cas des
+    types date, les chaines de caractères correspondant aux mots-clés RQL
+    `TODAY` et `NOW` sont utilisables.
+
+  - `vocabulary` : spécifie statiquement les valeurs possibles d'un attribut
+
+* propriétés optionnelles des attributs de type `String` : 
+
+  - `fulltextindexed` : booléen indiquant si l'attribut participe à l'index plein
+    texte (faux par défaut) (*valable également sur le type `Byte`*)
+
+  - `internationalizable` : booléen indiquant si la valeur de cet attribut est
+    internationalisable (faux par défaut) 
+
+  - `maxsize` : entier donnant la taille maximum de la chaine (pas de limite par
+    défaut)  
+
+* propriétés optionnelles des relations : 
+
+  - `composite` : chaîne indiquant que le sujet (composite == 'subject') est
+    composé de ou des objets de la relation. Pour le cas opposé (l'objet est
+    composé de ou des sujets de la relation, il suffit de mettre 'object' comme
+    valeur. La composition implique que quand la relation est supprimée (et donc
+    aussi quand le composite est supprimé), le ou les composés le sont
+    également. 
+
+Contraintes
+```````````
+Par défaut les types de contraintes suivant sont disponibles :
+
+* `SizeConstraint` : permet de spécifier une taille minimale et/ou maximale sur
+  les chaines de caractères (cas générique de `maxsize`)
+
+* `BoundConstraint` : permet de spécifier une valeur minimale et/ou maximale sur
+  les types numériques
+
+* `UniqueConstraint` : identique à "unique=True"
+
+* `StaticVocabularyConstraint` : identique à "vocabulary=(...)"
+
+* `RQLConstraint` : permet de spécifier une requête RQL devant être satisfaite
+  par le sujet et/ou l'objet de la relation. Dans cette requête les variables `S`
+  et `O` sont prédéfinies respectivement comme l'entité sujet et objet de la
+  relation
+
+* `RQLVocabularyConstraint` : similaire à la précédente, mais exprimant une
+  contrainte "faible", i.e. servant uniquement à limiter les valeurs apparaissant
+  dans la liste déroulantes du formulaire d'édition, mais n'empêchant pas une
+  autre entité d'être sélectionnée
+
+
+Définition d'un type de relation
+--------------------------------
+
+Un type de relation est défini par une classe python héritant de `RelationType`. Le
+nom de la classe correspond au nom du type. Ensuite le corps de la classe
+contient la description des propriétés de ce type de relation, ainsi
+qu'éventuellement une chaine pour le sujet et une autre pour l'objet permettant
+de créer des définitions de relations associées (auquel cas il est possibles de
+donner sur la classe les propriétés de définition de relation explicitées
+ci-dessus), par exemple ::
+
+  class verrouille_par(RelationType):
+    """relation sur toutes les entités applicatives indiquant que celles-ci sont vérouillées
+    inlined = True
+    cardinality = '?*'
+    subject = '*'
+    object = 'EUser'
+
+En plus des permissions, les propriétés propres aux types de relation (et donc
+partagés par toutes les définitions de relation de ce type) sont :
+
+* `inlined` : booléen contrôlant l'optimisation physique consistant à stocker la
+  relation dans la table de l'entité sujet au lieu de créer une table spécifique
+  à la relation. Cela se limite donc aux relations dont la cardinalité
+  sujet->relation->objet vaut 0..1 ('?') ou 1..1 ('1')
+
+* `symetric` : booléen indiquant que la relation est symétrique. i.e.
+  `X relation Y` implique `Y relation X`
+
+Dans le cas de définitions de relations simultanées, `subject` et `object` peuvent
+tous deux valoir la même chose que ce qui est décrit pour le 1er argument de
+`SubjectRelation` et `ObjectRelation`.
+
+A partir du moment où une relation n'est ni mise en ligne, ni symétrique, et
+ne nécessite pas de permissions particulières, sa définition (en utilisant
+`SubjectRelation` ou `ObjectRelation`) est suffisante.
+
+
+Définition des permissions
+--------------------------
+
+La définition des permissions se fait à l'aide de l'attribut `permissions` des
+types d'entité ou de relation. Celui-ci est un dictionnaire dont les clés sont
+les types d'accès (action), et les valeurs les groupes ou expressions autorisées. 
+
+Pour un type d'entité, les actions possibles sont `read`, `add`, `update` et
+`delete`.
+
+Pour un type de relation, les actions possibles sont `read`, `add`, et `delete`.
+
+Pour chaque type d'accès, un tuple indique le nom des groupes autorisés et/ou
+une ou plusieurs expressions RQL devant être vérifiées pour obtenir
+l'accès. L'accès est donné à partir du moment où l'utilisateur fait partie d'un
+des groupes requis ou dès qu'une expression RQL est vérifiée.
+
+Les groupes standards sont :
+
+* `guests`
+
+* `users`
+
+* `managers`
+
+* `owners` : groupe virtuel correspondant au propriétaire d'une entité. Celui-ci
+  ne peut être utilisé que pour les actions `update` et `delete` d'un type
+  d'entité. 
+
+Il est également possible d'utiliser des groupes spécifiques devant être pour
+cela créés dans le precreate de l'application (`migration/precreate.py`).
+
+Utilisation d'expression RQL sur les droits en écriture
+```````````````````````````````````````````````````````
+Il est possible de définir des expressions RQL donnant des droits de
+modification (`add`, `delete`, `update`) sur les types d'entité et de relation.
+
+Expression RQL pour les permissions sur un type d'entité :
+
+* il faut utiliser la classe `ERQLExpression`
+
+* l'expression utilisée correspond à la clause WHERE d'une requête RQL
+
+* dans cette expression, les variables X et U sont des références prédéfinies
+  respectivement sur l'entité courante (sur laquelle l'action est vérifiée) et
+  sur l'utilisateur ayant effectué la requête
+
+* il est possible d'utiliser dans cette expression les relations spéciales
+  "has_<ACTION>_permission" dont le sujet est l'utilisateur et l'objet une
+  variable quelconque, signifiant ainsi que l'utilisateur doit avoir la
+  permission d'effectuer l'action <ACTION> sur la ou les entités liées cette
+  variable
+
+Pour les expressions RQL sur un type de relation, les principes sont les mêmes
+avec les différences suivantes :
+
+* il faut utiliser la classe `RRQLExpression` dans le cas d'une relation non
+  finale
+
+* dans cette expression, les variables S, O et U sont des références
+  prédéfinies respectivement sur le sujet et l'objet de la relation
+  courante (sur laquelle l'action est vérifiée) et sur l'utilisateur
+  ayant effectué la requête
+
+* On peut aussi définir des droits sur les attributs d'une entité (relation non
+  finale), sachant les points suivants :
+
+  - pour définir des expressions rql, il faut utiliser la classe `ERQLExpression`
+    dans laquelle X représentera l'entité auquel appartient l'attribut
+
+  - les permissions 'add' et 'delete' sont équivalentes. En pratique seules
+    'add'/'read' sont prises en considération
+
+
+En plus de cela, le type d'entité `EPermission` de la librairie standard permet
+de construire des modèles de sécurités très complexes et dynamiques. Le schéma
+de ce type d'entité est le suivant : ::
+
+
+    class EPermission(MetaEntityType):
+	"""entity type that may be used to construct some advanced security configuration
+	"""
+	name = String(required=True, indexed=True, internationalizable=True, maxsize=100)
+	require_group = SubjectRelation('EGroup', cardinality='+*',
+					description=_('groups to which the permission is granted'))
+	require_state = SubjectRelation('State',
+				    description=_("entity'state in which the permission is applyable"))
+	# can be used on any entity
+	require_permission = ObjectRelation('**', cardinality='*1', composite='subject',
+					    description=_("link a permission to the entity. This "
+							  "permission should be used in the security "
+							  "definition of the entity's type to be useful."))
+
+
+Exemple de configuration extrait de *jpl* ::
+
+    ...
+
+    class Version(EntityType):
+	"""a version is defining the content of a particular project's release"""
+
+	permissions = {'read':   ('managers', 'users', 'guests',),
+		       'update': ('managers', 'logilab', 'owners',),
+		       'delete': ('managers', ),
+		       'add':    ('managers', 'logilab',
+				  ERQLExpression('X version_of PROJ, U in_group G,'
+						 'PROJ require_permission P, P name "add_version",'
+						 'P require_group G'),)}
+
+    ...
+
+    class version_of(RelationType):
+	"""link a version to its project. A version is necessarily linked to one and only one project.
+	"""
+	permissions = {'read':   ('managers', 'users', 'guests',),
+		       'delete': ('managers', ),
+		       'add':    ('managers', 'logilab',
+				  RRQLExpression('O require_permission P, P name "add_version",'
+						 'U in_group G, P require_group G'),)
+		       }
+	inlined = True
+
+Cette configuration indique qu'une entité `EPermission` de nom
+"add_version" peut être associée à un projet et donner le droit de créer des
+versions sur ce projet à des groupes spécifiques. Il est important de noter les
+points suivants :
+
+* dans ce cas il faut protéger à la fois le type d'entité "Version" et la
+  relation liant une version à un projet ("version_of")
+
+* du fait de la généricité du type d'entité `EPermission`, il faut effectuer
+  l'unification avec les groupes et / ou les états le cas échéant dans
+  l'expression ("U in_group G, P require_group G" dans l'exemple ci-dessus)
+
+
+Utilisation d'expression RQL sur les droits en lecture
+``````````````````````````````````````````````````````
+Les principes sont les mêmes mais avec les restrictions suivantes :
+
+* on ne peut pas utiliser de `RRQLExpression` sur les types de relation en lecture
+
+* les relations spéciales "has_<ACTION>_permission" ne sont pas utilisables
+
+
+Note sur l'utilisation d'expression RQL sur la permission 'add'
+```````````````````````````````````````````````````````````````
+L'utilisation d'expression RQL sur l'ajout d'entité ou de relation pose
+potentiellement un problème pour l'interface utilisateur car si l'expression
+utilise l'entité ou la relation à créer, on n'est pas capable de vérifier les
+droits avant d'avoir effectué l'ajout (noter que cela n'est pas un problème coté
+serveur rql car la vérification des droits est effectuée après l'ajout
+effectif). Dans ce cas les méthodes de vérification des droits (check_perm,
+has_perm) peuvent indiquer qu'un utilisateur n'a pas le droit d'ajout alors
+qu'il pourrait effectivement l'obtenir. Pour pallier ce souci il est en général
+nécessaire dans tel cas d'utiliser une action reflétant les droits du schéma
+mais permettant de faire la vérification correctement afin qu'elle apparaisse
+bien le cas échéant.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/devmanual_fr/sect_installation.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,82 @@
+.. -*- coding: utf-8 -*-
+
+============
+Installation
+============
+
+Installation de Cubicweb et de ses dépendances
+----------------------------------------------
+Tout le système Cubicweb est préparé pour l'installation sur une machine
+debian. L'installation manuelle est un peu pénible du fait des nombreuses
+dépendances à installer (twisted, postgres, autres paquets python...). Nous
+supposerons donc ici que l'installation se fait sur une machine debian ayant
+dans ses sources apt un entrepôt contenant les paquets pour Erudi.
+
+Pour tout installer sur le système ::
+
+  apt-get install cubicweb
+
+On peut également n'installer que les paquets erudi-server ou erudi-twisted pour
+n'avoir que la partie serveur ou client web sur une machine.
+
+Pour tout installer la documentation et les librairies/outils de développement ::
+
+  apt-get install cubicweb-documentation cubicweb-dev
+
+On pourra ensuite installer les paquets suivants :
+
+* `pyro` si vous voulez que l'entrepôt soit accessible via Pyro ou si le client
+  et le serveur ne sont pas sur la même machine (auquel cas il faut installer ce
+  paquet sur les machines clientes et serveur)
+
+* `python-ldap` si vous voulez utiliser une source ldap sur le serveur
+
+* `postgresql-8.1`, `postgresql-contrib-8.1` et `postgresql-plpython-8.1` la
+  machine devant héberger la base de données système
+
+Configuration de l'environnement
+--------------------------------
+Ajouter les lignes suivantes à son `.bashrc` ou `.bash_profile` pour configurer
+votre environnement de développement ::
+
+  export ERUDI_REGISTRY=~/etc/erudi.d/
+  export ERUDI_TEMPLATES=~/hg/
+  export ERUDI_RUNTIME=/tmp/
+
+Cela suppose que le composant erudi que vous développez est dans un
+sous-répertoire de *~/hg/* et que vous avez créé le répertoire *~/etc/erudi.d/*
+pour que `cubicweb-ctl` y place vos instances de test.
+
+
+Configuration Postgres
+----------------------
+* création d'un super utilisateur pour la création d'instance (**root**) ::
+
+    createuser --superuser --createdb -P pgadmin
+
+  Un mot de passe de connexion pour cet utilisateur vous sera demandé. Il
+  faudra utiliser ce login / mot de passe à la création d'instance via
+  `cubicweb-ctl`
+
+* installation des extensions pour l'index plein texte ::
+
+    cat /usr/share/postgresql/8.1/contrib/tsearch2.sql | psql -U pgadmin template1
+
+* installation du langage plpythonu par défaut ::
+
+    createlang -U pgadmin plpythonu template1
+
+
+Configuration Pyro
+------------------
+Si vous utilisez Pyro, il est nécessaire d'avoir un serveur de noms Pyro
+tournant sur votre réseau (par défaut celui-ci est repéré par une requête
+broadcast). Pour cela il faut soit :
+
+* le lancer à la main avant le démarrage de erudi avec la commande `pyro-ns`
+
+* le lancer à la main avant le démarrage de erudi sous forme d'un serveur avec
+  la commande `pyro-nsd start`
+
+* éditer le fichier */etc/default/pyro-nsd* pour que le serveur de nom pyro soit
+  lancé automatiquement au démarrage de la machine
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/devmanual_fr/sect_mercurial.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,112 @@
+.. -*- coding: utf-8 -*-
+
+Présentation de Mercurial
+-------------------------
+
+Introduction
+````````````
+Mercurial_ gère un ensemble distribué d'entrepôts contenant des arbres de
+révisions (chaque révision indique les changements à effectuer pour obtenir la
+version suivante, et ainsi de suite). Localement, on dispose d'un entrepôt
+contenant un arbre de révisions, et d'un répertoire de travail. Il est possible
+de mettre dans son répertoire de travail, une des versions issue de son entrepôt
+local, de la modifier puis de la verser dans son entrepôt. Il est également
+possible de récupérer dans son entrepôt local des révisions venant d'un autre
+entrepôt, ou d'exporter ses propres révisions depuis son entrepôt local vers un
+autre entrepôt.
+
+A noter que contrairement à CVS/Subversion, on crée généralement un entrepôt par
+projet à gérer.
+
+Lors d'un développement collaboratif, on crée généralement un entrepôt central
+accessible à tous les développeurs du projet. Ces entrepôts centraux servent de
+référence. Selon ses besoins, chacun peut ensuite disposer d'un entrepôt local,
+qu'il faudra penser à synchroniser avec l'entrepôt central de temps à autre. 
+
+
+Principales commandes
+`````````````````````
+* Créer un entrepôt local ::
+
+    hg clone ssh://orion//home/src/prive/rep
+
+* Voir le contenu de l'entrepôt local (outil graphique en Tk) ::
+
+    hg view
+
+* Ajouter un sous-répertoire ou un fichier dans le répertoire courant ::
+
+    hg add rep
+
+* Placer dans son répertoire de travail une révision spécifique (ou la dernière
+  revision) issue de l'entrepôt local ::
+
+    hg update [identifiant-revision]
+    hg up [identifiant-revision]
+
+* Récupérer dans son entrepôt local, l'arbre de révisions contenu dans un
+  entrepôt distant (cette opération ne modifie pas le répertoire local) ::
+
+    hg pull ssh://orion//home/src/prive/rep
+    hg pull -u ssh://orion//home/src/prive/rep # équivalent à pull + update
+
+* Voir quelles sont les têtes de branches de l'entrepôt local si un `pull` a
+  tiré une nouvelle branche ::
+
+    hg heads
+
+* Verser le répertoire de travail dans l'entrepôt local (et créer une nouvelle
+  révision) ::
+
+    hg commit
+    hg ci
+
+* Fusionner, avec la révision mère du répertoire local, une autre révision issue
+  de l'entrepôt local (la nouvelle révision qui en résultera aura alors deux
+  révisions mères) ::
+
+    hg merge identifiant-revision
+
+* Exporter dans un entrepôt distant, l'arbre de révisions contenu dans son
+  entrepôt local (cette opération ne modifie pas le répertoire local) ::
+
+    hg push ssh://orion//home/src/prive/rep
+
+* Voir quelle sont les révisions locales non présentes dans un autre entrepôt ::
+
+    hg outgoing ssh://orion//home/src/prive/rep
+
+* Voir quelle sont les révisions d'un autre entrepôt non présentes localement ::
+
+    hg incoming ssh://orion//home/src/prive/rep
+
+* Voir quelle est la révision issue de l'entrepôt local qui a été sortie dans le
+  répertoire de travail et modifiée ::
+
+    hg parent
+
+* Voir les différences entre le répertoire de travail et la révision mère de
+  l'entrepôt local, éventuellement permettant de les verser dans l'entrepôt
+  local ::
+
+    hg diff
+    hg commit-tool
+    hg ct
+
+
+Bonnes pratiques
+````````````````
+* penser à faire un `hg pull -u` régulièrement et particulièrement avant de
+  faire un `hg commit`
+
+* penser à faire un `hg push` lorsque votre entrepôt contient une version
+  relativement stable de vos modifications
+
+* si un `hg pull -u` a créé une nouvelle tête de branche :
+
+  1. identifier l'identifiant de celle-ci avec `hg head`
+  2. fusionner avec `hg merge`
+  3. `hg ci`
+  4. `hg push`
+
+.. _Mercurial: http://www.selenic.com/mercurial/
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/devmanual_fr/sect_stdlib_schemas.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,70 @@
+.. -*- coding: utf-8 -*-
+
+Schémas prédéfinis dans la librairie
+-------------------------------------
+
+La librairie définit un certain nombre de schémas d'entités nécessaires
+au système ou bien couramment utilisées dans les applications `cubicweb`.
+Vous pouvez bien entendu étendre ces schémas au besoin.
+
+
+Schémas "systèmes"
+``````````````````
+
+* `EUser`, utilisateurs du système
+* `EGroup`, groupes d'utilisateurs
+* `EEType`, types d'entité
+* `ERType`, types de relation
+
+* `State`, état d'un workflow
+* `Transition`, transition d'un workflow
+* `TrInfo`, enregistrement d'un passage de transition pour une entité
+
+* `EmailAddress`, adresse électronique, utilisé par le système de notification
+  pour les utilisateurs et par d'autres schéma optionnels
+
+* `EProperty`, utilisé pour configurer l'application
+* `EPermission`, utilisé pour configurer la sécurité de l'application
+
+* `Card`, fiche documentaire générique
+* `Bookmark`, un type d'entité utilisé pour permettre à un utilisateur de
+  personnaliser ses liens de navigation dans l'application.
+
+
+Composants de la librairie
+``````````````````````````
+Une application est construite sur la base de plusieurs composants de base.
+Parmi les composants de base disponible, on trouve par exemple :
+
+* `ecomment`, fournit le type d'entité `Comment` permettant de commenter les
+  entités du site
+  
+* `emailinglist`, fournit le type d'entité `Mailinglist` regroupant des
+  informations sur une liste de discussion
+
+* `efile`, fournit les types d'entités `File` et `Image` utilisés pour
+  représenter des fichiers (texte ou binaire) avec quelques données
+  supplémentaires comme le type MIME ou l'encodage le cas échéant ().
+  
+* `elink`, fournit le type d'entité lien internet (`Link`)
+
+* `eblog`, fournit le type d'entité weblog (`Blog`)
+
+* `eperson`, fournit le type d'entité personne physique (`Person`)
+
+* `eaddressbook`, fournit les types d'entités utilisés pour représenter des n°
+  de téléphone (`PhoneNumber`) et des adresses postales (`PostalAddress`)
+  
+* `eclasstags`, système de classification à base d'étiquettes (`Tag`)
+
+* `eclassfolders`, système de classification à base de dossiers hiérarchiques
+  destinés à créer des rubriques de navigation (`Folder`)
+
+* `eemail`, gestion d'archives de courriers électroniques (`Email`, `Emailpart`,
+  `Emailthread`)
+
+* `ebasket`, gestion de paniers (`Basket`) permettant de regrouper des entités
+
+Pour déclarer l'utilisation d'un composant, une fois celui-ci installé, ajoutez
+le nom du composant à la variable `__use__` du fichier `__pkginfo__.py` de
+votre propre composant.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/devmanual_fr/sect_stdlib_vues.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,65 @@
+.. -*- coding: utf-8 -*-
+
+Vues prédéfinies dans la librairie
+----------------------------------
+Un certain nombre de vues sont utilisées pour construire l'interface web, qui
+s'appliquent à une ou plusieurs entités. On les distingue par leur identifiant,
+et les principales sont : 
+
+:primary:
+    vue principale pour une entité, elle est appelée par défaut lorsqu'il n'y a
+    qu'un seul élément correspondant à la recherche. Cette vue est censée
+    afficher le maximum d'informations à propos de l'objet.
+:secondary:
+    vue secondaire d'une entité. Par défaut, elle affiche les deux premiers
+    attributs de l'entité sous la forme d'un lien cliquable amenant sur la vue
+    primaire.
+:oneline:
+    similaire à la vue `secondary`, mais appelée dans des cas où l'on désire que
+    la vue tienne sur une ligne, ou de manière générale juste avoir une vue plus
+    abrégée. Par défaut, cette vue utilise le paramètre de configuration
+    `MAX_LINE_CHAR` pour contrôler la taille du résultat.
+:text:
+    similaire à la vue `oneline`, mais ne devant pas contenir de html.
+:incontext, outofcontext:
+    similaire à la vue `secondary`, mais appelé si l'entité est considérée comme
+    en dehors ou dans son contexte. Par défaut renvoie respectivement le
+    résultat de `textincontext` et `textoutofcontext` entouré par un lien
+    permettant d'accéder à la vue primaire de l'entité
+:textincontext, textoutofcontext:
+    similaire à la vue `text`, mais appelé si l'entité est considérée comme
+    en dehors ou dans son contexte. Par défaut renvoie respectivement le
+    résultat des méthodes `.dc_title` et `.dc_long_title` de l'entité
+:list:
+    crée une liste html (<ul>) et appelle la vue `listitem` pour chaque entité
+:listitem:
+    redirige par défaut vers la vue `outofcontext`
+:rss:
+    crée une vue RSS/XML et appelle la vue `rssitem` pour chaque entité
+:rssitem:
+    crée une vue RSS/XML pour une entité à partir des résultats renvoyés par les
+    méthodes dublin core de l'objet (`dc_*`)
+
+Vues de départ :
+
+:index:
+    page d'accueil
+:schema:
+    affiche le schéma de l'application
+
+Vues particulières :
+
+:noresult:
+    appelé si le result set est vide
+:finall:
+    affiche la valeur de la cellule sans transformation (dans le cas d'une
+    entité non finale, on voit son eid). Appelable sur n'importe quel result
+    set.
+:table:
+    crée une table html (<table>) et appelle la vue `cell` pour chaque cellule
+    du résultat. Appelable sur n'importe quel result set.
+:cell:
+    par défaut redirige sur la vue `final` si c'est une entité finale
+    ou sur la vue `outofcontext` sinon
+:null:
+    vue toujours appelable et ne retournant rien
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/faq.fr.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,8 @@
+.. -*- coding: utf-8 -*-
+
+Frequently Asked Questions
+==========================
+
+[FILL ME]
+
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/howto.fr.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,32 @@
+.. -*- coding: utf-8 -*-
+
+HOW TO
+======
+
+* Comment mettre à jour une base de données après avoir modifié le schéma?
+  
+  Cela dépend de ce qui a été modifié dans le schéma. 
+  
+  * Modification d'une relation non finale
+
+  * Modification d'une relation finale 
+  [TO COMPLETE]
+
+* Comment créer un utilisateur anonyme?
+  
+  Dans le fichier ``all-in-one.conf`` de votre instance, définir l'utilisateur
+  anonyme en initialisant les valeurs des variables suivantes ::
+  
+    # login of the Erudi user account to use for anonymous user (if you want to
+    # allow anonymous)
+    anonymous-user=anon
+
+    # password of the Erudi user account matching login
+    anonymous-password=anon
+
+  Vous devez aussi vous assurer que cet utilisateur `anon` existe dans la base
+  de données, le plus simple étant de s'identifier sur votre application en
+  administrateur et de rajouter l'utilisateur `anon` via l'interface d'administration.
+
+* 
+
Binary file doc/html-build/.doctrees/devmanual_fr/advanced_notes.doctree has changed
Binary file doc/html-build/.doctrees/devmanual_fr/chap_autres_composants_ui.doctree has changed
Binary file doc/html-build/.doctrees/devmanual_fr/chap_bases_framework_cubicweb.doctree has changed
Binary file doc/html-build/.doctrees/devmanual_fr/chap_configuration_instance.doctree has changed
Binary file doc/html-build/.doctrees/devmanual_fr/chap_definition_schema.doctree has changed
Binary file doc/html-build/.doctrees/devmanual_fr/chap_definition_workflows.doctree has changed
Binary file doc/html-build/.doctrees/devmanual_fr/chap_fondements_cubicweb.doctree has changed
Binary file doc/html-build/.doctrees/devmanual_fr/chap_i18n.doctree has changed
Binary file doc/html-build/.doctrees/devmanual_fr/chap_manipulation_donnees.doctree has changed
Binary file doc/html-build/.doctrees/devmanual_fr/chap_migration.doctree has changed
Binary file doc/html-build/.doctrees/devmanual_fr/chap_mise_en_place_environnement.doctree has changed
Binary file doc/html-build/.doctrees/devmanual_fr/chap_rql.doctree has changed
Binary file doc/html-build/.doctrees/devmanual_fr/chap_serveur_crochets.doctree has changed
Binary file doc/html-build/.doctrees/devmanual_fr/chap_serveur_notification.doctree has changed
Binary file doc/html-build/.doctrees/devmanual_fr/chap_tests.doctree has changed
Binary file doc/html-build/.doctrees/devmanual_fr/chap_ui_gestion_formulaire.doctree has changed
Binary file doc/html-build/.doctrees/devmanual_fr/chap_ui_js_json.doctree has changed
Binary file doc/html-build/.doctrees/devmanual_fr/chap_visualisation_donnees.doctree has changed
Binary file doc/html-build/.doctrees/devmanual_fr/index.doctree has changed
Binary file doc/html-build/.doctrees/devmanual_fr/sect_cubicweb-ctl.doctree has changed
Binary file doc/html-build/.doctrees/devmanual_fr/sect_definition_entites.doctree has changed
Binary file doc/html-build/.doctrees/devmanual_fr/sect_definition_schema.doctree has changed
Binary file doc/html-build/.doctrees/devmanual_fr/sect_installation.doctree has changed
Binary file doc/html-build/.doctrees/devmanual_fr/sect_mercurial.doctree has changed
Binary file doc/html-build/.doctrees/devmanual_fr/sect_stdlib_schemas.doctree has changed
Binary file doc/html-build/.doctrees/devmanual_fr/sect_stdlib_vues.doctree has changed
Binary file doc/html-build/.doctrees/environment.pickle has changed
Binary file doc/html-build/.doctrees/index.doctree has changed
Binary file doc/html-build/.doctrees/plan_formation_python_cubicweb.doctree has changed
Binary file doc/html-build/.doctrees/querier.doctree has changed
Binary file doc/html-build/.doctrees/securite.doctree has changed
Binary file doc/html-build/.doctrees/source/index.doctree has changed
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/html-build/source/index.html	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,108 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+  "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"><html xmlns="http://www.w3.org/1999/xhtml">
+  <head>
+    <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+    <title>Welcome to Cubicweb&#8217;s documentation! &mdash; Cubicweb v2 documentation</title>
+    <link rel="stylesheet" href="../_static/sphinx-default.css" type="text/css" />
+    <link rel="stylesheet" href="../_static/pygments.css" type="text/css" />
+    <script type="text/javascript">
+      var DOCUMENTATION_OPTIONS = {
+          URL_ROOT:    '../',
+          VERSION:     '2',
+          COLLAPSE_MODINDEX: false,
+          FILE_SUFFIX: ''
+      };
+    </script>
+    <script type="text/javascript" src="../_static/jquery.js"></script>
+    <script type="text/javascript" src="../_static/interface.js"></script>
+    <script type="text/javascript" src="../_static/doctools.js"></script>
+    <link rel="contents" title="Global table of contents" href="../contents.html" />
+    <link rel="index" title="Global index" href="../genindex.html" />
+    <link rel="search" title="Search" href="../search.html" />
+    <link rel="top" title="Cubicweb v2 documentation" href="../index.html" />
+  </head>
+  <body>
+
+
+<div class="logilablogo">
+	<a class="logogo" href="http://www.logilab.org"><img border="0" src="../_static/logilab.png"/></a>
+  </div>
+
+    <div class="related">
+      <h3>Navigation</h3>
+      <ul>
+        <li class="right" style="margin-right: 10px">
+          <a href="../genindex.html" title="General Index"
+             accesskey="I">index</a></li>
+        <li class="right" >
+          <a href="../modindex.html" title="Global Module Index"
+             accesskey="M">modules</a> |</li>
+        <li><a href="../index.html">Cubicweb v2 documentation</a> &raquo;</li>
+      </ul>
+    </div>
+    <div class="document">
+      <div class="documentwrapper">
+        <div class="bodywrapper">
+          <div class="body">
+            
+  
+  <div class="section" id="welcome-to-cubicweb-s-documentation">
+<h1 id="welcome-to-cubicweb-s-documentation">Welcome to Cubicweb&#8217;s documentation!<a class="headerlink" href="#welcome-to-cubicweb-s-documentation" title="Permalink to this headline">¶</a></h1>
+<p>Contents:</p>
+</div>
+<div class="section" id="indices-and-tables">
+<h1 id="indices-and-tables">Indices and tables<a class="headerlink" href="#indices-and-tables" title="Permalink to this headline">¶</a></h1>
+<ul class="simple">
+<li><a class="reference external" href="../genindex.html"><em>Index</em></a></li>
+<li><a class="reference external" href="../modindex.html"><em>Module Index</em></a></li>
+<li><a class="reference external" href="../search.html"><em>Search Page</em></a></li>
+</ul>
+</div>
+
+
+          </div>
+        </div>
+      </div>
+      <div class="sphinxsidebar">
+        <div class="sphinxsidebarwrapper">
+            <h3>Table Of Contents</h3>
+            <ul>
+<li><a class="reference external" href="">Welcome to Cubicweb&#8217;s documentation!</a><ul>
+</ul>
+</li>
+<li><a class="reference external" href="#indices-and-tables">Indices and tables</a></li>
+</ul>
+
+            <h3>This Page</h3>
+            <ul class="this-page-menu">
+              <li><a href="../_sources/source/index.txt">Show Source</a></li>
+            </ul>
+            <h3>Quick search</h3>
+            <form class="search" action="../search.html" method="get">
+              <input type="text" name="q" size="18" /> <input type="submit" value="Go" />
+              <input type="hidden" name="check_keywords" value="yes" />
+              <input type="hidden" name="area" value="default" />
+            </form>
+        </div>
+      </div>
+      <div class="clearer"></div>
+    </div>
+    <div class="related">
+      <h3>Navigation</h3>
+      <ul>
+        <li class="right" style="margin-right: 10px">
+          <a href="../genindex.html" title="General Index"
+             accesskey="I">index</a></li>
+        <li class="right" >
+          <a href="../modindex.html" title="Global Module Index"
+             accesskey="M">modules</a> |</li>
+        <li><a href="../index.html">Cubicweb v2 documentation</a> &raquo;</li>
+      </ul>
+    </div>
+    <div class="footer">
+      &copy; Copyright 2008, Logilab Inc..
+      Last updated on Oct 31, 2008.
+      Created using <a href="http://sphinx.pocoo.org/">Sphinx</a>.
+    </div>
+  </body>
+</html>
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/index-content.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,47 @@
+.. -*- coding: utf-8 -*-
+
+.. _contents:
+
+Utilisateur
+===========
+
+Description du modèle de sécurité
+---------------------------------
+
+.. toctree::
+   :maxdepth: 1
+   
+   securite.txt
+
+
+
+Développeur
+===========
+
+Développement d'applications CubicWeb
+-------------------------------------
+
+.. toctree::
+   :maxdepth: 1
+
+   devmanual_fr/index.txt
+   devmanual_fr/gae.txt
+
+
+
+Détails d'implémentations
+=========================
+
+.. toctree::
+   :maxdepth: 1
+
+   cubicweb-uml.txt
+
+Détail sur l'éxécution d'une requête complexe en multi-sources
+--------------------------------------------------------------
+
+.. toctree::
+   :maxdepth: 1
+   
+   querier.txt
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/index.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,58 @@
+.. -*- coding: utf-8 -*-
+
+.. _contents:
+
+=======================================
+Index de la documentation pour CubicWeb
+=======================================
+:authors: Nicolas Chauvat, Sylvain Thénault, Adrien Di Mascio
+:date: 2008-07-12
+:version: 3.0.0 
+:organisation: Logilab
+:copyright: © 2008 Logilab
+:contact: contact@logilab.fr
+
+
+Documentation générale de Cubicweb
+==================================
+
+.. toctree::
+   :maxdepth: 3
+
+   index-content.txt
+
+Tutoriel : créer votre première application web
+===============================================
+
+.. toctree::
+   :maxdepth: 1
+
+   tutmanual_fr/tut-create-app.fr.txt
+
+FAQ
+===
+.. toctree::
+   :maxdepth: 1
+
+   faq.fr.txt
+
+
+HOW TO
+======
+.. toctree::
+   :maxdepth: 1
+
+   howto.fr.txt
+
+Références API
+==============
+
+
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/makefile	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,22 @@
+MKHTML=mkdoc
+MKHTMLOPTS=--doctype article --target html --stylesheet standard 
+SRC=.
+
+TXTFILES:= $(wildcard *.txt)
+TARGET := $(TXTFILES:.txt=.html)
+
+all: ${TARGET} devmanual apidoc
+
+%.html: %.txt
+	${MKHTML} ${MKHTMLOPTS} $<
+
+devmanual:
+	cd devmanual_fr && make	
+#apydoc: 
+#	epydoc --html -o epydoc/ -n ../server/*.py ../core/*.py ../common/*.py ../server/*/*.py ../modpython/*/*.py ../common/*/*.py
+apidoc:
+	epydoc --html -o apidoc -n "cubicweb" --exclude=setup --exclude=__pkginfo__ ../
+clean:
+	rm -rf apidoc/
+	rm -f *.html
+	cd devmanual_fr && make	clean
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/plan_formation_python_cubicweb.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,99 @@
+.. -*- coding: utf-8 -*-
+
+Formation Python-CubicWeb 5 jours
+=================================
+
+Bases Python
+------------
+
+Syntaxe de base
+~~~~~~~~~~~~~~~
+:durée: 1j
+
+Modèle objet
+~~~~~~~~~~~~
+:durée: 0.5j
+
+Traitement de chaînes
+~~~~~~~~~~~~~~~~~~~~~
+:durée: 0.3j
+  Formatage de chaînes
+  Unicode
+
+Entrées/Sorties
+~~~~~~~~~~~~~~~
+:durée: 0.2j
+  Fichiers
+  `StringIO`
+
+Structures avancées
+~~~~~~~~~~~~~~~~~~~
+:durée: 0.5j
+  `object`, `super`
+  iterateurs
+  générateurs
+  list comprehension / generator expression
+  descripteurs, properties (`classmethod`, `property`)
+  décorateurs
+
+
+Développement CubicWeb
+-------------------
+
+Mise en place d'un environnement de développement CubicWeb
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+:durée: 0.5j
+  Introduction mercurial
+  Installation de cubicweb et de ses dépendances (debian !)
+  Postgres
+  Notion de template et d'instance
+  L'outil `cubicweb-ctl`
+  Création du modèle
+  Création d'une instance de développement
+
+Fondements CubicWeb
+~~~~~~~~~~~~~~~~~~~
+:durée: 0.1j
+  Vocabulaire
+  Le langage RQL
+  Client/Serveur
+
+Définition du modèle de données
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+:durée: 0.4j
+  Définition d'entité
+  Définition de relation
+  Persistance du schéma
+  Migration de schéma
+
+Fondements du framework CubicWeb
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+:durée: 0.1j
+  La classe `appobject`
+  La base de registres
+  Chargement dynamique des classes
+  
+Manipulation des données stockées
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+:durée: 0.4j
+  Les classes `Entity` et `AnyEntity`
+  Paramétrages et extensions spécifiques
+  Écriture de tests unitaires
+
+Définition de vues
+~~~~~~~~~~~~~~~~~~
+:durée: 0.5j
+  Les classes de base des vues
+  Les vues prédéfinies dans la librairie
+  Les patrons
+  Vues binaires ou autre
+  Écriture de tests unitaires
+  Tests automatiques
+
+Autres composants de l'interface web
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+:durée: 0.5j
+  Actions
+  Component, VComponent
+  Forms, Controller
+  Eproperty
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/querier.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,62 @@
+.. -*- coding: utf-8 -*-
+
+Déroulement de l'exécution d'une requête en multi-source avec insertion de sécurité
+===================================================================================
+
+
+* 3 sources (system, ldap (Euser) et rql (Card)
+* permission en lecture Card is elle appartient à l'utilisateur
+
+Soit la requête de départ: ::
+
+  Any X,T WHERE X owned_by U, U login "syt", X title T
+
+1. récupération arbre de syntaxe et solution (+cache) ::
+
+     -> {X: Card, U: Euser}, {X: Blog, U: Euser}, {X: Bookmark, U: Euser}
+
+2. insertion sécurité ::
+
+     -> Any X,T WHERE X owned_by U, U login "syt", X title T, EXISTS(X owned_by UEID) / {X: Card, U: Euser}
+        Any X,T WHERE X owned_by U, U login "syt", X title T / {X: Blog, U: Euser}, {X: Bookmark, U: Euser}
+   
+3. construction plan
+   0. preprocessing (annotation des arbres de syntaxe)
+   
+   1. Any U WHERE U login "syt" / {U: Euser}
+      [system+ldap] => table1/varmap1{U:U2}
+      
+   2. Any X,T WHERE X owned_by U2, X title T / {X: Blog, U: Euser}, {X: Bookmark, U: Euser}
+      [varmap1|system] => TABLE2
+      
+   3. Deux alternatives:
+   
+     1. Any X,T WHERE X is Card, X title T {X: Card} ::
+
+          [system+rql] => table3/varmap3{X:X3, T:T3}
+	   
+        Any X3,T3 WHERE X3 owned_by U2, X3 title T3, EXISTS(X owned_by UEID) / {X3: Card, U2: Euser} ::
+
+          [(varmap1, varmap3)|system] => TABLE2
+       
+     2 Any X WHERE X is Card X owned_by U2, EXISTS(X owned_by UEID) / {X: Card, U2: Euser} ::
+
+          [varmap1|system] => EIDS
+	   
+       Any X,T WHERE X title T, X eid IN(EIDS) {X: Card} ::
+	 
+          [system+rql] => TABLE2
+   
+   4. renvoie contenu TABLE2.
+      Note : si agrégat / tri / distinct TABLE2 est nécessairement une table temporaire et besoin d'une
+      étape AggrStep supplémentaire
+      
+4. exécution du plan
+
+5. [construction description]
+
+6. renvoie ResultSet
+
+Notes sur UNION
+===============
+* en multi-sources, les résultats des unions peuvent être mélangés
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/securite.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,43 @@
+Le contrôle d'accès
+===================
+
+
+Vocabulaire
+-----------
+* Personne, Societe définissent deux *types* d'entité 
+* "Personne travaille_pour Societé" déclare qu'une relation
+  travaille_pour peut exister entre une entité de type Personne et une
+  entité de type Societe. L'ensemble des règles de ce type appliqué
+  à la relation "travaille_pour" définit le schéma de la relation
+  "travaille_pour"
+
+
+Description du modèle de sécurité
+---------------------------------
+
+Le modèle de sécurité de cubicweb est un modèle fondé sur des `Access
+Control List`. Les notions sont les suivantes :
+
+* utilisateurs et groupes d'utilisateurs
+* un utilisateur appartient à au moins un groupe
+* droits (lire, modifier, créer, supprimer) 
+* les droits sont attribués aux groupes (et non aux utilisateurs)
+
+Pour CubicWeb plus spécifiquement :
+
+* on associe les droits au niveau des schemas d'entites / relations
+* pour chaque type d'entité, on distingue les droits de lecture,
+  ajout, modification et suppression
+* pour chaque type de relation, on distingue les droits de lecture,
+  ajout et suppression (on ne peut pas modifier une relation)
+* les groupes de base sont : Administrateurs, Utilisateurs, Invités
+* les utilisateurs font par défaut partie du groupe Utilisateurs
+* on a un groupe virtuel "Utilisateurs Propriétaires", auquel on peut
+  associer uniquement les droits de suppression et de modification
+* on ne peut pas mettre d'utilisateurs dans ce groupe, ils y sont
+  ajoutés implicitement dans le contexte des objets dont ils sont
+  propriétaires 
+* les droits de ce groupe ne sont vérifiés que sur
+  modification / suppression si tous les autres groupes auxquels
+  l'utilisateur appartient se sont vu interdire l'accès
+
Binary file doc/tutmanual_fr/images/lax-book.00-login.en.png has changed
Binary file doc/tutmanual_fr/images/lax-book.01-start.en.png has changed
Binary file doc/tutmanual_fr/images/lax-book.02-cookie-values.en.png has changed
Binary file doc/tutmanual_fr/images/lax-book.02-create-blog.en.png has changed
Binary file doc/tutmanual_fr/images/lax-book.03-list-one-blog.en.png has changed
Binary file doc/tutmanual_fr/images/lax-book.03-site-config-panel.en.png has changed
Binary file doc/tutmanual_fr/images/lax-book.03-state-submitted.en.png has changed
Binary file doc/tutmanual_fr/images/lax-book.03-transitions-view.en.png has changed
Binary file doc/tutmanual_fr/images/lax-book.04-detail-one-blog.en.png has changed
Binary file doc/tutmanual_fr/images/lax-book.05-list-two-blog.en.png has changed
Binary file doc/tutmanual_fr/images/lax-book.06-add-relation-entryof.en.png has changed
Binary file doc/tutmanual_fr/images/lax-book.06-header-no-login.en.png has changed
Binary file doc/tutmanual_fr/images/lax-book.06-main-template-layout.en.png has changed
Binary file doc/tutmanual_fr/images/lax-book.06-main-template-logo.en.png has changed
Binary file doc/tutmanual_fr/images/lax-book.06-simple-main-template.en.png has changed
Binary file doc/tutmanual_fr/images/lax-book.07-detail-one-blogentry.en.png has changed
Binary file doc/tutmanual_fr/images/lax-book.08-schema.en.png has changed
Binary file doc/tutmanual_fr/images/lax-book.09-new-view-blogentry.en.png has changed
Binary file doc/tutmanual_fr/images/lax-book.10-blog-with-two-entries.en.png has changed
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/tutmanual_fr/tut-create-app.en.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,386 @@
+.. -*- coding: utf-8 -*-
+
+
+Tutoriel : créer votre première application web pour Google AppEngine
+=====================================================================
+
+[TRANSLATE ME TO FRENCH]
+
+This tutorial will guide you step by step to build a blog application 
+and discover the unique features of `LAX`. It assumes that you followed
+the :ref:`installation` guidelines and that both the `AppEngine SDK` and the
+`LAX` framework are setup on your computer.
+
+Creating a new application
+--------------------------
+
+We chose in this tutorial to develop a blog as an example of web application
+and will go through each required steps/actions to have it running with `LAX`.
+When you installed `LAX`, you saw a directory named ``skel``. Make a copy of
+this directory and call it ``BlogDemo``.
+
+The location of this directory does not matter. But once decided, make sure your ``PYTHONPATH`` is properly set (:ref:`installation`).
+
+
+Defining a schema
+-----------------
+
+With `LAX`, the schema/datamodel is the core of the application. This is where
+you will define the type of content you have to handle in your application.
+
+Let us start with something simple and improve on it iteratively. 
+
+In schema.py, we define two entities : ``Blog`` and ``BlogEntry``.
+
+::
+
+  class Blog(EntityType):
+      title = String(maxsize=50, required=True)
+      description = String()
+
+  class BlogEntry(EntityType):
+      title = String(maxsize=100, required=True)
+      publish_date = Date(default='TODAY')
+      text = String(fulltextindexed=True)
+      category = String(vocabulary=('important','business'))
+      entry_of = SubjectRelation('Blog', cardinality='?*')
+
+A Blog has a title and a description. The title is a string that is
+required by the class EntityType and must be less than 50 characters. 
+The description is a string that is not constrained.
+
+A BlogEntry has a title, a publish_date and a text. The title is a
+string that is required and must be less than 100 characters. The
+publish_date is a Date with a default value of TODAY, meaning that
+when a BlogEntry is created, its publish_date will be the current day
+unless it is modified. The text is a string that will be indexed in
+the full-text index and has no constraint.
+
+A BlogEntry also has a relationship ``entry_of`` that links it to a
+Blog. The cardinality ``?*`` means that a BlogEntry can be part of
+zero or one Blog (``?`` means `zero or one`) and that a Blog can
+have any number of BlogEntry (``*`` means `any number including
+zero`). For completeness, remember that ``+`` means `one or more`.
+
+Running the application
+-----------------------
+
+Defining this simple schema is enough to get us started. Make sure you
+followed the setup steps described in detail in the installation
+chapter (especially visiting http://localhost:8080/_load as an
+administrator), then launch the application with the command::
+
+   python dev_appserver.py BlogDemo
+
+and point your browser at http://localhost:8080/ (if it is easier for
+you, use the on-line demo at http://lax.appspot.com/).
+
+.. image:: images/lax-book.00-login.en.png
+   :alt: login screen
+
+After you log in, you will see the home page of your application. It
+lists the entity types: Blog and BlogEntry. If these links read
+``blog_plural`` and ``blogentry_plural`` it is because
+internationalization (i18n) is not working for you yet. Please ignore
+this for now.
+
+.. image:: images/lax-book.01-start.en.png
+   :alt: home page
+
+Creating system entities
+------------------------
+You can only create new users if you decided not to use google authentication.
+
+
+[WRITE ME : create users manages permissions etc]
+
+
+
+Creating application entities
+-----------------------------
+
+Create a Blog
+~~~~~~~~~~~~~
+
+Let us create a few of these entities. Click on the [+] at the right
+of the link Blog.  Call this new Blog ``Tech-blog`` and type in
+``everything about technology`` as the description, then validate the
+form by clicking on ``Validate``.
+
+.. image:: images/lax-book.02-create-blog.en.png
+   :alt: form to create a blog
+
+Click on the logo at top left to get back to the home page, then
+follow the Blog link that will list for you all the existing Blog.
+You should be seeing a list with a single item ``Tech-blog`` you
+just created.
+
+.. image:: images/lax-book.03-list-one-blog.en.png
+   :alt: displaying a list of a single blog
+
+Clicking on this item will get you to its detailed description except
+that in this case, there is not much to display besides the name and
+the phrase ``everything about technology``.
+
+.. image:: images/lax-book.04-detail-one-blog.en.png
+   :alt: displaying the detailed view of a blog
+
+Now get back to the home page by clicking on the top-left logo, then
+create a new Blog called ``MyLife`` and get back to the home page
+again to follow the Blog link for the second time. The list now
+has two items.
+
+.. image:: images/lax-book.05-list-two-blog.en.png
+   :alt: displaying a list of two blogs
+
+
+Create a BlogEntry
+~~~~~~~~~~~~~~~~~~
+
+Get back to the home page and click on [+] at the right of the link
+BlogEntry. Call this new entry ``Hello World`` and type in some text
+before clicking on ``Validate``. You added a new blog entry without
+saying to what blog it belongs. There is a box on the left entitled
+``actions``, click on the menu item ``modify``. You are back to the form
+to edit the blog entry you just created, except that the form now has
+another section with a combobox titled ``add relation``. Choose
+``entry_of`` in this menu and a second combobox appears where you pick
+``MyLife``. 
+
+You could also have, at the time you started to fill the form for a
+new entity BlogEntry, hit ``Apply`` instead of ``Validate`` and the 
+combobox titled ``add relation`` would have showed up.
+
+.. image:: images/lax-book.06-add-relation-entryof.en.png
+   :alt: editing a blog entry to add a relation to a blog
+
+Validate the changes by clicking ``Validate``. The entity BlogEntry
+that is displayed now includes a link to the entity Blog named
+``MyLife``.
+
+.. image:: images/lax-book.07-detail-one-blogentry.en.png
+   :alt: displaying the detailed view of a blogentry
+
+Remember that all of this was handled by the framework and that the
+only input that was provided so far is the schema. To get a graphical
+view of the schema, run the ``laxctl genschema BlogDemo`` command as
+explained in the installation section and point your browser to the
+URL http://localhost:8080/schema
+
+.. image:: images/lax-book.08-schema.en.png
+   :alt: graphical view of the schema (aka data-model)
+
+Site configuration
+------------------
+
+.. image:: images/lax-book.03-site-config-panel.en.png
+
+This panel allows you to configure the appearance of your application site.
+Six menus are available and we will go through each of them to explain how
+to use them.
+
+Navigation
+~~~~~~~~~~
+This menu provides you a way to adjust some navigation options depending on
+your needs, such as the number of entities to display by page of results.
+Here follows the detailed list of available options:
+  
+* navigation.combobox-limit : maximum number of entities to display in related
+  combo box (sample format: 23)
+* navigation.page-size : maximum number of objects displayed by page of results 
+  (sample format: 23)
+* navigation.related-limit : maximum number of related entities to display in 
+  the primary view (sample format: 23)
+* navigation.short-line-size : maximum number of characters in short description
+  (sample format: 23)
+
+UI
+~~
+This menu provides you a way to customize the user interface settings such as
+date format or encoding in the produced html.
+Here follows the detailed list of available options:
+
+* ui.date-format : how to format date in the ui ("man strftime" for format description)
+* ui.datetime-format : how to format date and time in the ui ("man strftime" for format
+  description)
+* ui.default-text-format : default text format for rich text fields.
+* ui.encoding : user interface encoding
+* ui.fckeditor : should HTML fields be edited using fckeditor (an HTML WYSIWYG editor).
+  You should also select text/html as default text format to actually get fckeditor.
+* ui.float-format : how to format float numbers in the ui
+* ui.language : language of the user interface
+* ui.main-template : id of main template used to render pages
+* ui.site-title	: site title, which is displayed right next to the logo in the header
+* ui.time-format : how to format time in the ui ("man strftime" for format description)
+
+
+Actions
+~~~~~~~
+This menu provides a way to configure the context in which you expect the actions
+to be displayed to the user and if you want the action to be visible or not. 
+You must have noticed that when you view a list of entities, an action box is 
+available on the left column which displays some actions as well as a drop-down 
+menu for more actions. 
+
+The context available are :
+
+* mainactions : actions listed in the left box
+* moreactions : actions listed in the `more` menu of the left box
+* addrelated : add actions listed in the left box
+* useractions : actions listed in the first section of drop-down menu 
+  accessible from the right corner user login link
+* siteactions : actions listed in the second section of drop-down menu
+  accessible from the right corner user login link
+* hidden : select this to hide the specific action
+
+Boxes
+~~~~~
+The application has already a pre-defined set of boxes you can use right away. 
+This configuration section allows you to place those boxes where you want in the
+application interface to customize it. 
+
+The available boxes are :
+
+* actions box : box listing the applicable actions on the displayed data
+
+* boxes_blog_archives_box : box listing the blog archives 
+
+* possible views box : box listing the possible views for the displayed data
+
+* rss box : RSS icon to get displayed data as a RSS thread
+
+* search box : search box
+
+* startup views box : box listing the configuration options available for 
+  the application site, such as `Preferences` and `Site Configuration`
+
+Components
+~~~~~~~~~~
+[WRITE ME]
+
+Contextual components
+~~~~~~~~~~~~~~~~~~~~~
+[WRITE ME]
+
+Set-up a workflow
+-----------------
+
+Before starting, make sure you refresh your mind by reading [link to
+definition_workflow chapter].
+
+We want to create a workflow to control the quality of the BlogEntry 
+submitted on your application. When a BlogEntry is created by a user
+its state should be `submitted`. To be visible to all, it needs to
+be in the state `published`. To move from `submitted` to `published`
+we need a transition that we can name `approve_blogentry`.
+
+We do not want every user to be allowed to change the state of a 
+BlogEntry. We need to define a group of users, `moderators`, and 
+this group will have appropriate permissions to approve BlogEntry
+to be published and visible to all.
+
+There are two ways to create a workflow: from the user interface,
+and also by defining it in ``migration/postcreate.py``. This script
+is executed each time a new ``./bin/laxctl db-init`` is done. 
+If you create the states and transitions through the user interface
+this means that next time you will need to initialize the database
+you will have to re-create all the entities. 
+We strongly recommend you create the workflow in ``migration/postcreate.py``
+and we will now show you how.
+The user interface would only be a reference for you to view the states 
+and transitions but is not the appropriate interface to define your
+application workflow.
+
+Update the schema
+~~~~~~~~~~~~~~~~~
+To enable a BlogEntry to have a State, we have to define a relation
+``in_state`` in the schema of BlogEntry. Please do as follows, add
+the line ``in_state (...)``::
+
+  class BlogEntry(EntityType):
+      title = String(maxsize=100, required=True)
+      publish_date = Date(default='TODAY')
+      text_format = String(meta=True, internationalizable=True, maxsize=50,
+                           default='text/rest', constraints=[format_constraint])
+      text = String(fulltextindexed=True)
+      category = String(vocabulary=('important','business'))
+      entry_of = SubjectRelation('Blog', cardinality='?*')
+      in_state = SubjectRelation('State', cardinality='1*')
+
+As you updated the schema, you will have to re-execute ``./bin/laxctl db-init``
+to initialize the database and migrate your existing entities.
+[WRITE ABOUT MIGRATION]
+
+Create states, transitions and group permissions
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+At the time the ``postcreate.py`` script is executed, several methods
+can be used. They are all defined in the ``class ServerMigrationHelper``.
+We will only discuss the method we use to create a workflow here.
+
+To define our workflow for BlogDemo, please add the following lines
+to ``migration/postcreate.py``::
+  
+  _ = unicode
+
+  moderators      = add_entity('EGroup', name=u"moderators")
+
+  submitted = add_state(_('submitted'), 'BlogEntry', initial=True)
+  published = add_state(_('published'), 'BlogEntry')
+
+  add_transition(_('approve_blogentry'), 'BlogEntry', (submitted,), published, ('moderators', 'managers'),)
+
+  checkpoint()
+
+``add_entity`` is used here to define the new group of users that we
+need to define the transitions, `moderators`.
+If this group required by the transition is not defined before the
+transition is created, it will not create the relation `transition 
+require the group moderator`.
+
+``add_state`` expects as the first argument the name of the state you are
+willing to create, then the entity type on which the state can be applied, 
+and an optional argument to set if the state is the initial state
+of the entity type or not.
+
+``add_transition`` expects as the first argument the name of the 
+transition, then the entity type on which we can apply the transition,
+then the list of possible initial states from which the transition
+can be applied, the target state of the transition, and the permissions
+(e.g. list of the groups of users who can apply the transition).
+
+.. image:: images/lax-book.03-transitions-view.en.png
+
+You can now notice that in the actions box of a BlogEntry, the state
+is now listed as well as the possible transitions from this state
+defined by the workflow. This transition, as defined in the workflow,
+will only be displayed for the users belonging to the group
+moderators or managers.
+
+Change view permission
+~~~~~~~~~~~~~~~~~~~~~~
+
+
+
+Conclusion
+----------
+
+Exercise
+~~~~~~~~
+
+Create new blog entries in ``Tech-blog``.
+
+What we learned
+~~~~~~~~~~~~~~~
+
+Creating a simple schema was enough to set up a new application that
+can store blogs and blog entries. 
+
+What is next ?
+~~~~~~~~~~~~~~
+
+Although the application is fully functional, its look is very
+basic. In the following section we will learn to create views to
+customize how data is displayed.
+
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/tutmanual_fr/tut-create-app.fr.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,386 @@
+.. -*- coding: utf-8 -*-
+
+
+Tutoriel : créer votre première application web pour Google AppEngine
+=====================================================================
+
+[TRANSLATE ME TO FRENCH]
+
+This tutorial will guide you step by step to build a blog application 
+and discover the unique features of `LAX`. It assumes that you followed
+the :ref:`installation` guidelines and that both the `AppEngine SDK` and the
+`LAX` framework are setup on your computer.
+
+Creating a new application
+--------------------------
+
+We chose in this tutorial to develop a blog as an example of a web application
+and will go through each required step/action to have it running with `LAX`.
+When you installed `LAX`, you saw a directory named ``skel``. Make a copy of
+this directory and call it ``BlogDemo``.
+
+The location of this directory does not matter. But once decided, make sure your ``PYTHONPATH`` is properly set (:ref:`installation`).
+
+
+Defining a schema
+-----------------
+
+With `LAX`, the schema/datamodel is the core of the application. This is where
+you will define the type of content you have to handle in your application.
+
+Let us start with something simple and improve on it iteratively. 
+
+In schema.py, we define two entities : ``Blog`` and ``BlogEntry``.
+
+::
+
+  class Blog(EntityType):
+      title = String(maxsize=50, required=True)
+      description = String()
+
+  class BlogEntry(EntityType):
+      title = String(maxsize=100, required=True)
+      publish_date = Date(default='TODAY')
+      text = String(fulltextindexed=True)
+      category = String(vocabulary=('important','business'))
+      entry_of = SubjectRelation('Blog', cardinality='?*')
+
+A Blog has a title and a description. The title is a string that is
+required by the class EntityType and must be less than 50 characters. 
+The description is a string that is not constrained.
+
+A BlogEntry has a title, a publish_date and a text. The title is a
+string that is required and must be less than 100 characters. The
+publish_date is a Date with a default value of TODAY, meaning that
+when a BlogEntry is created, its publish_date will be the current day
+unless it is modified. The text is a string that will be indexed in
+the full-text index and has no constraint.
+
+A BlogEntry also has a relationship ``entry_of`` that links it to a
+Blog. The cardinality ``?*`` means that a BlogEntry can be part of
+zero or one Blog (``?`` means `zero or one`) and that a Blog can
+have any number of BlogEntry (``*`` means `any number including
+zero`). For completeness, remember that ``+`` means `one or more`.
+
+Running the application
+-----------------------
+
+Defining this simple schema is enough to get us started. Make sure you
+followed the setup steps described in detail in the installation
+chapter (especially visiting http://localhost:8080/_load as an
+administrator), then launch the application with the command::
+
+   python dev_appserver.py BlogDemo
+
+and point your browser at http://localhost:8080/ (if it is easier for
+you, use the on-line demo at http://lax.appspot.com/).
+
+.. image:: images/lax-book.00-login.en.png
+   :alt: login screen
+
+After you log in, you will see the home page of your application. It
+lists the entity types: Blog and BlogEntry. If these links read
+``blog_plural`` and ``blogentry_plural`` it is because
+internationalization (i18n) is not working for you yet. Please ignore
+this for now.
+
+.. image:: images/lax-book.01-start.en.png
+   :alt: home page
+
+Creating system entities
+------------------------
+You can only create new users if you decided not to use google authentication.
+
+
+[WRITE ME : create users manages permissions etc]
+
+
+
+Creating application entities
+-----------------------------
+
+Create a Blog
+~~~~~~~~~~~~~
+
+Let us create a few of these entities. Click on the [+] at the right
+of the link Blog.  Call this new Blog ``Tech-blog`` and type in
+``everything about technology`` as the description, then validate the
+form by clicking on ``Validate``.
+
+.. image:: images/lax-book.02-create-blog.en.png
+   :alt: form to create blog
+
+Click on the logo at top left to get back to the home page, then
+follow the Blog link that will list for you all the existing Blog.
+You should be seeing a list with a single item ``Tech-blog`` you
+just created.
+
+.. image:: images/lax-book.03-list-one-blog.en.png
+   :alt: displaying a list of a single blog
+
+Clicking on this item will get you to its detailed description except
+that in this case, there is not much to display besides the name and
+the phrase ``everything about technology``.
+
+.. image:: images/lax-book.04-detail-one-blog.en.png
+   :alt: displaying the detailed view of a blog
+
+Now get back to the home page by clicking on the top-left logo, then
+create a new Blog called ``MyLife`` and get back to the home page
+again to follow the Blog link for the second time. The list now
+has two items.
+
+.. image:: images/lax-book.05-list-two-blog.en.png
+   :alt: displaying a list of two blogs
+
+
+Create a BlogEntry
+~~~~~~~~~~~~~~~~~~
+
+Get back to the home page and click on [+] at the right of the link
+BlogEntry. Call this new entry ``Hello World`` and type in some text
+before clicking on ``Validate``. You added a new blog entry without
+saying to what blog it belongs. There is a box on the left entitled
+``actions``, click on the menu item ``modify``. You are back to the form
+to edit the blog entry you just created, except that the form now has
+another section with a combobox titled ``add relation``. Chose
+``entry_of`` in this menu and a second combobox appears where you pick
+``MyLife``. 
+
+You could also have, at the time you started to fill the form for a
+new entity BlogEntry, hit ``Apply`` instead of ``Validate`` and the 
+combobox titled ``add relation`` would have showed up.
+
+.. image:: images/lax-book.06-add-relation-entryof.en.png
+   :alt: editing a blog entry to add a relation to a blog
+
+Validate the changes by clicking ``Validate``. The entity BlogEntry
+that is displayed now includes a link to the entity Blog named
+``MyLife``.
+
+.. image:: images/lax-book.07-detail-one-blogentry.en.png
+   :alt: displaying the detailed view of a blogentry
+
+Remember that all of this was handled by the framework and that the
+only input that was provided so far is the schema. To get a graphical
+view of the schema, run the ``laxctl genschema BlogDemo`` command as
+explained in the installation section and point your browser to the
+URL http://localhost:8080/schema
+
+.. image:: images/lax-book.08-schema.en.png
+   :alt: graphical view of the schema (aka data-model)
+
+Site configuration
+------------------
+
+.. image:: images/lax-book.03-site-config-panel.en.png
+
+This panel allows you to configure the appearance of your application site.
+Six menus are available and we will go through each of them to explain how
+to use them.
+
+Navigation
+~~~~~~~~~~
+This menu provides you a way to adjust some navigation options depending on
+your needs, such as the number of entities to display by page of results.
+The detailed list of available options follows :
+  
+* navigation.combobox-limit : maximum number of entities to display in related
+  combo box (sample format: 23)
+* navigation.page-size : maximum number of objects displayed by page of results 
+  (sample format: 23)
+* navigation.related-limit : maximum number of related entities to display in 
+  the primary view (sample format: 23)
+* navigation.short-line-size : maximum number of characters in short description
+  (sample format: 23)
+
+UI
+~~
+This menu provides you a way to customize the user interface settings such as
+date format or encoding in the produced html.
+The detailed list of available options follows :
+
+* ui.date-format : how to format date in the ui ("man strftime" for format description)
+* ui.datetime-format : how to format date and time in the ui ("man strftime" for format
+  description)
+* ui.default-text-format : default text format for rich text fields.
+* ui.encoding : user interface encoding
+* ui.fckeditor : should HTML fields be edited using fckeditor (an HTML WYSIWYG editor).
+  You should also select text/html as default text format to actually get fckeditor.
+* ui.float-format : how to format float numbers in the ui
+* ui.language : language of the user interface
+* ui.main-template : id of main template used to render pages
+* ui.site-title	: site title, which is displayed right next to the logo in the header
+* ui.time-format : how to format time in the ui ("man strftime" for format description)
+
+
+Actions
+~~~~~~~
+This menu provides a way to configure the context in which you expect the actions
+to be displayed to the user and if you want the action to be visible or not. 
+You must have noticed that when you view a list of entities, an action box is 
+available on the left column which displays some actions as well as a drop-down 
+menu for more actions. 
+
+The available contexts are :
+
+* mainactions : actions listed in the left box
+* moreactions : actions listed in the `more` menu of the left box
+* addrelated : add actions listed in the left box
+* useractions : actions listed in the first section of drop-down menu 
+  accessible from the right corner user login link
+* siteactions : actions listed in the second section of drop-down menu
+  accessible from the right corner user login link
+* hidden : select this to hide the specific action
+
+Boxes
+~~~~~
+The application has already a pre-defined set of boxes you can use right away. 
+This configuration section allows you to place those boxes where you want in the
+application interface to customize it. 
+
+The available boxes are :
+
+* actions box : box listing the applicable actions on the displayed data
+
+* boxes_blog_archives_box : box listing the blog archives 
+
+* possible views box : box listing the possible views for the displayed data
+
+* rss box : RSS icon to get displayed data as a RSS thread
+
+* search box : search box
+
+* startup views box : box listing the configuration options available for 
+  the application site, such as `Preferences` and `Site Configuration`
+
+Components
+~~~~~~~~~~
+[WRITE ME]
+
+Contextual components
+~~~~~~~~~~~~~~~~~~~~~
+[WRITE ME]
+
+Set-up a workflow
+-----------------
+
+Before starting, make sure you refresh your mind by reading [link to
+definition_workflow chapter].
+
+We want to create a workflow to control the quality of the BlogEntry 
+submitted on your application. When a BlogEntry is created by a user
+its state should be `submitted`. To be visible to all, it needs to
+be in the state `published`. To move from `submitted` to `published`
+we need a transition that we can name `approve_blogentry`.
+
+We do not want every user to be allowed to change the state of a 
+BlogEntry. We need to define a group of users, `moderators`, and 
+this group will have appropriate permissions to approve BlogEntry
+to be published and visible to all.
+
+There are two ways to create a workflow: from the user interface,
+and also by defining it in ``migration/postcreate.py``. This script
+is executed each time a new ``./bin/laxctl db-init`` is done. 
+If you create the states and transitions through the user interface
+this means that next time you will need to initialize the database
+you will have to re-create all the entities. 
+We strongly recommend you create the workflow in ``migration/postcreate.py``
+and we will now show you how.
+The user interface would only be a reference for you to view the states 
+and transitions but is not the appropriate interface to define your
+application workflow.
+
+Update the schema
+~~~~~~~~~~~~~~~~~
+To enable a BlogEntry to have a State, we have to define a relation
+``in_state`` in the schema of BlogEntry. Please do as follows, add
+the line ``in_state (...)``::
+
+  class BlogEntry(EntityType):
+      title = String(maxsize=100, required=True)
+      publish_date = Date(default='TODAY')
+      text_format = String(meta=True, internationalizable=True, maxsize=50,
+                           default='text/rest', constraints=[format_constraint])
+      text = String(fulltextindexed=True)
+      category = String(vocabulary=('important','business'))
+      entry_of = SubjectRelation('Blog', cardinality='?*')
+      in_state = SubjectRelation('State', cardinality='1*')
+
+As you updated the schema, you will have to re-execute ``./bin/laxctl db-init``
+to initialize the database and migrate your existing entities.
+[WRITE ABOUT MIGRATION]
+
+Create states, transitions and group permissions
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+At the time the ``postcreate.py`` script is executed, several methods
+can be used. They are all defined in the ``class ServerMigrationHelper``.
+We will only discuss the method we use to create a workflow here.
+
+To define our workflow for BlogDemo, please add the following lines
+to ``migration/postcreate.py``::
+  
+  _ = unicode
+
+  moderators      = add_entity('EGroup', name=u"moderators")
+
+  submitted = add_state(_('submitted'), 'BlogEntry', initial=True)
+  published = add_state(_('published'), 'BlogEntry')
+
+  add_transition(_('approve_blogentry'), 'BlogEntry', (submitted,), published, ('moderators', 'managers'),)
+
+  checkpoint()
+
+``add_entity`` is used here to define the new group of users that we
+need to define the transitions, `moderators`.
+If this group required by the transition is not defined before the
+transition is created, it will not create the relation `transition 
+require the group moderator`.
+
+``add_state`` expects as the first argument the name of the state you are
+willing to create, then the entity type on which the state can be applied, 
+and an optional argument to set if the state is the initial state
+of the entity type or not.
+
+``add_transition`` expects as the first argument the name of the 
+transition, then the entity type on which we can apply the transition,
+then the list of possible initial states from which the transition
+can be applied, the target state of the transition, and the permissions
+(e.g. list of the groups of users who can apply the transition).
+
+.. image:: images/lax-book.03-transitions-view.en.png
+
+You can now notice that in the actions box of a BlogEntry, the state
+is now listed as well as the possible transitions from this state
+defined by the workflow. This transition, as defined in the workflow,
+will only be displayed for the users belonging to the group
+moderators or managers.
+
+Change view permission
+~~~~~~~~~~~~~~~~~~~~~~
+
+
+
+Conclusion
+----------
+
+Exercise
+~~~~~~~~
+
+Create new blog entries in ``Tech-blog``.
+
+What we learned
+~~~~~~~~~~~~~~~
+
+Creating a simple schema was enough to set up a new application that
+can store blogs and blog entries. 
+
+What is next ?
+~~~~~~~~~~~~~~
+
+Although the application is fully functional, its look is very
+basic. In the following section we will learn to create views to
+customize how data is displayed.
+
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/embedded/README	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,7 @@
+This directory contains extra libraries which are needed
+to make cubicweb work.
+
+The mx.DateTime python implementation is directly taken from
+the mx.DateTime distribution. The only modification is the
+strptime function which has been mocked using the standard
+``datetime`` module. (as provided by the python2.5's stdlib)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/embedded/mx/DateTime/ARPA.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,227 @@
+""" This module provides a set of constructors and routines to convert
+    between DateTime[Delta] instances and ARPA representations of date
+    and time. The format is specified by RFC822 + RFC1123.
+
+    Note: Timezones are only interpreted by ParseDateTimeGMT(). All
+    other constructors silently ignore the time zone information.
+
+    Copyright (c) 1998-2000, Marc-Andre Lemburg; mailto:mal@lemburg.com
+    Copyright (c) 2000-2007, eGenix.com Software GmbH; mailto:info@egenix.com
+    See the documentation for further information on copyrights,
+    or contact the author. All Rights Reserved.
+
+"""
+import DateTime,Timezone
+import re,string
+
+# Grammar: RFC822 + RFC1123 + deprecated RFC850
+_litday = '(?P<litday>Mon|Tue|Wed|Thu|Fri|Sat|Sun)[a-z]*'
+_litmonth = '(?P<litmonth>Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)'\
+            '[a-z]*'
+_date = ('(?:(?P<day>\d?\d)(?: +' + _litmonth + 
+         ' +|-(?P<month>\d?\d)-)(?P<year>(?:\d\d)?\d\d))')
+_zone = Timezone.zone
+_time = ('(?:(?P<hour>\d\d):(?P<minute>\d\d)'
+         '(?::(?P<second>\d\d))?(?: +'+_zone+')?)')
+#       Timezone information is made optional because some mail apps
+#       forget to add it (most of these seem to be spamming engines, btw).
+#       It defaults to UTC.
+
+_arpadate = '(?:'+ _litday + ',? )? *' + _date
+_arpadatetime = '(?:'+ _litday + ',? )? *' + _date + ' +' + _time
+
+#       We are not strict about the extra characters: some applications
+#       add extra information to the date header field. Additional spaces
+#       between the fields and extra characters in the literal day
+#       and month fields are also silently ignored.
+
+arpadateRE = re.compile(_arpadate)
+arpadatetimeRE = re.compile(_arpadatetime)
+
+# Translation tables
+litdaytable = {'mon':0, 'tue':1, 'wed':2, 'thu':3, 'fri':4, 'sat':5, 'sun':6 }
+litmonthtable = {'jan':1, 'feb':2, 'mar':3, 'apr':4, 'may':5, 'jun':6,
+                 'jul':7, 'aug':8, 'sep':9, 'oct':10, 'nov':11, 'dec':12 }
+_days = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
+_months = [None, 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
+                 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec' ]
+
+def ParseDate(arpastring,parse_arpadate=arpadateRE.match,
+
+              strip=string.strip,atoi=string.atoi,atof=string.atof,
+              lower=string.lower):
+
+    """ParseDate(arpastring)
+
+       Returns a DateTime instance reflecting the given ARPA
+       date. Only the date part is parsed, any time part will be
+       ignored. The instance's time is set to 0:00:00.
+
+    """
+    s = strip(arpastring)
+    date = parse_arpadate(s)
+    if not date:
+        raise ValueError,'wrong format'
+    litday,day,litmonth,month,year = date.groups()
+    if len(year) == 2:
+        year = DateTime.add_century(atoi(year))
+    else:
+        year = atoi(year)
+    if litmonth:
+        litmonth = lower(litmonth)
+        try:
+            month = litmonthtable[litmonth]
+        except KeyError:
+            raise ValueError,'wrong month format'
+    else:
+        month = atoi(month)
+    day = atoi(day)
+    # litday and timezone are ignored
+    return DateTime.DateTime(year,month,day)
+
+def ParseDateTime(arpastring,parse_arpadatetime=arpadatetimeRE.match,
+
+                  strip=string.strip,atoi=string.atoi,atof=string.atof,
+                  lower=string.lower):
+
+    """ParseDateTime(arpastring)
+
+       Returns a DateTime instance reflecting the given ARPA date assuming
+       it is local time (timezones are silently ignored).
+    """
+    s = strip(arpastring)
+    date = parse_arpadatetime(s)
+    if not date:
+        raise ValueError,'wrong format or unknown time zone'
+    litday,day,litmonth,month,year,hour,minute,second,zone = date.groups()
+    if len(year) == 2:
+        year = DateTime.add_century(atoi(year))
+    else:
+        year = atoi(year)
+    if litmonth:
+        litmonth = lower(litmonth)
+        try:
+            month = litmonthtable[litmonth]
+        except KeyError:
+            raise ValueError,'wrong month format'
+    else:
+        month = atoi(month)
+    day = atoi(day)
+    hour = atoi(hour)
+    minute = atoi(minute)
+    if second is None:
+        second = 0.0
+    else:
+        second = atof(second)
+    # litday and timezone are ignored
+    return DateTime.DateTime(year,month,day,hour,minute,second)
+
+def ParseDateTimeGMT(arpastring,parse_arpadatetime=arpadatetimeRE.match,
+
+                     strip=string.strip,atoi=string.atoi,atof=string.atof,
+                     lower=string.lower):
+
+    """ParseDateTimeGMT(arpastring)
+
+       Returns a DateTime instance reflecting the given ARPA date converting
+       it to UTC (timezones are honored).
+    """
+    s = strip(arpastring)
+    date = parse_arpadatetime(s)
+    if not date:
+        raise ValueError,'wrong format or unknown time zone'
+    litday,day,litmonth,month,year,hour,minute,second,zone = date.groups()
+    if len(year) == 2:
+        year = DateTime.add_century(atoi(year))
+    else:
+        year = atoi(year)
+    if litmonth:
+        litmonth = lower(litmonth)
+        try:
+            month = litmonthtable[litmonth]
+        except KeyError:
+            raise ValueError,'wrong month format'
+    else:
+        month = atoi(month)
+    day = atoi(day)
+    hour = atoi(hour)
+    minute = atoi(minute)
+    if second is None:
+        second = 0.0
+    else:
+        second = atof(second)
+    offset = Timezone.utc_offset(zone)
+    # litday is ignored
+    return DateTime.DateTime(year,month,day,hour,minute,second) - offset
+
+# Alias
+ParseDateTimeUTC = ParseDateTimeGMT
+
+def str(datetime,tz=None):
+
+    """str(datetime,tz=DateTime.tz_offset(datetime))
+
+    Returns the datetime instance as ARPA date string. tz can be given
+    as DateTimeDelta instance providing the time zone difference from
+    datetime's zone to UTC. It defaults to
+    DateTime.tz_offset(datetime) which assumes local time. """
+
+    if tz is None:
+        tz = datetime.gmtoffset()
+    return '%s, %02i %s %04i %02i:%02i:%02i %+03i%02i' % (
+        _days[datetime.day_of_week], datetime.day, 
+        _months[datetime.month], datetime.year,
+        datetime.hour, datetime.minute, datetime.second,
+        tz.hour,tz.minute)
+
+def strGMT(datetime):
+
+    """ strGMT(datetime)
+
+    Returns the datetime instance as ARPA date string assuming it
+    is given in GMT. """
+
+    return '%s, %02i %s %04i %02i:%02i:%02i GMT' % (
+        _days[datetime.day_of_week], datetime.day, 
+        _months[datetime.month], datetime.year,
+        datetime.hour, datetime.minute, datetime.second)
+
+def strUTC(datetime):
+
+    """ strUTC(datetime)
+
+    Returns the datetime instance as ARPA date string assuming it
+    is given in UTC. """
+
+    return '%s, %02i %s %04i %02i:%02i:%02i UTC' % (
+        _days[datetime.day_of_week], datetime.day, 
+        _months[datetime.month], datetime.year,
+        datetime.hour, datetime.minute, datetime.second)
+
+def _test():
+    import sys, os, rfc822
+    file = os.path.join(os.environ['HOME'], 'nsmail/Inbox')
+    f = open(file, 'r')
+    while 1:
+        m = rfc822.Message(f)
+        if not m:
+            break
+        print 'From:', m.getaddr('from')
+        print 'To:', m.getaddrlist('to')
+        print 'Subject:', m.getheader('subject')
+        raw = m.getheader('date')
+        try:
+            date = ParseDateTimeUTC(raw)
+            print 'Date:',strUTC(date)
+        except ValueError,why:
+            print 'PROBLEMS:',repr(raw),'-->',why
+            raw_input('...hit return to continue')
+        print
+        # Netscape mail file
+        while 1:
+            line = f.readline()
+            if line[:6] == 'From -':
+                break
+
+if __name__ == '__main__':
+    _test()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/embedded/mx/DateTime/DateTime.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1054 @@
+""" Python part of the low-level DateTime[Delta] type implementation.
+
+    Copyright (c) 1998-2001, Marc-Andre Lemburg; mailto:mal@lemburg.com
+    Copyright (c) 2000-2007, eGenix.com Software GmbH; mailto:info@egenix.com
+    See the documentation for further information on copyrights,
+    or contact the author. All Rights Reserved.
+"""
+# Import the python implementation module
+from mxDateTime_python import *
+from mxDateTime_python import __version__
+
+# Singletons
+oneSecond = DateTimeDelta(0,0,0,1)
+oneMinute = DateTimeDelta(0,0,1)
+oneHour = DateTimeDelta(0,1)
+oneDay = DateTimeDelta(1)
+oneWeek = DateTimeDelta(7)
+Epoch = DateTimeFromAbsDateTime(1,0)
+
+# Shortcuts for pickle; for backward compatibility only (they are now
+# defined in __init__.py to further reduce the pickles length)
+def _DT(absdate,abstime):
+    return DateTimeFromAbsDateTime(absdate,abstime)
+def _DTD(seconds):
+    return DateTimeDeltaFromSeconds(seconds)
+
+# Module init
+class modinit:
+
+    global _time,_string,_math,_types
+    import time,string,math,types
+    _time = time
+    _string = string
+    _math = math
+    _types = types
+
+del modinit
+
+### Helpers
+
+def _isstring(arg,
+
+              isinstance=isinstance, types=_types):
+    
+    if isinstance(arg, types.StringType):
+        return 1
+    try:
+        if isinstance(arg, types.UnicodeType):
+            return 1
+    except AttributeError:
+        pass
+    return 0
+
+### Compatibility APIs
+
+# Aliases and functions to make 'from mx.DateTime import *' work much
+# like 'from time import *'
+
+def localtime(ticks=None,
+              # Locals:
+              time=_time.time,float=float,localtime=_time.localtime,
+              round=round,int=int,DateTime=DateTime,floor=_math.floor):
+
+    """localtime(ticks=None)
+
+       Construct a DateTime instance using local time from ticks.  If
+       ticks are not given, it defaults to the current time.  The
+       result is similar to time.localtime(). Fractions of a second
+       are rounded to the nearest micro-second.
+
+    """
+    if ticks is None:
+        ticks = time()
+    else:
+        ticks = float(ticks)
+    ticks = round(ticks, 6)
+    fticks = floor(ticks)
+    Y,M,D,h,m,s = localtime(fticks)[:6]
+    s = s + (ticks - fticks)
+    return DateTime(Y,M,D,h,m,s)
+
+def gmtime(ticks=None,
+           # Locals:
+           time=_time.time,float=float,gmtime=_time.gmtime,
+           round=round,int=int,DateTime=DateTime,floor=_math.floor):
+
+    """gmtime(ticks=None)
+
+       Construct a DateTime instance using UTC time from ticks.  If
+       ticks are not given, it defaults to the current time.  The
+       result is similar to time.gmtime(). Fractions of a second are
+       rounded to the nearest micro-second.
+
+    """
+    if ticks is None:
+        ticks = time()
+    else:
+        ticks = float(ticks)
+    ticks = round(ticks, 6)
+    fticks = floor(ticks)
+    Y,M,D,h,m,s = gmtime(ticks)[:6]
+    s = s + (ticks - fticks)
+    return DateTime(Y,M,D,h,m,s)
+
+def mktime((year,month,day,hour,minute,second,dow,doy,dst),
+           # Locals:
+           DateTime=DateTime):
+
+    """mktime((year,month,day,hour,minute,second,dow,doy,dst))
+
+       Same as the DateTime() constructor accept that the interface
+       used is compatible to the similar time.mktime() API.
+
+       Note that the tuple elements dow, doy and dst are not used in
+       any way.
+      
+    """
+    return DateTime(year,month,day,hour,minute,second)
+
+def ctime(datetime):
+
+    """ctime(datetime)
+
+       Returns a string representation of the given DateTime instance
+       using the current locale's default settings.
+
+    """
+    return datetime.strftime('%c')
+
+def today(hour=0,minute=0,second=0.0,
+          # Locals:
+          localtime=_time.localtime,time=_time.time,DateTime=DateTime):
+
+    """today(hour=0,minute=0,second=0.0)
+
+       Returns a DateTime instance for today (in local time) at the
+       given time (defaults to midnight).
+
+    """
+    Y,M,D = localtime(time())[:3]
+    return DateTime(Y,M,D,hour,minute,second)
+
+def TimeDelta(hours=0.0,minutes=0.0,seconds=0.0,
+              # Locals:
+              DateTimeDelta=DateTimeDelta):
+
+    """TimeDelta(hours=0.0,minutes=0.0,seconds=0.0)
+
+       Returns a DateTimeDelta-object reflecting the given time
+       delta. Seconds can be given as float to indicate fractions.
+
+    """
+    return DateTimeDelta(0,hours,minutes,seconds)
+
+def gm2local(datetime):
+
+    """ gm2local(datetime)
+
+        Convert a DateTime instance holding UTC time to a DateTime
+        instance using local time.
+
+    """
+    return localtime(datetime.gmticks())
+
+def local2gm(datetime):
+
+    """ local2gm(datetime)
+
+        Convert a DateTime instance holding local time to a DateTime
+        instance using UTC time.
+
+    """
+    return gmtime(datetime.ticks())
+
+# Alias
+gmt = utc
+
+# Default value for DateTimeFromTJD's tjd_myriad parameter
+current_myriad = localtime().tjd_myriad
+
+def DateTimeFromTJD(tjd,tjd_myriad=current_myriad):
+
+    """ DateTimeFromTJD(tjd[,myriad])
+
+        Return a DateTime instance for the given Truncated Julian Day.
+        myriad defaults to the TJD myriad current at package import
+        time.
+
+        Note that this version of Truncated Julian Day number does
+        real truncation of important information. It's use is
+        discouraged and unsupported.
+
+    """
+    return DateTimeFromAbsDays(tjd + tjd_myriad * 10000.0 - 1721425.0)
+
+def DateTimeFromJDN(jdn):
+
+    """ DateTimeFromJDN(jdn)
+
+        Return a DateTime instance for the given Julian Day Number.
+
+        References:
+        -----------
+        Gregorian 2000-01-01 12:00:00 corresponds to JDN 2451545.0.
+        Gregorian 1858-11-17 00:00:00.00 corresponds to JDN 2400000.5; MJD 0.0.
+        Julian -4712-01-01 12:00:00.00 corresponds to JDN 0.0.
+        Gregorian -4713-11-24 12:00:00.00 corresponds to JDN 0.0.
+
+    """
+    return DateTimeFromAbsDays(jdn - 1721425.5)
+
+def DateTimeFromMJD(mjd):
+
+    """ DateTimeFromMJD(mjd)
+
+        Return a DateTime instance for the given Modified Julian Day
+        (MJD). The MJD is calculated the same way as the JDN except
+        that 1858-11-17 00:00:00.00 is taken as origin of the scale.
+
+    """
+    return DateTimeFromAbsDays(mjd + 678575.0)
+
+def DateTimeFrom(*args, **kws):
+
+    """ DateTimeFrom(*args, **kws)
+
+        Generic DateTime instance constructor. Can handle parsing
+        strings, numbers and keywords.
+
+        XXX Add support for Unicode.
+
+    """
+    if len(args) == 1:
+        # Single argument
+        arg = args[0]
+        argtype = type(arg)
+        if _isstring(arg):
+            import Parser
+            return apply(Parser.DateTimeFromString, args, kws)
+        elif argtype is DateTimeType:
+            return arg
+        elif argtype is DateTimeDeltaType:
+            raise TypeError,'cannot convert DateTimeDelta to DateTime'
+        else:
+            try:
+                value = float(arg)
+            except (TypeError, ValueError):
+                value = int(arg)
+            assert not kws
+            return DateTimeFromTicks(value)
+
+    elif len(args) > 1:
+        # More than one argument
+        if len(args) == 2 and _isstring(args[0]) and _isstring(args[1]):
+            # interpret as date and time string
+            import Parser
+            return apply(Parser.DateTimeFromString,
+                         (args[0] + ' ' + args[1],),
+                         kws)
+
+        # Assume the arguments are the same as for DateTime()
+        return apply(DateTime, args, kws)
+
+    elif len(kws) > 0:
+        # Keyword arguments; add defaults... today at 0:00:00
+        hour = kws.get('hour',0)
+        minute = kws.get('minute',0)
+        second = kws.get('second',0)
+        today = now()
+        day = kws.get('day',today.day)
+        month = kws.get('month',today.month)
+        year = kws.get('year',today.year)
+        return DateTime(year,month,day,hour,minute,second)
+
+    else:
+        raise TypeError,'cannot convert arguments to DateTime'
+
+def DateTimeDeltaFrom(*args, **kws):
+
+    """ DateTimeDeltaFrom(*args, **kws)
+
+        Generic DateTimeDelta instance constructor. Can handle parsing
+        strings, numbers and keywords.
+
+        XXX Add support for Unicode.
+
+    """
+    if len(args) == 1:
+        # Single argument
+        arg = args[0]
+        if _isstring(arg):
+            import Parser
+            return apply(Parser.DateTimeDeltaFromString, args, kws)
+        elif type(arg) is DateTimeDeltaType:
+            return arg
+        elif type(arg) is DateTimeType:
+            raise TypeError,'cannot convert DateTime to DateTimeDelta'
+        else:
+            try:
+                value = float(arg)
+            except TypeError:
+                value = int(arg)
+            assert not kws
+            return DateTimeDeltaFromSeconds(value)
+
+    elif len(args) > 1:
+        # Assume the arguments are the same as for DateTimeDelta()
+        return apply(DateTimeDelta, args, kws)
+
+    elif len(kws) > 0:
+        # Keyword arguments; default: 00:00:00:00.00
+        hours = kws.get('hours',0)
+        minutes = kws.get('minutes',0)
+        seconds = kws.get('seconds',0.0)
+        days = kws.get('days',0)
+        return DateTimeDelta(days,hours,minutes,seconds)
+
+    else:
+        raise TypeError,'cannot convert arguments to DateTimeDelta'
+
+def TimeDeltaFrom(*args, **kws):
+
+    """ TimeDeltaFrom(*args, **kws)
+
+        Generic TimeDelta instance constructor. Can handle parsing
+        strings, numbers and keywords.
+
+        XXX Add support for Unicode.
+
+    """
+    if len(args) > 1:
+        # Assume the arguments are the same as for TimeDelta(): without
+        # days part !
+        return apply(DateTimeDelta, (0,)+args, kws)
+    else:
+        # Otherwise treat the arguments just like for DateTimeDelta
+        # instances.
+        return apply(DateTimeDeltaFrom, args, kws)
+
+def DateFromTicks(ticks,
+                  # Locals:
+                  DateTime=DateTime,localtime=_time.localtime):
+
+    """ DateFromTicks(ticks)
+
+        Constructs a DateTime instance pointing to the local time date
+        at 00:00:00.00 (midnight) indicated by the given ticks value.
+        The time part is ignored.
+
+    """
+    return apply(DateTime, localtime(ticks)[:3])
+
+def TimestampFromTicks(ticks,
+                       # Locals:
+                       DateTime=DateTime,localtime=_time.localtime):
+
+    """ TimestampFromTicks(ticks)
+
+        Constructs a DateTime instance pointing to the local date and
+        time indicated by the given ticks value.
+
+    """
+    return apply(DateTime, localtime(ticks)[:6])
+
+def TimeFromTicks(ticks,
+                  # Locals:
+                  DateTimeDelta=DateTimeDelta,localtime=_time.localtime):
+
+    """ TimeFromTicks(ticks)
+
+        Constructs a DateTimeDelta instance pointing to the local time
+        indicated by the given ticks value. The date part is ignored.
+
+    """
+    return apply(DateTimeDelta, (0,) + localtime(ticks)[3:6])
+
+# Aliases
+utctime = gmtime
+utc2local = gm2local
+local2utc = local2gm
+DateTimeFromTicks = localtime
+Date = DateTime
+Time = TimeDelta
+Timestamp = DateTime
+DateFrom = DateTimeFrom # XXX should only parse the date part !
+TimeFrom = TimeDeltaFrom
+TimestampFrom = DateTimeFrom
+GregorianDateTime = DateTime
+GregorianDate = Date
+JulianDate = JulianDateTime
+
+
+### For backward compatibility (these are depreciated):
+
+def gmticks(datetime):
+
+    """gmticks(datetime)
+
+       [DEPRECIATED: use the .gmticks() method]
+    
+       Returns a ticks value based on the values stored in
+       datetime under the assumption that they are given in UTC,
+       rather than local time.
+
+    """
+    return datetime.gmticks()
+
+# Alias
+utcticks = gmticks
+
+def tz_offset(datetime,
+              # Locals:
+              oneSecond=oneSecond):
+
+    """tz_offset(datetime)
+
+       [DEPRECIATED: use the .gmtoffset() method]
+    
+       Returns a DateTimeDelta instance representing the UTC
+       offset for datetime assuming that the stored values refer
+       to local time. If you subtract this value from datetime,
+       you'll get UTC time.
+
+    """
+    return datetime.gmtoffset()
+
+### Constants (only English; see Locale.py for other languages)
+
+# Weekdays
+Monday =        0
+Tuesday =       1
+Wednesday =     2
+Thursday =      3
+Friday =        4
+Saturday =      5
+Sunday =        6
+# as mapping
+Weekday = {'Saturday': 5, 6: 'Sunday', 'Sunday': 6, 'Thursday': 3,
+           'Wednesday': 2, 'Friday': 4, 'Tuesday': 1, 'Monday': 0,
+           5: 'Saturday', 4: 'Friday', 3: 'Thursday', 2: 'Wednesday',
+           1: 'Tuesday', 0: 'Monday'}
+
+# Months
+January =       1
+February =      2
+March =         3
+April =         4
+May =           5
+June =          6
+July =          7
+August =        8 
+September =     9
+October =       10
+November =      11
+December =      12
+# as mapping
+Month = {2: 'February', 3: 'March', None: 0, 'July': 7, 11: 'November',
+    'December': 12, 'June': 6, 'January': 1, 'September': 9, 'August':
+    8, 'March': 3, 'November': 11, 'April': 4, 12: 'December', 'May':
+    5, 10: 'October', 9: 'September', 8: 'August', 7: 'July', 6:
+    'June', 5: 'May', 4: 'April', 'October': 10, 'February': 2, 1:
+    'January', 0: None}
+
+# Limits (see also the range checks in mxDateTime.c)
+MaxDateTime = DateTime(5867440,12,31) 
+MinDateTime = DateTime(-5851455,1,1)
+MaxDateTimeDelta = DateTimeDeltaFromSeconds(2147483647 * 86400.0)
+MinDateTimeDelta = -MaxDateTimeDelta
+
+###
+
+class RelativeDateTime:
+
+    """RelativeDateTime(years=0,months=0,days=0,
+                  hours=0,minutes=0,seconds=0,
+                  year=0,month=0,day=0,
+                  hour=None,minute=None,second=None,
+                  weekday=None,weeks=None)
+
+       Returns a RelativeDateTime instance for the specified relative
+       time. The constructor handles keywords, so you'll only have to
+       give those parameters which should be changed when you add the
+       relative to an absolute DateTime instance.
+
+       Adding RelativeDateTime instances is supported with the
+       following rules: deltas will be added together, right side
+       absolute values override left side ones.
+
+       Adding RelativeDateTime instances to DateTime instances will
+       return DateTime instances with the appropriate calculations
+       applied, e.g. to get a DateTime instance for the first of next
+       month, you'd call now() + RelativeDateTime(months=+1,day=1).
+
+    """
+    years = 0
+    months = 0
+    days = 0
+    year = None
+    month = 0
+    day = 0
+    hours = 0
+    minutes = 0
+    seconds = 0
+    hour = None
+    minute = None
+    second = None
+    weekday = None
+
+    # cached hash value
+    _hash = None
+
+    # For Zope security:
+    __roles__ = None
+    __allow_access_to_unprotected_subobjects__ = 1
+
+    def __init__(self,
+                 years=0,months=0,days=0,
+                 hours=0,minutes=0,seconds=0,
+                 year=None,month=None,day=None,
+                 hour=None,minute=None,second=None,
+                 weekday=None,weeks=0):
+        
+        self.years = years
+        self.months = months
+        self.days = days + weeks*7
+        self.year = year
+        self.month = month
+        self.day = day
+        self.hours = hours
+        self.minutes = minutes
+        self.seconds = seconds
+        self.hour = hour
+        self.minute = minute
+        self.second = second
+        if weekday is not None:
+            #  Make sure we've got a 2-tuple
+            assert len(weekday) == 2
+            self.weekday = weekday
+
+    def __add__(self,other,
+                # Locals:
+                isinstance=isinstance):
+
+        if isinstance(other,RelativeDateTime):
+            # RelativeDateTime (self) + RelativeDateTime (other)
+
+            r = RelativeDateTime()
+            # date deltas
+            r.years = self.years + other.years
+            r.months = self.months + other.months
+            r.days = self.days + other.days
+            # absolute entries of other override those in self, if given
+            r.year = other.year or self.year
+            r.month = other.month or self.month
+            r.day = other.day or self.day
+            r.weekday = other.weekday or self.weekday
+            # time deltas
+            r.hours = self.hours + other.hours
+            r.minutes = self.minutes + other.minutes
+            r.seconds = self.seconds + other.seconds
+            # absolute entries of other override those in self, if given
+            r.hour = other.hour or self.hour
+            r.minute = other.minute or self.minute
+            r.second = other.second or self.second
+            return r
+
+        else:
+            raise TypeError,"can't add the two types"
+
+    def __radd__(self,other,
+                 # Locals:
+                 isinstance=isinstance,DateTimeType=DateTimeType,
+                 DateTime=DateTime,DateTimeDelta=DateTimeDelta):
+
+        if isinstance(other,DateTimeType):
+            # DateTime (other) + RelativeDateTime (self)
+
+            # date
+            if self.year is None:
+                year = other.year + self.years
+            else:
+                year = self.year + self.years
+            if self.month is None:
+                month = other.month + self.months
+            else:
+                month = self.month + self.months
+            if self.day is None:
+                day = other.day
+            else:
+                day = self.day
+            if day < 0:
+                # fix negative day values
+                month = month + 1
+                day = day + 1
+            day = day + self.days
+            # time
+            if self.hour is None:
+                hour = other.hour + self.hours
+            else:
+                hour = self.hour + self.hours
+            if self.minute is None:
+                minute = other.minute + self.minutes
+            else:
+                minute = self.minute + self.minutes
+            if self.second is None:
+                second = other.second + self.seconds
+            else:
+                second = self.second + self.seconds
+
+            # Refit into proper ranges:
+            if month < 1 or month > 12:
+                month = month - 1
+                yeardelta, monthdelta = divmod(month, 12)
+                year = year + yeardelta
+                month = monthdelta + 1
+
+            # Make sure we have integers
+            year = int(year)
+            month = int(month)
+            day = int(day)
+
+            if self.weekday is None:
+                return DateTime(year, month, 1) + \
+                       DateTimeDelta(day-1,hour,minute,second)
+            
+            # Adjust to the correct weekday
+            day_of_week,index = self.weekday
+            d = DateTime(year, month, 1) + \
+                DateTimeDelta(day-1,hour,minute,second)
+            if index == 0:
+                # 0 index: next weekday if no match
+                return d + (day_of_week - d.day_of_week)
+            elif index > 0:
+                # positive index (1 == first weekday of month)
+                first = d - (d.day - 1)
+                diff = day_of_week - first.day_of_week
+                if diff >= 0:
+                    return first + (diff + (index-1) * 7)
+                else:
+                    return first + (diff + index * 7)
+            else:
+                # negative index (-1 == last weekday of month)
+                last = d + (d.days_in_month - d.day)
+                diff = day_of_week - last.day_of_week
+                if diff <= 0:
+                    return last + (diff + (index+1) * 7)
+                else:
+                    return last + (diff + index * 7)
+            
+        else:
+            raise TypeError,"can't add the two types"
+
+    def __sub__(self,other):
+
+        if isinstance(other,RelativeDateTime):
+            # RelativeDateTime (self) - RelativeDateTime (other)
+
+            r = RelativeDateTime()
+            # date deltas
+            r.years = self.years - other.years
+            r.months = self.months - other.months
+            r.days = self.days - other.days
+            # absolute entries of other override those in self, if given
+            r.year = other.year or self.year
+            r.month = other.month or self.month
+            r.day = other.day or self.day
+            r.weekday = other.weekday or self.weekday
+            # time deltas
+            r.hours = self.hours - other.hours
+            r.minutes = self.minutes - other.minutes
+            r.seconds = self.seconds - other.seconds
+            # absolute entries of other override those in self, if given
+            r.hour = other.hour or self.hour
+            r.minute = other.minute or self.minute
+            r.second = other.second or self.second
+
+            return r
+
+        else:
+            raise TypeError,"can't subtract the two types"
+
+    def __rsub__(self,other,
+                 # Locals:
+                 isinstance=isinstance,DateTimeType=DateTimeType):
+
+        if isinstance(other,DateTimeType):
+            # DateTime (other) - RelativeDateTime (self)
+            return other + self.__neg__()
+
+        else:
+            raise TypeError,"can't subtract the two types"
+
+    def __neg__(self):
+
+        # - RelativeDateTime(self)
+
+        r = RelativeDateTime()
+        # negate date deltas
+        r.years = - self.years
+        r.months = - self.months
+        r.days = - self.days
+        # absolute entries don't change
+        r.year = self.year
+        r.month = self.month
+        r.day = self.day
+        r.weekday = self.weekday
+        # negate time deltas
+        r.hours = - self.hours
+        r.minutes = - self.minutes
+        r.seconds = - self.seconds
+        # absolute entries don't change
+        r.hour = self.hour
+        r.minute = self.minute
+        r.second = self.second
+
+        return r
+
+    def __nonzero__(self):
+
+        # RelativeDateTime instances are considered false in case
+        # they do not define any alterations
+        if (self.year is None and
+            self.years == 0 and
+            self.month is None and
+            self.months == 0 and
+            self.day is None and
+            self.weekday is None and
+            self.days == 0 and
+            self.hour is None and
+            self.hours == 0 and
+            self.minute is None and
+            self.minutes == 0 and
+            self.second is None and
+            self.seconds == 0):
+            return 0
+        else:
+            return 1
+
+    def __mul__(self,other):
+
+        # RelativeDateTime (self) * Number (other)
+        factor = float(other)
+
+        r = RelativeDateTime()
+        # date deltas
+        r.years = factor * self.years
+        r.months = factor * self.months
+        r.days = factor * self.days
+        # time deltas
+        r.hours = factor * self.hours
+        r.minutes = factor * self.minutes
+        r.seconds = factor * self.seconds
+        return r
+
+    __rmul__ = __mul__
+
+    def __div__(self,other):
+
+        # RelativeDateTime (self) / Number (other)
+        return self.__mul__(1/float(other))
+
+    def __eq__(self, other):
+
+        if isinstance(self, RelativeDateTime) and \
+           isinstance(other, RelativeDateTime):
+            # RelativeDateTime (self) == RelativeDateTime (other)
+            if (self.years == other.years and
+                self.months == other.months and
+                self.days == other.days and
+                self.year == other.year and
+                self.day == other.day and
+                self.hours == other.hours and
+                self.minutes == other.minutes and
+                self.seconds == other.seconds and
+                self.hour == other.hour and
+                self.minute == other.minute and
+                self.second == other.second and
+                self.weekday == other.weekday):
+                return 1
+            else:
+                return 0
+        else:
+            raise TypeError,"can't compare the two types"
+
+    def __hash__(self):
+
+        if self._hash is not None:
+            return self._hash
+        x = 1234
+        for value in (self.years, self.months, self.days,
+                      self.year, self.day,
+                      self.hours, self.minutes, self.seconds,
+                      self.hour, self.minute, self.second,
+                      self.weekday):
+            if value is None:
+                x = 135051820 ^ x
+            else:
+                x = hash(value) ^ x
+        self._hash = x
+        return x
+
+    def __str__(self,
+
+                join=_string.join):
+
+        l = []
+        append = l.append
+
+        # Format date part
+        if self.year is not None:
+            append('%04i-' % self.year)
+        elif self.years:
+            append('(%0+5i)-' % self.years)
+        else:
+            append('YYYY-')
+        if self.month is not None:
+            append('%02i-' % self.month)
+        elif self.months:
+            append('(%0+3i)-' % self.months)
+        else:
+            append('MM-')
+        if self.day is not None:
+            append('%02i' % self.day)
+        elif self.days:
+            append('(%0+3i)' % self.days)
+        else:
+            append('DD')
+        if self.weekday:
+            append(' %s:%i' % (Weekday[self.weekday[0]][:3],self.weekday[1]))
+        append(' ')
+        
+        # Normalize relative time values to avoid fractions
+        hours = self.hours
+        minutes = self.minutes
+        seconds = self.seconds
+        hours_fraction = hours - int(hours)
+        minutes = minutes + hours_fraction * 60.0
+        minutes_fraction = minutes - int(minutes)
+        seconds = seconds + minutes_fraction * 60.0
+        seconds_fraction = seconds - int(seconds)
+
+        if 0:
+            # Normalize to standard time ranges
+            if seconds > 60.0:
+                extra_minutes, seconds = divmod(seconds, 60.0)
+                minutes = minutes + extra_minutes
+            elif seconds < -60.0:
+                extra_minutes, seconds = divmod(seconds, -60.0)
+                minutes = minutes - extra_minutes
+            if minutes >= 60.0:
+                extra_hours, minutes = divmod(minutes, 60.0)
+                hours = hours + extra_hours
+            elif minutes <= -60.0:
+                extra_hours, minutes = divmod(minutes, -60.0)
+                hours = hours - extra_hours
+
+        # Format time part
+        if self.hour is not None:
+            append('%02i:' % self.hour)
+        elif hours:
+            append('(%0+3i):' % hours)
+        else:
+            append('HH:')
+        if self.minute is not None:
+            append('%02i:' % self.minute)
+        elif minutes:
+            append('(%0+3i):' % minutes)
+        else:
+            append('MM:')
+        if self.second is not None:
+            append('%02i' % self.second)
+        elif seconds:
+            append('(%0+3i)' % seconds)
+        else:
+            append('SS')
+            
+        return join(l,'')
+
+    def __repr__(self):
+
+        return "<%s instance for '%s' at 0x%x>" % ( 
+            self.__class__.__name__, 
+            self.__str__(), 
+            id(self))
+
+# Alias
+RelativeDate = RelativeDateTime
+
+def RelativeDateTimeFrom(*args, **kws):
+
+    """ RelativeDateTimeFrom(*args, **kws)
+
+        Generic RelativeDateTime instance constructor. Can handle
+        parsing strings and keywords.
+
+    """
+    if len(args) == 1:
+        # Single argument
+        arg = args[0]
+        if _isstring(arg):
+            import Parser
+            return apply(Parser.RelativeDateTimeFromString, args, kws)
+        elif isinstance(arg, RelativeDateTime):
+            return arg
+        else:
+            raise TypeError,\
+                  'cannot convert argument to RelativeDateTime'
+
+    else:
+        return apply(RelativeDateTime,args,kws)
+
+def RelativeDateTimeDiff(date1,date2,
+
+                         floor=_math.floor,int=int,divmod=divmod,
+                         RelativeDateTime=RelativeDateTime):
+
+    """ RelativeDateTimeDiff(date1,date2)
+
+        Returns a RelativeDateTime instance representing the difference
+        between date1 and date2 in relative terms.
+
+        The following should hold: 
+        
+        date2 + RelativeDateDiff(date1,date2) == date1 
+
+        for all dates date1 and date2.
+
+        Note that due to the algorithm used by this function, not the
+        whole range of DateTime instances is supported; there could
+        also be a loss of precision.
+
+        XXX There are still some problems left (thanks to Carel
+        Fellinger for pointing these out):
+
+        29 1 1901 ->  1 3 1901 = 1 month
+        29 1 1901 ->  1 3 1900 = -10 month and -28 days, but
+        29 1 1901 -> 28 2 1900 = -11 month and -1 day
+
+        and even worse:
+
+        >>> print RelativeDateDiff(Date(1900,3,1),Date(1901,2,1))
+        YYYY-(-11)-DD HH:MM:SS
+
+        with:
+
+        >>> print Date(1901,1,29) + RelativeDateTime(months=-11)
+        1900-03-01 00:00:00.00
+        >>> print Date(1901,2,1) + RelativeDateTime(months=-11)
+        1900-03-01 00:00:00.00
+
+    """
+    diff = date1 - date2
+    if diff.days == 0:
+        return RelativeDateTime()
+    date1months = date1.year * 12 + (date1.month - 1)
+    date2months = date2.year * 12 + (date2.month - 1)
+    #print 'months',date1months,date2months
+
+    # Calculate the months difference
+    diffmonths = date1months - date2months
+    #print 'diffmonths',diffmonths
+    if diff.days > 0:
+        years,months = divmod(diffmonths,12)
+    else:
+        years,months = divmod(diffmonths,-12)
+        years = -years
+    date3 = date2 + RelativeDateTime(years=years,months=months)
+    diff3 = date1 - date3
+    days = date1.absdays - date3.absdays
+    #print 'date3',date3,'diff3',diff3,'days',days
+
+    # Correction to ensure that all relative parts have the same sign
+    while days * diff.days < 0:
+        if diff.days > 0:
+            diffmonths = diffmonths - 1
+            years,months = divmod(diffmonths,12)
+        else:
+            diffmonths = diffmonths + 1
+            years,months = divmod(diffmonths,-12)
+            years = -years
+        #print 'diffmonths',diffmonths
+        date3 = date2 + RelativeDateTime(years=years,months=months)
+        diff3 = date1 - date3
+        days = date1.absdays - date3.absdays
+        #print 'date3',date3,'diff3',diff3,'days',days
+
+    # Drop the fraction part of days
+    if days > 0:
+        days = int(floor(days))
+    else:
+        days = int(-floor(-days))
+
+    return RelativeDateTime(years=years,
+                            months=months,
+                            days=days,
+                            hours=diff3.hour,
+                            minutes=diff3.minute,
+                            seconds=diff3.second)
+
+# Aliases
+RelativeDateDiff = RelativeDateTimeDiff
+Age = RelativeDateTimeDiff
+
+###
+
+_current_year = now().year
+_current_century, _current_year_in_century = divmod(_current_year, 100)
+_current_century = _current_century * 100
+
+def add_century(year,
+
+                current_year=_current_year,
+                current_century=_current_century):
+    
+    """ Sliding window approach to the Y2K problem: adds a suitable
+        century to the given year and returns it as integer.
+
+        The window used depends on the current year (at import time).
+        If adding the current century to the given year gives a year
+        within the range current_year-70...current_year+30 [both
+        inclusive], then the current century is added. Otherwise the
+        century (current + 1 or - 1) producing the least difference is
+        chosen.
+
+    """
+    if year > 99:
+        # Take it as-is
+        return year
+    year = year + current_century
+    diff = year - current_year
+    if diff >= -70 and diff <= 30:
+        return year
+    elif diff < -70:
+        return year + 100
+    else:
+        return year - 100
+
+# Reference formulas for JDN taken from the Calendar FAQ:
+
+def gregorian_jdn(year,month,day):
+
+    # XXX These require proper integer division.
+    a = (14-month)/12
+    y = year+4800-a
+    m = month + 12*a - 3
+    return day + (306*m+5)/10 + y*365 + y/4 - y/100 + y/400 - 32045
+
+def julian_jdn(year,month,day):
+
+    # XXX These require proper integer division.
+    a = (14-month)/12
+    y = year+4800-a
+    m = month + 12*a - 3
+    return day + (306*m+5)/10 + y*365 + y/4 - 32083
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/embedded/mx/DateTime/ISO.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,366 @@
+""" This module provides a set of constructors and routines to convert
+    between DateTime[Delta] instances and ISO representations of date
+    and time.
+
+    Note: Timezones are only interpreted by ParseDateTimeGMT(). All
+    other constructors silently ignore the time zone information.
+
+    Copyright (c) 1998-2000, Marc-Andre Lemburg; mailto:mal@lemburg.com
+    Copyright (c) 2000-2007, eGenix.com Software GmbH; mailto:info@egenix.com
+    See the documentation for further information on copyrights,
+    or contact the author.
+
+"""
+import DateTime,Timezone
+import re,string
+
+# Grammar: ISO 8601 (not all, but what we need from it)
+_year = '(?P<year>\d?\d\d\d)'
+_month = '(?P<month>\d?\d)'
+_day = '(?P<day>\d?\d)'
+_hour = '(?P<hour>\d?\d)'
+_minute = '(?P<minute>\d?\d)'
+_second = '(?P<second>\d?\d(?:\.\d+)?)'
+_sign = '(?P<sign>[-+])'
+_week = 'W(?P<week>\d?\d)'
+_zone = Timezone.isozone
+
+_weekdate = _year + '-?(?:' + _week + '-?' + _day + '?)?'
+_date = _year + '-?' + '(?:' + _month + '-?' + _day + '?)?'
+_time = _hour + ':?' + _minute + ':?' + _second + '?(?:' + _zone + ')?'
+
+isodatetimeRE = re.compile(_date + '(?:[ T]' + _time + ')?$')
+isodateRE = re.compile(_date + '$')
+isotimeRE = re.compile(_time + '$')
+isodeltaRE = re.compile(_sign + '?' + _time + '$')
+isoweekRE = re.compile(_weekdate + '$')
+isoweektimeRE = re.compile(_weekdate + '(?:[ T]' + _time + ')?$')
+
+def WeekTime(year,isoweek=1,isoday=1,hour=0,minute=0,second=0.0):
+
+    """Week(year,isoweek=1,isoday=1,hour=0,minute=0,second=0.0)
+
+       Returns a DateTime instance pointing to the given ISO week and
+       day.  isoday defaults to 1, which corresponds to Monday in the
+       ISO numbering. The time part is set as given.
+
+    """
+    d = DateTime.DateTime(year,1,1,hour,minute,second)
+    if d.iso_week[0] == year:
+        # 1.1. belongs to year (backup to Monday)
+        return d + (-d.day_of_week + 7 * (isoweek-1) + isoday-1)
+    else:
+        # 1.1. belongs to year-1 (advance to next Monday)
+        return d + (7-d.day_of_week + 7 * (isoweek-1) + isoday-1)
+
+# Alias
+Week = WeekTime
+
+# Aliases for the other constructors (they all happen to already use
+# ISO format)
+Date = DateTime.Date
+Time = DateTime.Time
+TimeDelta = DateTime.TimeDelta
+
+def ParseDateTime(isostring,parse_isodatetime=isodatetimeRE.match,
+
+                  strip=string.strip,atoi=string.atoi,atof=string.atof):
+
+    """ParseDateTime(isostring)
+
+       Returns a DateTime instance reflecting the given ISO date. A
+       time part is optional and must be delimited from the date by a
+       space or 'T'.
+
+       Time zone information is parsed, but not evaluated.
+
+    """
+    s = strip(isostring)
+    date = parse_isodatetime(s)
+    if not date:
+        raise ValueError,'wrong format, use YYYY-MM-DD HH:MM:SS'
+    year,month,day,hour,minute,second,zone = date.groups()
+    year = atoi(year)
+    if month is None:
+        month = 1
+    else:
+        month = atoi(month)
+    if day is None:
+        day = 1
+    else:
+        day = atoi(day)
+    if hour is None:
+        hour = 0
+    else:
+        hour = atoi(hour)
+    if minute is None:
+        minute = 0
+    else:
+        minute = atoi(minute)
+    if second is None:
+        second = 0.0
+    else:
+        second = atof(second)
+    return DateTime.DateTime(year,month,day,hour,minute,second)
+
+def ParseDateTimeGMT(isostring,parse_isodatetime=isodatetimeRE.match,
+
+                     strip=string.strip,atoi=string.atoi,atof=string.atof):
+
+    """ParseDateTimeGMT(isostring)
+
+       Returns a DateTime instance in UTC reflecting the given ISO
+       date. A time part is optional and must be delimited from the
+       date by a space or 'T'. Timezones are honored.
+
+    """
+    s = strip(isostring)
+    date = parse_isodatetime(s)
+    if not date:
+        raise ValueError,'wrong format, use YYYY-MM-DD HH:MM:SS'
+    year,month,day,hour,minute,second,zone = date.groups()
+    year = atoi(year)
+    if month is None:
+        month = 1
+    else:
+        month = atoi(month)
+    if day is None:
+        day = 1
+    else:
+        day = atoi(day)
+    if hour is None:
+        hour = 0
+    else:
+        hour = atoi(hour)
+    if minute is None:
+        minute = 0
+    else:
+        minute = atoi(minute)
+    if second is None:
+        second = 0.0
+    else:
+        second = atof(second)
+    offset = Timezone.utc_offset(zone)
+    return DateTime.DateTime(year,month,day,hour,minute,second) - offset
+
+# Alias
+ParseDateTimeUTC = ParseDateTimeGMT
+
+def ParseDate(isostring,parse_isodate=isodateRE.match,
+
+              strip=string.strip,atoi=string.atoi,atof=string.atof):
+
+    """ParseDate(isostring)
+
+       Returns a DateTime instance reflecting the given ISO date. A
+       time part may not be included.
+
+    """
+    s = strip(isostring)
+    date = parse_isodate(s)
+    if not date:
+        raise ValueError,'wrong format, use YYYY-MM-DD'
+    year,month,day = date.groups()
+    year = atoi(year)
+    if month is None:
+        month = 1
+    else:
+        month = atoi(month)
+    if day is None:
+        day = 1
+    else:
+        day = atoi(day)
+    return DateTime.DateTime(year,month,day)
+
+def ParseWeek(isostring,parse_isoweek=isoweekRE.match,
+
+              strip=string.strip,atoi=string.atoi,atof=string.atof):
+
+    """ParseWeek(isostring)
+
+       Returns a DateTime instance reflecting the given ISO date. A
+       time part may not be included.
+
+    """
+    s = strip(isostring)
+    date = parse_isoweek(s)
+    if not date:
+        raise ValueError,'wrong format, use yyyy-Www-d, e.g. 1998-W01-1'
+    year,week,day = date.groups()
+    year = atoi(year)
+    if week is None:
+        week = 1
+    else:
+        week = atoi(week)
+    if day is None:
+        day = 1
+    else:
+        day = atoi(day)
+    return Week(year,week,day)
+
+def ParseWeekTime(isostring,parse_isoweektime=isoweektimeRE.match,
+
+                  strip=string.strip,atoi=string.atoi,atof=string.atof):
+
+    """ParseWeekTime(isostring)
+
+       Returns a DateTime instance reflecting the given ISO date. A
+       time part is optional and must be delimited from the date by a
+       space or 'T'.
+
+    """
+    s = strip(isostring)
+    date = parse_isoweektime(s)
+    if not date:
+        raise ValueError,'wrong format, use e.g. "1998-W01-1 12:00:30"'
+    year,week,day,hour,minute,second,zone = date.groups()
+    year = atoi(year)
+    if week is None:
+        week = 1
+    else:
+        week = atoi(week)
+    if day is None:
+        day = 1
+    else:
+        day = atoi(day)
+    if hour is None:
+        hour = 0
+    else:
+        hour = atoi(hour)
+    if minute is None:
+        minute = 0
+    else:
+        minute = atoi(minute)
+    if second is None:
+        second = 0.0
+    else:
+        second = atof(second)
+    return WeekTime(year,week,day,hour,minute,second)
+
+def ParseTime(isostring,parse_isotime=isotimeRE.match,
+
+              strip=string.strip,atoi=string.atoi,atof=string.atof):
+
+    """ParseTime(isostring)
+
+       Returns a DateTimeDelta instance reflecting the given ISO time.
+       Hours and minutes must be given, seconds are
+       optional. Fractions of a second may also be used,
+       e.g. 12:23:12.34.
+
+    """
+    s = strip(isostring)
+    time = parse_isotime(s)
+    if not time:
+        raise ValueError,'wrong format, use HH:MM:SS'
+    hour,minute,second,zone = time.groups()
+    hour = atoi(hour)
+    minute = atoi(minute)
+    if second is not None:
+        second = atof(second)
+    else:
+        second = 0.0
+    return DateTime.TimeDelta(hour,minute,second)
+
+def ParseTimeDelta(isostring,parse_isodelta=isodeltaRE.match,
+
+                   strip=string.strip,atoi=string.atoi,atof=string.atof):
+
+    """ParseTimeDelta(isostring)
+
+       Returns a DateTimeDelta instance reflecting the given ISO time
+       as delta. Hours and minutes must be given, seconds are
+       optional. Fractions of a second may also be used,
+       e.g. 12:23:12.34. In addition to the ISO standard a sign may be
+       prepended to the time, e.g. -12:34.
+
+    """
+    s = strip(isostring)
+    time = parse_isodelta(s)
+    if not time:
+        raise ValueError,'wrong format, use [-]HH:MM:SS'
+    sign,hour,minute,second,zone = time.groups()
+    hour = atoi(hour)
+    minute = atoi(minute)
+    if second is not None:
+        second = atof(second)
+    else:
+        second = 0.0
+    if sign and sign == '-':
+        return -DateTime.TimeDelta(hour,minute,second)
+    else:
+        return DateTime.TimeDelta(hour,minute,second)
+
+def ParseAny(isostring):
+
+    """ParseAny(isostring)
+
+       Parses the given string and tries to convert it to a
+       DateTime[Delta] instance.
+
+    """
+    try:
+        return ParseDateTime(isostring)
+    except ValueError:
+        pass
+    try:
+        return ParseWeekTime(isostring)
+    except ValueError:
+        pass
+    try:
+        return ParseTimeDelta(isostring)
+    except ValueError:
+        raise ValueError,'unsupported format: "%s"' % isostring
+
+def str(datetime,tz=None):
+
+    """str(datetime,tz=DateTime.tz_offset(datetime))
+
+       Returns the datetime instance as ISO date string. tz can be
+       given as DateTimeDelta instance providing the time zone
+       difference from datetime's zone to UTC. It defaults to
+       DateTime.tz_offset(datetime) which assumes local time.
+
+    """
+    if tz is None:
+        tz = datetime.gmtoffset()
+    return '%04i-%02i-%02i %02i:%02i:%02i%+03i%02i' % (
+        datetime.year, datetime.month, datetime.day, 
+        datetime.hour, datetime.minute, datetime.second,
+        tz.hour,tz.minute)
+
+def strGMT(datetime):
+
+    """strGMT(datetime)
+
+       Returns the datetime instance as ISO date string assuming it is
+       given in GMT.
+
+    """
+    return '%04i-%02i-%02i %02i:%02i:%02i+0000' % (
+        datetime.year, datetime.month, datetime.day, 
+        datetime.hour, datetime.minute, datetime.second)
+
+def strUTC(datetime):
+
+    """strUTC(datetime)
+
+       Returns the datetime instance as ISO date string assuming it is
+       given in UTC.
+
+    """
+    return '%04i-%02i-%02i %02i:%02i:%02i+0000' % (
+        datetime.year, datetime.month, datetime.day, 
+        datetime.hour, datetime.minute, datetime.second)
+
+# Testing
+if __name__ == '__main__':
+    e = DateTime.Date(1900,1,1)
+    for i in range(100000):
+        d = e + i
+        year,week,day = d.iso_week
+        c = WeekTime(year,week,day)
+        if d != c:
+            print ' Check %s (given; %i) != %s (parsed)' % (d,d.day_of_week,c)
+        elif i % 1000 == 0:
+            print d,'ok'
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/embedded/mx/DateTime/Parser.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1225 @@
+# -*- coding: latin-1 -*-
+
+""" Date/Time string parsing module.
+
+    Note about the Y2K problems:
+
+       The parser can only handle years with at least 2 digits. 2
+       digit year values get expanded by adding the century using
+       DateTime.add_century(), while 3 digit year get converted
+       literally. To have 2 digit years also be interpreted literally,
+       add leading zeros, e.g. year 99 must be written as 099 or 0099.
+
+    Copyright (c) 1998-2000, Marc-Andre Lemburg; mailto:mal@lemburg.com
+    Copyright (c) 2000-2007, eGenix.com Software GmbH; mailto:info@egenix.com
+    See the documentation for further information on copyrights,
+    or contact the author. All Rights Reserved.
+
+"""
+import types,re,string
+import DateTime,ISO,ARPA,Timezone
+
+# Enable to produce debugging output
+_debug = 0
+
+# REs for matching date and time parts in a string; These REs
+# parse a superset of ARPA, ISO, American and European style dates.
+# Timezones are supported via the Timezone submodule.
+
+_year = '(?P<year>-?\d+\d(?!:))'
+_fullyear = '(?P<year>-?\d+\d\d(?!:))'
+_year_epoch = '(?:' + _year + '(?P<epoch> *[ABCDE\.]+)?)'
+_fullyear_epoch = '(?:' + _fullyear + '(?P<epoch> *[ABCDE\.]+)?)'
+_relyear = '(?:\((?P<relyear>[-+]?\d+)\))'
+
+_month = '(?P<month>\d?\d(?!:))'
+_fullmonth = '(?P<month>\d\d(?!:))'
+_litmonth = ('(?P<litmonth>'
+             'jan|feb|mar|apr|may|jun|jul|aug|sep|oct|nov|dec|'
+             'mär|mae|mrz|mai|okt|dez|'
+             'fev|avr|juin|juil|aou|aoû|déc|'
+             'ene|abr|ago|dic|'
+             'out'
+             ')[a-z,\.;]*')
+litmonthtable = {
+    # English
+    'jan':1, 'feb':2, 'mar':3, 'apr':4, 'may':5, 'jun':6,
+    'jul':7, 'aug':8, 'sep':9, 'oct':10, 'nov':11, 'dec':12,
+    # German
+    'mär':3, 'mae':3, 'mrz':3, 'mai':5, 'okt':10, 'dez':12,
+    # French
+    'fev':2, 'avr':4, 'juin':6, 'juil':7, 'aou':8, 'aoû':8,
+    'déc':12,
+    # Spanish
+    'ene':1, 'abr':4, 'ago':8, 'dic':12,
+    # Portuguese
+    'out':10,
+    }
+_relmonth = '(?:\((?P<relmonth>[-+]?\d+)\))'
+
+_day = '(?P<day>\d?\d(?!:))'
+_usday = '(?P<day>\d?\d(?!:))(?:st|nd|rd|th|[,\.;])?'
+_fullday = '(?P<day>\d\d(?!:))'
+_litday = ('(?P<litday>'
+           'mon|tue|wed|thu|fri|sat|sun|'
+           'die|mit|don|fre|sam|son|'
+           'lun|mar|mer|jeu|ven|sam|dim|'
+           'mie|jue|vie|sab|dom|'
+           'pri|seg|ter|cua|qui'
+           ')[a-z]*')
+litdaytable = {
+    # English
+    'mon':0, 'tue':1, 'wed':2, 'thu':3, 'fri':4, 'sat':5, 'sun':6,
+    # German
+    'die':1, 'mit':2, 'don':3, 'fre':4, 'sam':5, 'son':6,
+    # French
+    'lun':0, 'mar':1, 'mer':2, 'jeu':3, 'ven':4, 'sam':5, 'dim':6,
+    # Spanish
+    'mie':2, 'jue':3, 'vie':4, 'sab':5, 'dom':6,
+    # Portuguese
+    'pri':0, 'seg':1, 'ter':2, 'cua':3, 'qui':4,
+    }
+_relday = '(?:\((?P<relday>[-+]?\d+)\))'
+
+_hour = '(?P<hour>[012]?\d)'
+_minute = '(?P<minute>[0-6]\d)'
+_second = '(?P<second>[0-6]\d(?:[.,]\d+)?)'
+
+_days = '(?P<days>\d*\d(?:[.,]\d+)?)'
+_hours = '(?P<hours>\d*\d(?:[.,]\d+)?)'
+_minutes = '(?P<minutes>\d*\d(?:[.,]\d+)?)'
+_seconds = '(?P<seconds>\d*\d(?:[.,]\d+)?)'
+
+_reldays = '(?:\((?P<reldays>[-+]?\d+(?:[.,]\d+)?)\))'
+_relhours = '(?:\((?P<relhours>[-+]?\d+(?:[.,]\d+)?)\))'
+_relminutes = '(?:\((?P<relminutes>[-+]?\d+(?:[.,]\d+)?)\))'
+_relseconds = '(?:\((?P<relseconds>[-+]?\d+(?:[.,]\d+)?)\))'
+
+_sign = '(?:(?P<sign>[-+]) *)'
+_week = 'W(?P<week>\d?\d)'
+_zone = Timezone.zone
+_ampm = '(?P<ampm>[ap][m.]+)'
+
+_time = (_hour + ':' + _minute + '(?::' + _second + '|[^:]|$) *'
+         + _ampm + '? *' + _zone + '?')
+_isotime = _hour + ':?' + _minute + ':?' + _second + '? *' + _zone + '?'
+
+_weekdate = _year + '-?(?:' + _week + '-?' + _day + '?)?'
+_eurodate = _day + '\.' + _month + '\.' + _year_epoch + '?'
+_usdate = _month + '/' + _day + '(?:/' + _year_epoch + '|[^/]|$)'
+_altusdate = _month + '-' + _day + '-' + _fullyear_epoch
+_isodate = _year + '-' + _month + '-?' + _day + '?(?!:)'
+_altisodate = _year + _fullmonth + _fullday + '(?!:)'
+_usisodate = _fullyear + '/' + _fullmonth + '/' + _fullday
+_litdate = ('(?:'+ _litday + ',? )? *' + 
+            _usday + ' *' + 
+            '[- ] *(?:' + _litmonth + '|'+ _month +') *[- ] *' +
+            _year_epoch + '?')
+_altlitdate = ('(?:'+ _litday + ',? )? *' + 
+               _litmonth + '[ ,.a-z]+' +
+               _usday + 
+               '(?:[ a-z]+' + _year_epoch + ')?')
+_eurlitdate = ('(?:'+ _litday + ',?[ a-z]+)? *' + 
+               '(?:'+ _usday + '[ a-z]+)? *' +
+               _litmonth + 
+               '(?:[ ,.a-z]+' + _year_epoch + ')?')
+
+_relany = '[*%?a-zA-Z]+'
+
+_relisodate = ('(?:(?:' + _relany + '|' + _year + '|' + _relyear + ')-' +
+               '(?:' + _relany + '|' + _month + '|' + _relmonth + ')-' +
+               '(?:' + _relany + '|' + _day + '|' + _relday + '))')
+
+_asctime = ('(?:'+ _litday + ',? )? *' + 
+                _usday + ' *' + 
+                '[- ] *(?:' + _litmonth + '|'+ _month +') *[- ]' +
+                '(?:[0-9: ]+)' + 
+                _year_epoch + '?')
+
+_relisotime = ('(?:(?:' + _relany + '|' + _hour + '|' + _relhours + '):' +
+               '(?:' + _relany + '|' + _minute + '|' + _relminutes + ')' +
+               '(?::(?:' + _relany + '|' + _second + '|' + _relseconds + '))?)')
+
+_isodelta1 = (_sign + '?' +
+              _days + ':' + _hours + ':' + _minutes + ':' + _seconds)
+_isodelta2 = (_sign + '?' + 
+              _hours + ':' + _minutes + ':' + _seconds)
+_isodelta3 = (_sign + '?' + 
+              _hours + ':' + _minutes)
+_litdelta = (_sign + '?' +
+             '(?:' + _days + ' *d[a-z]*[,; ]*)?' + 
+             '(?:' + _hours + ' *h[a-z]*[,; ]*)?' + 
+             '(?:' + _minutes + ' *m[a-z]*[,; ]*)?' +
+             '(?:' + _seconds + ' *s[a-z]*[,; ]*)?')
+_litdelta2 = (_sign + '?' +
+             '(?:' + _days + ' *d[a-z]*[,; ]*)?' + 
+              _hours + ':' + _minutes + '(?::' + _seconds + ')?')
+
+_timeRE = re.compile(_time, re.I)
+_isotimeRE = re.compile(_isotime, re.I)
+_isodateRE = re.compile(_isodate, re.I)
+_altisodateRE = re.compile(_altisodate, re.I)
+_usisodateRE = re.compile(_usisodate, re.I)
+_eurodateRE = re.compile(_eurodate, re.I)
+_usdateRE = re.compile(_usdate, re.I)
+_altusdateRE = re.compile(_altusdate, re.I)
+_litdateRE = re.compile(_litdate, re.I)
+_altlitdateRE = re.compile(_altlitdate, re.I)
+_eurlitdateRE = re.compile(_eurlitdate, re.I)
+_relisodateRE = re.compile(_relisodate, re.I)
+_asctimeRE = re.compile(_asctime, re.I)
+_isodelta1RE = re.compile(_isodelta1)
+_isodelta2RE = re.compile(_isodelta2)
+_isodelta3RE = re.compile(_isodelta3)
+_litdeltaRE = re.compile(_litdelta)
+_litdelta2RE = re.compile(_litdelta2)
+_relisotimeRE = re.compile(_relisotime, re.I)
+
+# Available date parsers
+_date_formats = ('euro',
+                 'usiso', 'us', 'altus',
+                 'iso', 'altiso', 
+                 'lit', 'altlit', 'eurlit',
+                 'unknown')
+
+# Available time parsers
+_time_formats = ('standard',
+                 'iso',
+                 'unknown')
+
+def _parse_date(text, formats=_date_formats, defaultdate=None,
+
+                int=int,float=float,lower=string.lower,
+                add_century=DateTime.add_century,
+                now=DateTime.now,us_formats=('us', 'altus'),
+                iso_formats=('iso', 'altiso', 'usiso')):
+
+    """ Parses the date part given in text and returns a tuple
+        (text,day,month,year,style) with the following
+        meanings:
+
+        * text gives the original text without the date part
+
+        * day,month,year give the parsed date
+
+        * style gives information about which parser was successful:
+          'euro' - the European date parser
+          'us' - the US date parser
+          'altus' - the alternative US date parser (with '-' instead of '/')
+          'iso' - the ISO date parser
+          'altiso' - the alternative ISO date parser (without '-')
+          'usiso' - US style ISO date parser (yyyy/mm/dd)
+          'lit' - the US literal date parser
+          'altlit' - the alternative US literal date parser
+          'eurlit' - the Eurpean literal date parser
+          'unknown' - no date part was found, defaultdate was used
+
+        formats may be set to a tuple of style strings specifying
+        which of the above parsers to use and in which order to try
+        them. Default is to try all of them in the above order.
+
+        defaultdate provides the defaults to use in case no date part
+        is found. Most other parsers default to the current year
+        January 1 if some of these date parts are missing.
+
+        If 'unknown' is not given in formats and the date cannot be
+        parsed, a ValueError is raised.
+
+    """
+    match = None
+    style = ''
+    
+    # Apply parsers in the order given in formats
+    for format in formats:
+
+        if format == 'euro':
+            # European style date
+            match = _eurodateRE.search(text)
+            if match is not None:
+                day,month,year,epoch = match.groups()
+                if year:
+                    if len(year) == 2:
+                        # Y2K problem:
+                        year = add_century(int(year))
+                    else:
+                        year = int(year)
+                else:
+                    if defaultdate is None:
+                        defaultdate = now()
+                    year = defaultdate.year
+                if epoch and 'B' in epoch:
+                    year = -year + 1
+                month = int(month)
+                day = int(day)
+                # Could have mistaken euro format for us style date
+                # which uses month, day order
+                if month > 12 or month == 0:
+                    match = None
+                    continue
+                break
+
+        elif format in iso_formats:
+            # ISO style date
+            if format == 'iso':
+                match = _isodateRE.search(text)
+            elif format == 'altiso':
+                match = _altisodateRE.search(text)
+                # Avoid mistaking ISO time parts ('Thhmmss') for dates
+                if match is not None:
+                    left, right = match.span()
+                    if left > 0 and \
+                       text[left - 1:left] == 'T':
+                        match = None
+                        continue
+            else:
+                match = _usisodateRE.search(text)
+            if match is not None:
+                year,month,day = match.groups()
+                if len(year) == 2:
+                    # Y2K problem:
+                    year = add_century(int(year))
+                else:
+                    year = int(year)
+                # Default to January 1st
+                if not month:
+                    month = 1
+                else:
+                    month = int(month)
+                if not day:
+                    day = 1
+                else:
+                    day = int(day)
+                break
+
+        elif format in us_formats:
+            # US style date
+            if format == 'us':
+                match = _usdateRE.search(text)
+            else:
+                match = _altusdateRE.search(text)
+            if match is not None:
+                month,day,year,epoch = match.groups()
+                if year:
+                    if len(year) == 2:
+                        # Y2K problem:
+                        year = add_century(int(year))
+                    else:
+                        year = int(year)
+                else:
+                    if defaultdate is None:
+                        defaultdate = now()
+                    year = defaultdate.year
+                if epoch and 'B' in epoch:
+                    year = -year + 1
+                # Default to 1 if no day is given
+                if day:
+                    day = int(day)
+                else:
+                    day = 1
+                month = int(month)
+                # Could have mistaken us format for euro style date
+                # which uses day, month order
+                if month > 12 or month == 0:
+                    match = None
+                    continue
+                break
+
+        elif format == 'lit':
+            # US style literal date
+            match = _litdateRE.search(text)
+            if match is not None:
+                litday,day,litmonth,month,year,epoch = match.groups()
+                break
+
+        elif format == 'altlit':
+            # Alternative US style literal date
+            match = _altlitdateRE.search(text)
+            if match is not None: 
+                litday,litmonth,day,year,epoch = match.groups()
+                month = '<missing>'
+                break
+
+        elif format == 'eurlit':
+            # European style literal date
+            match = _eurlitdateRE.search(text)
+            if match is not None: 
+                litday,day,litmonth,year,epoch = match.groups()
+                month = '<missing>'
+                break
+
+        elif format == 'unknown':
+            # No date part: use defaultdate
+            if defaultdate is None:
+                defaultdate = now()
+            year = defaultdate.year
+            month = defaultdate.month
+            day = defaultdate.day
+            style = format
+            break
+
+    # Check success
+    if match is not None:
+        # Remove date from text
+        left, right = match.span()
+        if 0 and _debug:
+            print 'parsed date:',repr(text[left:right]),\
+                  'giving:',year,month,day
+        text = text[:left] + text[right:]
+        style = format
+        
+    elif not style:
+        # Not recognized: raise an error
+        raise ValueError, 'unknown date format: "%s"' % text
+
+    # Literal date post-processing
+    if style in ('lit', 'altlit', 'eurlit'):
+        if 0 and _debug: print match.groups()
+        # Default to current year, January 1st
+        if not year:
+            if defaultdate is None:
+                defaultdate = now()
+            year = defaultdate.year
+        else:
+            if len(year) == 2:
+                # Y2K problem:
+                year = add_century(int(year))
+            else:
+                year = int(year)
+        if epoch and 'B' in epoch:
+            year = -year + 1
+        if litmonth:
+            litmonth = lower(litmonth)
+            try:
+                month = litmonthtable[litmonth]
+            except KeyError:
+                raise ValueError,\
+                      'wrong month name: "%s"' % litmonth
+        elif month:
+            month = int(month)
+        else:
+            month = 1
+        if day:
+            day = int(day)
+        else:
+            day = 1
+
+    #print '_parse_date:',text,day,month,year,style
+    return text,day,month,year,style
+
+def _parse_time(text, formats=_time_formats,
+
+                int=int,float=float,replace=string.replace):
+
+    """ Parses a time part given in text and returns a tuple
+        (text,hour,minute,second,offset,style) with the following
+        meanings:
+
+        * text gives the original text without the time part
+        * hour,minute,second give the parsed time
+        * offset gives the time zone UTC offset
+        * style gives information about which parser was successful:
+          'standard' - the standard parser
+          'iso' - the ISO time format parser
+          'unknown' - no time part was found
+
+        formats may be set to a tuple specifying the parsers to use:
+          'standard' - standard time format with ':' delimiter
+          'iso' - ISO time format (superset of 'standard')
+          'unknown' - default to 0:00:00, 0 zone offset
+
+        If 'unknown' is not given in formats and the time cannot be
+        parsed, a ValueError is raised.
+
+    """
+    match = None
+    style = ''
+
+    # Apply parsers in the order given in formats
+    for format in formats:
+
+        # Standard format
+        if format == 'standard':
+            match = _timeRE.search(text)
+            if match is not None:
+                hour,minute,second,ampm,zone = match.groups()
+                style = 'standard'
+                break
+
+        # ISO format
+        if format == 'iso':
+            match =  _isotimeRE.search(text)
+            if match is not None:
+                hour,minute,second,zone = match.groups()
+                ampm = None
+                style = 'iso'
+                break
+
+        # Default handling
+        elif format == 'unknown':
+            hour,minute,second,offset = 0,0,0.0,0
+            style = 'unknown'
+            break
+
+    if not style:
+        # If no default handling should be applied, raise an error
+        raise ValueError, 'unknown time format: "%s"' % text
+
+    # Post-processing
+    if match is not None:
+        if zone:
+            # Convert to UTC offset
+            offset = Timezone.utc_offset(zone)
+        else:
+            offset = 0
+        hour = int(hour)
+        if ampm:
+            if ampm[0] in ('p', 'P'):
+                # 12pm = midday
+                if hour < 12:
+                    hour = hour + 12
+            else:
+                # 12am = midnight 
+                if hour >= 12:
+                    hour = hour - 12
+        if minute:
+            minute = int(minute)
+        else:
+            minute = 0
+        if not second:
+            second = 0.0
+        else:
+            if ',' in second:
+                second = replace(second, ',', '.')
+            second = float(second)
+
+        # Remove time from text
+        left,right = match.span()
+        if 0 and _debug: 
+            print 'parsed time:',repr(text[left:right]),\
+                  'giving:',hour,minute,second,offset
+        text = text[:left] + text[right:]
+
+    #print '_parse_time:',text,hour,minute,second,offset,style
+    return text,hour,minute,second,offset,style
+
+###
+
+def DateTimeFromString(text, formats=_date_formats, defaultdate=None,
+                       time_formats=_time_formats,
+
+                       DateTime=DateTime):
+
+    """ DateTimeFromString(text, [formats, defaultdate])
+    
+        Returns a DateTime instance reflecting the date and time given
+        in text. In case a timezone is given, the returned instance
+        will point to the corresponding UTC time value. Otherwise, the
+        value is set as given in the string.
+
+        formats may be set to a tuple of strings specifying which of
+        the following parsers to use and in which order to try
+        them. Default is to try all of them in the order given below:
+
+          'euro' - the European date parser
+          'us' - the US date parser
+          'altus' - the alternative US date parser (with '-' instead of '/')
+          'iso' - the ISO date parser
+          'altiso' - the alternative ISO date parser (without '-')
+          'usiso' - US style ISO date parser (yyyy/mm/dd)
+          'lit' - the US literal date parser
+          'altlit' - the alternative US literal date parser
+          'eurlit' - the Eurpean literal date parser
+          'unknown' - if no date part is found, use defaultdate
+
+        defaultdate provides the defaults to use in case no date part
+        is found. Most of the parsers default to the current year
+        January 1 if some of these date parts are missing.
+
+        If 'unknown' is not given in formats and the date cannot
+        be parsed, a ValueError is raised.
+
+        time_formats may be set to a tuple of strings specifying which
+        of the following parsers to use and in which order to try
+        them. Default is to try all of them in the order given below:
+
+          'standard' - standard time format HH:MM:SS (with ':' delimiter)
+          'iso' - ISO time format (superset of 'standard')
+          'unknown' - default to 00:00:00 in case the time format
+                      cannot be parsed
+
+        Defaults to 00:00:00.00 for time parts that are not included
+        in the textual representation.
+
+        If 'unknown' is not given in time_formats and the time cannot
+        be parsed, a ValueError is raised.
+
+    """
+    origtext = text
+    formats = tuple(formats)
+
+    if formats is _date_formats or \
+       'iso' in formats or \
+       'altiso' in formats:
+
+        # First try standard order (parse time, then date)
+        if formats[0] not in ('iso', 'altiso'):
+            text,hour,minute,second,offset,timestyle = _parse_time(
+                origtext,
+                time_formats)
+            text,day,month,year,datestyle = _parse_date(
+                text,
+                formats + ('unknown',),
+                defaultdate)
+            if 0 and _debug:
+                print 'tried time/date on %s, date=%s, time=%s' % (origtext,
+                                                                   datestyle,
+                                                                   timestyle)
+        else:
+            timestyle = 'iso'
+            
+        # If this fails, try the ISO order (date, then time)
+        if timestyle in ('iso', 'unknown'):
+            text,day,month,year,datestyle = _parse_date(
+                origtext,
+                formats,
+                defaultdate)
+            text,hour,minute,second,offset,timestyle = _parse_time(
+                text,
+                time_formats)
+            if 0 and _debug:
+                print 'tried ISO on %s, date=%s, time=%s' % (origtext,
+                                                             datestyle,
+                                                             timestyle)
+    else:
+        # Standard order: time part, then date part
+        text,hour,minute,second,offset,timestyle = _parse_time(
+            origtext,
+            time_formats)
+        text,day,month,year,datestyle = _parse_date(
+            text,
+            formats,
+            defaultdate)
+
+    if (datestyle == 'unknown' and 'unknown' not in formats) or \
+       (timestyle == 'unknown' and 'unknown' not in time_formats):
+        raise ValueError,\
+              'Failed to parse "%s": found "%s" date, "%s" time' % \
+              (origtext, datestyle, timestyle)
+    
+    try:
+        return DateTime.DateTime(year,month,day,hour,minute,second) - offset
+    except DateTime.RangeError, why:
+        raise DateTime.RangeError,\
+              'Failed to parse "%s": %s' % (origtext, why)
+
+def DateFromString(text, formats=_date_formats, defaultdate=None,
+
+                   DateTime=DateTime):
+
+    """ DateFromString(text, [formats, defaultdate])
+    
+        Returns a DateTime instance reflecting the date given in
+        text. A possibly included time part is ignored.
+
+        formats and defaultdate work just like for
+        DateTimeFromString().
+
+    """
+    _text,day,month,year,datestyle = _parse_date(text, formats, defaultdate)
+
+    if datestyle == 'unknown' and \
+       'unknown' not in formats:
+        raise ValueError,\
+              'Failed to parse "%s": found "%s" date' % \
+              (origtext, datestyle)
+
+    try:
+        return DateTime.DateTime(year,month,day)
+    except DateTime.RangeError, why:
+        raise DateTime.RangeError,\
+              'Failed to parse "%s": %s' % (text, why)
+
+def validateDateTimeString(text, formats=_date_formats):

+    """ validateDateTimeString(text, [formats])
+
+        Validates the given text and returns 1/0 depending on whether
+        text includes parseable date and time values or not.
+
+        formats works just like for DateTimeFromString() and defines
+        the order of date/time parsers to apply. It defaults to the
+        same list of parsers as for DateTimeFromString().
+
+    """
+    # Strip the 'unknown' fallback so that unparseable input makes
+    # DateTimeFromString() raise (caught below) instead of silently
+    # substituting defaults.
+    formats = list(formats)
+    if 'unknown' in formats:
+        formats.remove('unknown')
+    try:
+        DateTimeFromString(text, formats)
+    except (DateTime.RangeError, ValueError), why:
+        return 0
+    return 1
+
+def validateDateString(text, formats=_date_formats):

+    """ validateDateString(text, [formats])
+
+        Validates the given text and returns 1/0 depending on whether
+        text includes a parseable date value or not.
+
+        formats works just like for DateTimeFromString() and defines
+        the order of date/time parsers to apply. It defaults to the
+        same list of parsers as for DateTimeFromString().
+
+    """
+    # Strip the 'unknown' fallback so that unparseable input makes
+    # DateFromString() raise (caught below) instead of silently
+    # substituting defaults.
+    formats = list(formats)
+    if 'unknown' in formats:
+        formats.remove('unknown')
+    try:
+        DateFromString(text, formats)
+    except (DateTime.RangeError, ValueError), why:
+        return 0
+    return 1
+
+def TimeFromString(text, formats=_time_formats,

+                   DateTime=DateTime):

+    """ TimeFromString(text, [formats])
+
+        Returns a DateTimeDelta instance reflecting the time given in
+        text. A possibly included date part is ignored.
+
+        formats may be set to a tuple of strings specifying which of
+        the following parsers to use and in which order to try
+        them. Default is to try all of them in the order given below:
+
+          'standard' - standard time format with ':' delimiter
+          'iso' - ISO time format (superset of 'standard')
+          'unknown' - default to 00:00:00 in case the time format
+                      cannot be parsed
+
+        Defaults to 00:00:00.00 for parts that are not included in the
+        textual representation.
+
+        Raises ValueError if the time cannot be parsed and 'unknown'
+        is not included in formats.
+
+    """
+    _text,hour,minute,second,offset,timestyle = _parse_time(
+        text,
+        formats)
+
+    if timestyle == 'unknown' and \
+       'unknown' not in formats:
+        raise ValueError,\
+              'Failed to parse "%s": found "%s" time' % \
+              (text, timestyle)
+
+    try:
+        dtd = DateTime.DateTimeDelta(0.0, hour, minute, second)
+    except DateTime.RangeError, why:
+        raise DateTime.RangeError,\
+              'Failed to parse "%s": %s' % (text, why)
+    else:
+        # XXX What to do with offset ?
+        # (a timezone offset parsed from text is currently ignored)
+        return dtd
+
+#
+# XXX Still missing: validateTimeString(), validateDateTimeDeltaString()
+#                    and validateTimeDeltaString()
+#
+
+def DateTimeDeltaFromString(text,

+                            float=float,DateTime=DateTime):

+    """ DateTimeDeltaFromString(text)
+
+        Returns a DateTimeDelta instance reflecting the delta given in
+        text. Defaults to 0:00:00:00.00 for parts that are not
+        included in the textual representation or cannot be parsed.
+
+    """
+    # Try the delta formats one after another; the first matching
+    # pattern wins. Components a pattern does not capture are set
+    # to None and default to 0.0 below.
+    match = _isodelta1RE.search(text)
+    if match is not None:
+        sign, days, hours, minutes, seconds = match.groups()
+    else:
+        match = _litdelta2RE.search(text)
+        if match is not None:
+            sign, days, hours, minutes, seconds = match.groups()
+        else:
+            match = _isodelta2RE.search(text)
+            if match is not None:
+                sign, hours, minutes, seconds = match.groups()
+                days = None
+            else:
+                match = _isodelta3RE.search(text)
+                if match is not None:
+                    sign, hours, minutes = match.groups()
+                    days = None
+                    seconds = None
+                else:
+                    match = _litdeltaRE.search(text)
+                    if match is not None:
+                        sign, days, hours, minutes, seconds = match.groups()

+                    else:
+                        # Not matched: return the documented zero delta
+                        return DateTime.DateTimeDelta(0.0)
+
+    # Conversions: missing (None or empty) parts default to 0.0
+    if days:
+        days = float(days)
+    else:
+        days = 0.0
+    if hours:
+        hours = float(hours)
+    else:
+        hours = 0.0
+    if minutes:
+        minutes = float(minutes)
+    else:
+        minutes = 0.0
+    if seconds:
+        seconds = float(seconds)
+    else:
+        seconds = 0.0
+    # Only an explicit leading '-' negates the whole delta
+    if sign != '-':
+        sign = 1
+    else:
+        sign = -1
+
+    try:
+        dtd = DateTime.DateTimeDelta(days,hours,minutes,seconds)
+    except DateTime.RangeError, why:
+        raise DateTime.RangeError,\
+              'Failed to parse "%s": %s' % (text, why)
+    else:
+        if sign < 0:
+            return -dtd
+        else:
+            return dtd
+
+# Aliases
+# TimeDeltaFromString is simply another name for
+# DateTimeDeltaFromString().
+TimeDeltaFromString = DateTimeDeltaFromString
+
+###
+
+def _parse_reldate(text,

+                   int=int,float=float):

+    """ Parse the date part of a relative date/time string.
+
+        Returns (year,years,month,months,day,days): the absolute
+        values (year, month, day) are ints, or None when not given
+        in text; the relative values (years, months, days) are
+        floats defaulting to 0.
+    """
+    match = _relisodateRE.search(text)
+    if match is not None:
+        groups = match.groups()
+        if 0 and _debug: print groups
+        year,years,month,months,day,days = groups
+        if year:
+            year = int(year)
+        if years:
+            years = float(years)
+        else:
+            years = 0
+        if month:
+            month = int(month)
+        if months:
+            months = float(months)
+        else:
+            months = 0
+        if day:
+            day = int(day)
+        if days:
+            days = float(days)
+        else:
+            days = 0
+        return year,years,month,months,day,days
+    else:
+        # No date part found: all absolute parts unknown, no deltas
+        return None,0,None,0,None,0
+
+def _parse_reltime(text,

+                   int=int,float=float):

+    """ Parse the time part of a relative date/time string.
+
+        Returns (hour,hours,minute,minutes,second,seconds): the
+        absolute values (hour, minute, second) are ints, or None
+        when not given in text; the relative values (hours,
+        minutes, seconds) are floats defaulting to 0.
+    """
+    match = _relisotimeRE.search(text)
+    if match is not None:
+        groups = match.groups()
+        if 0 and _debug: print groups
+        hour,hours,minute,minutes,second,seconds = groups
+        if hour:
+            hour = int(hour)
+        if hours:
+            hours = float(hours)
+        else:
+            hours = 0
+        if minute:
+            minute = int(minute)
+        if minutes:
+            minutes = float(minutes)
+        else:
+            minutes = 0
+        if second:
+            second = int(second)
+        if seconds:
+            seconds = float(seconds)
+        else:
+            seconds = 0
+        return hour,hours,minute,minutes,second,seconds
+    else:
+        # No time part found: all absolute parts unknown, no deltas
+        return None,0,None,0,None,0
+
+def RelativeDateTimeFromString(text,

+                               RelativeDateTime=DateTime.RelativeDateTime):

+    """ RelativeDateTimeFromString(text)
+
+        Returns a RelativeDateTime instance reflecting the relative
+        date and time given in text.
+
+        Defaults to wildcards for parts or values which are not
+        included in the textual representation or cannot be parsed.
+
+        The format used in text must adhere to the following syntax:
+
+                        [YYYY-MM-DD] [HH:MM[:SS]]
+
+        with the usual meanings. Values which should not be altered
+        may be replaced with '*', '%', '?' or any combination of
+        letters, e.g. 'YYYY'. Relative settings must be enclosed in
+        parenthesis if given and should include a sign, e.g. '(+0001)'
+        for the year part. All other settings are interpreted as
+        absolute values.
+
+        Date and time parts are both optional as a whole. Seconds in
+        the time part are optional too. Everything else (including the
+        hyphens and colons) is mandatory.
+
+    """
+    # Combine the absolute (singular keywords) and relative (plural
+    # keywords) settings parsed from text into one instance.
+    year,years,month,months,day,days = _parse_reldate(text)
+    hour,hours,minute,minutes,second,seconds = _parse_reltime(text)
+    return RelativeDateTime(year=year,years=years,
+                            month=month,months=months,
+                            day=day,days=days,
+                            hour=hour,hours=hours,
+                            minute=minute,minutes=minutes,
+                            second=second,seconds=seconds)
+
+def RelativeDateFromString(text,

+                           RelativeDateTime=DateTime.RelativeDateTime):

+    """ RelativeDateFromString(text)
+
+        Same as RelativeDateTimeFromString(text) except that only the
+        date part of text is taken into account.
+
+    """
+    # Time-related keywords are left at their RelativeDateTime
+    # defaults (wildcards).
+    year,years,month,months,day,days = _parse_reldate(text)
+    return RelativeDateTime(year=year,years=years,
+                            month=month,months=months,
+                            day=day,days=days)
+
+def RelativeTimeFromString(text,

+                           RelativeDateTime=DateTime.RelativeDateTime):

+    """ RelativeTimeFromString(text)
+
+        Same as RelativeDateTimeFromString(text) except that only the
+        time part of text is taken into account.
+
+    """
+    # Date-related keywords are left at their RelativeDateTime
+    # defaults (wildcards).
+    hour,hours,minute,minutes,second,seconds = _parse_reltime(text)
+    return RelativeDateTime(hour=hour,hours=hours,
+                            minute=minute,minutes=minutes,
+                            second=second,seconds=seconds)
+
+### Tests
+
+def _test():

+    """ Self-test for the parser functions.
+
+        Runs tables of (input, expected string) pairs through
+        DateTimeFromString(), DateTimeDeltaFromString() and
+        TimeFromString() and prints any mismatches. A reference of
+        'ignore' skips the comparison; a reference of None means
+        the input is expected to raise an exception.
+    """
+
+    import sys

+    t = DateTime.now()

+    print 'Testing DateTime Parser...'

+    l = [

+        # Literal formats
+        ('Sun Nov  6 08:49:37 1994', '1994-11-06 08:49:37.00'),
+        ('sun nov  6 08:49:37 1994', '1994-11-06 08:49:37.00'),
+        ('sUN NOV  6 08:49:37 1994', '1994-11-06 08:49:37.00'),
+        ('Sunday, 06-Nov-94 08:49:37 GMT', '1994-11-06 08:49:37.00'),
+        ('Sun, 06 Nov 1994 08:49:37 GMT', '1994-11-06 08:49:37.00'),
+        ('06-Nov-94 08:49:37', '1994-11-06 08:49:37.00'),
+        ('06-Nov-94', '1994-11-06 00:00:00.00'),
+        ('06-NOV-94', '1994-11-06 00:00:00.00'),
+        ('November 19 08:49:37', '%s-11-19 08:49:37.00' % t.year),
+        ('Nov. 9', '%s-11-09 00:00:00.00' % t.year),
+        ('Sonntag, der 6. November 1994, 08:49:37 GMT', '1994-11-06 08:49:37.00'),
+        ('6. November 2001, 08:49:37', '2001-11-06 08:49:37.00'),
+        ('sep 6', '%s-09-06 00:00:00.00' % t.year),
+        ('sep 6 2000', '2000-09-06 00:00:00.00'),
+        ('September 29', '%s-09-29 00:00:00.00' % t.year),
+        ('Sep. 29', '%s-09-29 00:00:00.00' % t.year),
+        ('6 sep', '%s-09-06 00:00:00.00' % t.year),
+        ('29 September', '%s-09-29 00:00:00.00' % t.year),
+        ('29 Sep.', '%s-09-29 00:00:00.00' % t.year),
+        ('sep 6 2001', '2001-09-06 00:00:00.00'),
+        ('Sep 6, 2001', '2001-09-06 00:00:00.00'),
+        ('September 6, 2001', '2001-09-06 00:00:00.00'),
+        ('sep 6 01', '2001-09-06 00:00:00.00'),
+        ('Sep 6, 01', '2001-09-06 00:00:00.00'),
+        ('September 6, 01', '2001-09-06 00:00:00.00'),
+        ('30 Apr 2006 20:19:00', '2006-04-30 20:19:00.00'),
+        
+        # ISO formats
+        ('1994-11-06 08:49:37', '1994-11-06 08:49:37.00'),
+        ('010203', '2001-02-03 00:00:00.00'),
+        ('2001-02-03 00:00:00.00', '2001-02-03 00:00:00.00'),
+        ('2001-02 00:00:00.00', '2001-02-01 00:00:00.00'),
+        ('2001-02-03', '2001-02-03 00:00:00.00'),
+        ('2001-02', '2001-02-01 00:00:00.00'),
+        ('20000824/2300', '2000-08-24 23:00:00.00'),
+        ('20000824/0102', '2000-08-24 01:02:00.00'),
+        ('20000824', '2000-08-24 00:00:00.00'),
+        ('20000824/020301', '2000-08-24 02:03:01.00'),
+        ('20000824 020301', '2000-08-24 02:03:01.00'),
+        ('-20000824 020301', '-2000-08-24 02:03:01.00'),
+        ('20000824T020301', '2000-08-24 02:03:01.00'),
+        ('20000824 020301', '2000-08-24 02:03:01.00'),
+        ('2000-08-24 02:03:01.00', '2000-08-24 02:03:01.00'),
+        ('T020311', '%s 02:03:11.00' % t.date),
+        ('2003-12-9', '2003-12-09 00:00:00.00'),
+        ('03-12-9', '2003-12-09 00:00:00.00'),
+        ('003-12-9', '0003-12-09 00:00:00.00'),
+        ('0003-12-9', '0003-12-09 00:00:00.00'),
+        ('2003-1-9', '2003-01-09 00:00:00.00'),
+        ('03-1-9', '2003-01-09 00:00:00.00'),
+        ('003-1-9', '0003-01-09 00:00:00.00'),
+        ('0003-1-9', '0003-01-09 00:00:00.00'),

+        # US formats
+        ('06/11/94 08:49:37', '1994-06-11 08:49:37.00'),
+        ('11/06/94 08:49:37', '1994-11-06 08:49:37.00'),
+        ('9/23/2001', '2001-09-23 00:00:00.00'),
+        ('9-23-2001', '2001-09-23 00:00:00.00'),
+        ('9/6', '%s-09-06 00:00:00.00' % t.year),
+        ('09/6', '%s-09-06 00:00:00.00' % t.year),
+        ('9/06', '%s-09-06 00:00:00.00' % t.year),
+        ('09/06', '%s-09-06 00:00:00.00' % t.year),
+        ('9/6/2001', '2001-09-06 00:00:00.00'),
+        ('09/6/2001', '2001-09-06 00:00:00.00'),
+        ('9/06/2001', '2001-09-06 00:00:00.00'),
+        ('09/06/2001', '2001-09-06 00:00:00.00'),
+        ('9-6-2001', '2001-09-06 00:00:00.00'),
+        ('09-6-2001', '2001-09-06 00:00:00.00'),
+        ('9-06-2001', '2001-09-06 00:00:00.00'),
+        ('09-06-2001', '2001-09-06 00:00:00.00'),
+        ('2002/05/28 13:10:56.1147 GMT+2', '2002-05-28 13:10:56.11'),
+        ('1970/01/01', '1970-01-01 00:00:00.00'),
+        ('20021025 12:00 PM', '2002-10-25 12:00:00.00'),
+        ('20021025 12:30 PM', '2002-10-25 12:30:00.00'),
+        ('20021025 12:00 AM', '2002-10-25 00:00:00.00'),
+        ('20021025 12:30 AM', '2002-10-25 00:30:00.00'),
+        ('20021025 1:00 PM', '2002-10-25 13:00:00.00'),
+        ('20021025 2:00 AM', '2002-10-25 02:00:00.00'),
+        ('Thursday, February 06, 2003 12:40 PM', '2003-02-06 12:40:00.00'),
+        ('Mon, 18 Sep 2006 23:03:00', '2006-09-18 23:03:00.00'),

+        # European formats
+        ('6.11.2001, 08:49:37', '2001-11-06 08:49:37.00'),
+        ('06.11.2001, 08:49:37', '2001-11-06 08:49:37.00'),
+        ('06.11. 08:49:37', '%s-11-06 08:49:37.00' % t.year),
+        #('21/12/2002', '2002-12-21 00:00:00.00'),
+        #('21/08/2002', '2002-08-21 00:00:00.00'),
+        #('21-08-2002', '2002-08-21 00:00:00.00'),
+        #('13/01/03', '2003-01-13 00:00:00.00'),
+        #('13/1/03', '2003-01-13 00:00:00.00'),
+        #('13/1/3', '2003-01-13 00:00:00.00'),
+        #('13/01/3', '2003-01-13 00:00:00.00'),

+        # Time only formats
+        ('01:03', '%s 01:03:00.00' % t.date),
+        ('01:03:11', '%s 01:03:11.00' % t.date),
+        ('01:03:11.50', '%s 01:03:11.50' % t.date),
+        ('01:03:11.50 AM', '%s 01:03:11.50' % t.date),
+        ('01:03:11.50 PM', '%s 13:03:11.50' % t.date),
+        ('01:03:11.50 a.m.', '%s 01:03:11.50' % t.date),
+        ('01:03:11.50 p.m.', '%s 13:03:11.50' % t.date),

+        # Invalid formats
+        ('6..2001, 08:49:37', '%s 08:49:37.00' % t.date),
+        ('9//2001', 'ignore'),
+        ('06--94 08:49:37', 'ignore'),
+        ('20000824020301', 'ignore'),
+        ('20-03 00:00:00.00', 'ignore'),
+        ('9/2001', 'ignore'),
+        ('9-6', 'ignore'),
+        ('09-6', 'ignore'),
+        ('9-06', 'ignore'),
+        ('09-06', 'ignore'),
+        ('20000824/23', 'ignore'),
+        ('November 1994 08:49:37', 'ignore'),
+        ('Nov. 94', 'ignore'),
+        ('Mon, 18 Sep 2006 23:03:00 +1234567890', 'ignore'),

+        ]

+    # Add Unicode versions of all test vectors (Python 2 only;
+    # Python 3 has no 'unicode' builtin).
+    try:
+        unicode
+    except NameError:
+        pass
+    else:
+        k = []
+        for text, result in l:
+            k.append((unicode(text), result))
+        l.extend(k)

+    for text, reference in l:
+        try:
+            value = DateTimeFromString(text)
+        except:
+            if reference is None:
+                continue
+            else:
+                value = str(sys.exc_info()[1])
+        valid_datetime = validateDateTimeString(text)
+        valid_date = validateDateString(text)
+        if str(value) != reference and \
+           not reference == 'ignore':
+            print 'Failed to parse "%s"' % text
+            print '  expected: %s' % (reference or '<exception>')
+            print '  parsed:   %s' % value
+        elif _debug:
+            print 'Parsed "%s" successfully' % text
+        if _debug:
+            if not valid_datetime:
+                print '  "%s" failed date/time validation' % text
+            if not valid_date:
+                print '  "%s" failed date validation' % text

+    et = DateTime.now()
+    print 'done. (after %f seconds)' % ((et-t).seconds)

+    ###

+    print 'Testing DateTimeDelta Parser...'

+    t = DateTime.now()
+    l = [

+        # Literal formats
+        ('Sun Nov  6 08:49:37 1994', '08:49:37.00'),
+        ('1 day, 8 hours, 49 minutes, 37 seconds', '1:08:49:37.00'),
+        ('10 days, 8 hours, 49 minutes, 37 seconds', '10:08:49:37.00'),
+        ('8 hours, 49 minutes, 37 seconds', '08:49:37.00'),
+        ('49 minutes, 37 seconds', '00:49:37.00'),
+        ('37 seconds', '00:00:37.00'),
+        ('37.5 seconds', '00:00:37.50'),
+        ('8 hours later', '08:00:00.00'),
+        ('2 days', '2:00:00:00.00'),
+        ('2 days 23h', '2:23:00:00.00'),
+        ('2 days 23:57', '2:23:57:00.00'),
+        ('2 days 23:57:13', '2:23:57:13.00'),
+        ('', '00:00:00.00'),
+        
+        # ISO formats
+        ('1994-11-06 08:49:37', '08:49:37.00'),
+        ('10:08:49:37', '10:08:49:37.00'),
+        ('08:49:37', '08:49:37.00'),
+        ('08:49', '08:49:00.00'),
+        ('-10:08:49:37', '-10:08:49:37.00'),
+        ('-08:49:37', '-08:49:37.00'),
+        ('-08:49', '-08:49:00.00'),
+        ('- 10:08:49:37', '-10:08:49:37.00'),
+        ('- 08:49:37', '-08:49:37.00'),
+        ('- 08:49', '-08:49:00.00'),
+        ('10:08:49:37.5', '10:08:49:37.50'),
+        ('08:49:37.5', '08:49:37.50'),
+        ('10:8:49:37', '10:08:49:37.00'),
+        ('8:9:37', '08:09:37.00'),
+        ('8:9', '08:09:00.00'),
+        ('8', '00:00:00.00'),

+        # Invalid formats
+        #('', None),
+        #('8', None),

+        ]

+    for text, reference in l:
+        try:
+            value = DateTimeDeltaFromString(text)
+        except:
+            if reference is None:
+                continue
+            else:
+                value = str(sys.exc_info()[1])
+        if str(value) != reference and \
+           not reference == 'ignore':
+            print 'Failed to parse "%s"' % text
+            print '  expected: %s' % (reference or '<exception>')
+            print '  parsed:   %s' % value
+        elif _debug:
+            print 'Parsed "%s" successfully' % text

+    et = DateTime.now()
+    print 'done. (after %f seconds)' % ((et-t).seconds)

+    ###

+    print 'Testing Time Parser...'

+    t = DateTime.now()
+    l = [

+        # Standard formats
+        ('08:49:37 AM', '08:49:37.00'),
+        ('08:49:37 PM', '20:49:37.00'),
+        ('12:00:00 AM', '00:00:00.00'),
+        ('12:00:00 PM', '12:00:00.00'),
+        ('8:09:37', '08:09:37.00'),
+        ('8:09', '08:09:00.00'),
+        
+        # ISO formats
+        ('08:49:37', '08:49:37.00'),
+        ('08:49', '08:49:00.00'),
+        ('08:49:37.5', '08:49:37.50'),
+        ('08:49:37,5', '08:49:37.50'),
+        ('08:09', '08:09:00.00'),

+        # Invalid formats
+        ('', None),
+        ('8:9:37', 'XXX Should give an exception'),
+        ('08:9:37', 'XXX Should give an exception'),
+        ('8:9', None),
+        ('8', None),

+        ]

+    for text, reference in l:
+        try:
+            value = TimeFromString(text, formats=('standard', 'iso'))
+        except:
+            if reference is None:
+                continue
+            else:
+                value = str(sys.exc_info()[1])
+        if str(value) != reference and \
+           not reference == 'ignore':
+            print 'Failed to parse "%s"' % text
+            print '  expected: %s' % (reference or '<exception>')
+            print '  parsed:   %s' % value
+        elif _debug:
+            print 'Parsed "%s" successfully' % text

+    et = DateTime.now()
+    print 'done. (after %f seconds)' % ((et-t).seconds)
+
+if __name__ == '__main__':
+    # Run the self-tests when executed as a script.
+    _test()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/embedded/mx/DateTime/Timezone.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,169 @@
+# -*- coding: latin-1 -*-
+
+""" Timezone information.
+
+    XXX This module still has prototype status and is undocumented.
+
+    XXX Double check the offsets given in the zonetable below.
+
+    XXX Add TZ environment variable parsing functions. The REs are already
+        there.
+
+    Copyright (c) 1998-2000, Marc-Andre Lemburg; mailto:mal@lemburg.com
+    Copyright (c) 2000-2007, eGenix.com Software GmbH; mailto:info@egenix.com
+    See the documentation for further information on copyrights,
+    or contact the author. All Rights Reserved.
+
+"""
+import DateTime
+import re,string
+
+### REs
+
+# time zone parsing
+isozone = ('(?P<zone>[+-]\d\d:?(?:\d\d)?|Z)')
+zone = ('(?P<zone>[A-Z]+|[+-]\d\d?:?(?:\d\d)?)')
+zoneoffset = ('(?:'
+              '(?P<zonesign>[+-])?'
+              '(?P<hours>\d\d?)'
+              ':?'
+              '(?P<minutes>\d\d)?'
+              '(?P<extra>\d+)?'
+              ')'
+              )
+
+# TZ environment variable parsing
+dstswitchtime = ('(?P<hour>\d\d?):?'
+                 '(?P<minute>\d\d)?:?'
+                 '(?P<second>\d\d)?')
+dstswitch = ('(?:'
+              '(?P<doy>\d+)|'
+              '(?:J(?P<jdoy>\d+))|'
+              '(?:M(?P<month>\d+).(?P<week>\d+).(?P<day>\d+))'
+             ')'
+             '(?:/' + dstswitchtime + ')?'
+             )
+
+# XXX Doesn't work since re doesn't like multiple occurrences of
+#     group names.
+#tz = ('(?::(?P<filename>.+))|'
+#      '(?P<std>[A-Z]+)' + zoneoffset + 
+#      '(?:'
+#       '(?P<dst>[A-Z]+)' + zoneoffset + '?'+
+#       '(?:[;,]' + dstswitch + '[;,]' + dstswitch + ')'
+#      ')?'
+#      )
+
+# Compiled RE objects
+isozoneRE = re.compile(zone)
+zoneRE = re.compile(zone)
+zoneoffsetRE = re.compile(zoneoffset)
+#tzRE= re.compile(tz)
+
+### Time zone offset table
+#
+# The offset given here represent the difference between UTC and the
+# given time zone.
+#
+# Additions and corrections are always welcome :-)
+#
+# Note that some zone names are ambiguous, e.g. IST can refer to Irish
+# Summer Time, Indian Standard Time, Israel Standard Time. We've
+# usually chosen the meaning with the most wide-spread use.
+#
+zonetable = {
+              # Timezone abbreviations
+              # Std     Summer
+              # Values are offsets from UTC in (possibly fractional)
+              # hours; utc_offset() below multiplies them by oneHour.
+
+              # Standards
+              'UT':0,
+              'UTC':0,
+              'GMT':0,
+
+              # A few common timezone abbreviations
+              'CET':1,  'CEST':2, 'CETDST':2, # Central European
+              'MET':1,  'MEST':2, 'METDST':2, # Mean European
+              'MEZ':1,  'MESZ':2,             # Mitteleuropäische Zeit
+              'EET':2,  'EEST':3, 'EETDST':3, # Eastern Europe
+              'WET':0,  'WEST':1, 'WETDST':1, # Western Europe
+              'MSK':3,  'MSD':4,  # Moscow
+              'IST':5.5,          # India
+              'JST':9,            # Japan
+              'KST':9,            # Korea
+              'HKT':8,            # Hong Kong
+
+              # US time zones
+              'AST':-4, 'ADT':-3, # Atlantic
+              'EST':-5, 'EDT':-4, # Eastern
+              'CST':-6, 'CDT':-5, # Central
+              'MST':-7, 'MDT':-6, # Mountain
+              'PST':-8, 'PDT':-7, # Pacific
+
+              # Australian time zones
+              'CAST':9.5, 'CADT':10.5, # Central
+              'EAST':10,  'EADT':11,   # Eastern
+              'WAST':8,   'WADT':9,    # Western
+              'SAST':9.5, 'SADT':10.5, # Southern
+
+              # US military time zones
+              'Z': 0,
+              'A': 1,
+              'B': 2,
+              'C': 3,
+              'D': 4,
+              'E': 5,
+              'F': 6,
+              'G': 7,
+              'H': 8,
+              'I': 9,
+              'K': 10,
+              'L': 11,
+              'M': 12,
+              'N':-1,
+              'O':-2,
+              'P':-3,
+              'Q':-4,
+              'R':-5,
+              'S':-6,
+              'T':-7,
+              'U':-8,
+              'V':-9,
+              'W':-10,
+              'X':-11,
+              'Y':-12
+              }    
+
+def utc_offset(zone,
+
+               atoi=string.atoi,zoneoffset=zoneoffsetRE,
+               zonetable=zonetable,zerooffset=DateTime.DateTimeDelta(0),
+               oneMinute=DateTime.oneMinute,upper=string.upper):
+
+    """ utc_offset(zonestring)
+
+        Return the UTC time zone offset as DateTimeDelta instance.
+
+        zone must be string and can either be given as +-HH:MM,
+        +-HHMM, +-HH numeric offset or as time zone
+        abbreviation. Daylight saving time must be encoded into the
+        zone offset.
+
+        Timezone abbreviations are treated case-insensitive.
+
+        Raises ValueError for unknown abbreviations and malformed
+        numeric offsets.
+
+    """
+    # An empty (false-ish) zone yields a zero offset.
+    if not zone:
+        return zerooffset
+    uzone = upper(zone)
+    if zonetable.has_key(uzone):
+        # Table values are in hours (possibly fractional, e.g. IST)
+        return zonetable[uzone]*DateTime.oneHour
+    offset = zoneoffset.match(zone)
+    if not offset:
+        raise ValueError,'wrong format or unknown time zone: "%s"' % zone
+    zonesign,hours,minutes,extra = offset.groups()
+    if extra:
+        # Digits beyond the HH[:]MM part are not allowed
+        raise ValueError,'illegal time zone offset: "%s"' % zone
+    # Convert to signed minutes east of UTC
+    offset = int(hours or 0) * 60 + int(minutes or 0)
+    if zonesign == '-':
+        offset = -offset
+    return offset*oneMinute
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/embedded/mx/DateTime/__init__.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,21 @@
+""" mxDateTime - Date and time handling routines and types
+
+    Copyright (c) 1998-2000, Marc-Andre Lemburg; mailto:mal@lemburg.com
+    Copyright (c) 2000-2007, eGenix.com Software GmbH; mailto:info@egenix.com
+    See the documentation for further information on copyrights,
+    or contact the author. All Rights Reserved.
+"""
+from DateTime import *
+from DateTime import __version__
+
+## mock strptime implementation
+from datetime import datetime
+
+def strptime(datestr, formatstr, datetime=datetime):
+    """Mocked strptime implementation.
+
+    Parses datestr according to formatstr using the stdlib
+    datetime.datetime.strptime and converts the result to an
+    mx-style DateTime instance (sub-second precision is dropped).
+    """
+    date = datetime.strptime(datestr, formatstr)
+    return DateTime(date.year, date.month, date.day,
+                    date.hour, date.minute, date.second)
+
+# don't expose datetime directly (strptime keeps its own reference
+# through the 'datetime=datetime' keyword default bound at def time,
+# so deleting the module-level name here is safe)
+del datetime
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/embedded/mx/DateTime/mxDateTime_python.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,614 @@
+"""
+    Python implementation courtesy of Drew Csillag (StarMedia Network, Inc.)
+
+    This version has been somewhat modified by MAL. It is still fairly
+    rough though and not necessarily high performance... 
+
+    XXX Still needs testing and checkup !!!
+
+    WARNING: Using this file is only recommended if you really must
+    use it for some reason. It is not being actively maintained !
+
+"""
+
+__version__ = '1.2.0 [Python]'
+
+import time,types,exceptions,math
+
### Errors

class Error(exceptions.StandardError):
    """Base class for all errors raised by this mxDateTime emulation."""
    pass

class RangeError(Error):
    """Raised when a date or time component is outside its valid range."""
    pass
+
### Constants (internal use only)

# cumulative day counts before each month; index 0 is for normal years,
# index 1 for leap years; the trailing 13th entry is the year length
month_offset=(
    (0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 365),
    (0, 31, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 366),
    )

# month lengths; index 0 for normal years, index 1 for leap years
days_in_month=(
    (31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31),
    (31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31),
    )
+
+### Helpers
+
+def _IS_LEAPYEAR(d):
+    return ((d.year % 4 == 0)
+            and (
+                (d.year % 100 != 0)
+                or (d.year % 400 == 0)
+                )
+            )
+
+def _YEAROFFSET(d):
+    return (
+        (d.year - 1) * 365
+        + (d.year - 1) / 4
+        - (d.year - 1) / 100
+        + (d.year - 1) / 400
+        )
+
class _EmptyClass:
    """Placeholder type used to allocate instances without running __init__."""
    pass

def createEmptyObject(Class,
                      _EmptyClass=_EmptyClass):
    """Return an uninitialized instance of Class.

    The instance is allocated as an _EmptyClass and re-labelled, so
    Class.__init__ is never invoked.
    """
    obj = _EmptyClass()
    obj.__class__ = Class
    return obj
+
+### DateTime class
+
class DateTime:

    """A calendar date plus time of day (proleptic Gregorian calendar).

    State is kept both broken down (.year/.month/.day/.hour/.minute/.second)
    and in absolute form: .absdate (days since 0001-01-01 == day 1) and
    .abstime (seconds since midnight).

    Fixes vs. the original port: __int__ called int() on the bound
    method, ticks() passed nine arguments to time.mktime() (it takes one
    tuple), strftime() passed a list where a tuple is required, and the
    __add__/__sub__ normalization could leave a fractional day count in
    absdate (or fail to borrow a day in __sub__ for -1 < abstime < 0).
    """

    def __init__(self, year, month=1, day=1, hour=0, minute=0, second=0.0):
        """Create a DateTime; raises RangeError for out-of-range components.

        Negative `day` values count backwards from the end of the month.
        """
        second = 1.0 * second
        # NOTE(review): this guards `month` although the message talks
        # about the year, and it makes the negative-month branch below
        # unreachable; kept as-is to preserve historical behaviour.
        if month <= 0:
            raise RangeError("year out of range (>0)")

        # proleptic Gregorian leap year rule
        leap = (year % 4 == 0) and ((year % 100 != 0) or (year % 400 == 0))

        # negative values would indicate months relative to the year's end
        # (unreachable, see guard above)
        if month < 0:
            month = month + 13
        if not (1 <= month <= 12):
            raise RangeError("month out of range (1-12)")

        # negative values indicate days relative to the month's end
        if day < 0:
            day = day + days_in_month[leap][month - 1] + 1
        if not (1 <= day <= days_in_month[leap][month - 1]):
            raise RangeError("day out of range")

        # days contributed by all fully elapsed years; // keeps the result
        # integral on Python 3 and matches Python 2 integer "/" (floor)
        prev = year - 1
        yearoffset = prev * 365 + prev // 4 - prev // 100 + prev // 400
        absdate = day + month_offset[leap][month - 1] + yearoffset

        self.absdate = absdate
        self.year = year
        self.month = month
        self.day = day
        self.day_of_week = (absdate - 1) % 7    # 0 = Monday
        self.day_of_year = absdate - yearoffset
        self.days_in_month = days_in_month[leap][month - 1]
        # whole days relative to the COM/OLE epoch (1899-12-30)
        comdate = absdate - 693594

        if not (0 <= hour <= 23):
            raise RangeError("hour out of range (0-23)")
        if not (0 <= minute <= 59):
            raise RangeError("minute out of range (0-59)")
        # a 61st second is only tolerated as a 23:59 leap second
        if not (second >= 0.0 and
                (second < 60.0 or
                 (hour == 23 and minute == 59 and second < 61.0))):
            raise RangeError("second out of range (0.0 - <60.0; <61.0 for 23:59)")

        self.abstime = (hour * 3600 + minute * 60) + second
        self.hour = hour
        self.minute = minute
        self.second = second
        self.dst = -1        # DST state unknown
        self.tz = "???"      # timezone name unknown
        self.is_leapyear = leap
        self.yearoffset = yearoffset
        # NOTE(review): not an ISO 8601 (year, week, weekday) triple --
        # kept unchanged, TODO confirm intended semantics
        self.iso_week = (self.year, self.day, self.day_of_week)

        # in COM dates the time-of-day fraction grows away from zero on
        # both sides of the epoch
        if comdate < 0.0:
            comdate = comdate - self.abstime / 86400.0
        else:
            comdate = comdate + self.abstime / 86400.0
        self.comdate = comdate

    def COMDate(self):
        """Return the date as a COM/OLE serial date (fractional days)."""
        return self.comdate

    def __str__(self):
        return "%04d-%02d-%02d %02d:%02d:%05.2f" % (
            self.year, self.month, self.day, self.hour, self.minute,
            self.second)

    def __getattr__(self, attr):
        # derived values computed on demand
        if attr == 'mjd':
            # Modified Julian Day
            return (self - mjd0).days
        elif attr == 'jdn':
            # Julian Day Number
            return (self - jdn0).days
        elif attr == 'tjd':
            # Truncated Julian Day
            return (self - jdn0).days % 10000
        elif attr == 'tjd_myriad':
            return int((self - jdn0).days) // 10000 + 240
        elif attr == 'absdays':
            return self.absdate - 1 + self.abstime / 86400.0
        else:
            try:
                return self.__dict__[attr]
            except KeyError:
                raise AttributeError(attr)

    def __mul__(self, other):
        raise TypeError("bad operand type(s) for *")

    def __div__(self, other):
        raise TypeError("bad operand type(s) for /")

    def strftime(self, format_string="%c"):
        """Format via time.strftime(); sub-second precision is lost."""
        # time.strftime() requires a real 9-tuple of ints (a list raises
        # TypeError, floats a DeprecationWarning)
        items = tuple([int(item) for item in self.tuple()])
        return time.strftime(format_string, items)

    # Alias
    Format = strftime

    def tuple(self):
        """Return a struct_time-like 9-tuple (dst flag always -1)."""
        return (self.year, self.month, self.day, self.hour, self.minute,
                self.second, self.day_of_week, self.day_of_year, -1)

    def absvalues(self):
        """Return the (absdate, abstime) pair."""
        return self.absdate, self.abstime

    def __float__(self):
        return self.ticks()

    def __int__(self):
        # was int(self.ticks) -- int() applied to the bound method itself
        return int(self.ticks())

    def ticks(self, offset=0.0, dst=-1):
        """Return a POSIX timestamp (local time) keeping sub-second parts.

        Raises OverflowError outside the platform's mktime() range.
        """
        # was called with nine separate arguments; mktime() takes one tuple
        tticks = time.mktime((self.year, self.month, self.day, self.hour,
                              self.minute, int(self.second),
                              self.day_of_week, 0, dst))
        if tticks == -1:
            raise OverflowError("cannot convert value to a time value")
        # re-attach the fractional seconds mktime() cannot represent
        return (1.0 * tticks) + (self.abstime - int(self.abstime)) - offset

    def gmticks(self, offset=0.0):
        """Return a POSIX timestamp interpreting this value as UTC.

        NOTE(review): the `offset` argument is accepted but ignored, as
        in the original implementation.
        """
        from mx.DateTime import tz_offset
        return (self - tz_offset(self)).ticks()

    def gmtoffset(self):
        """Return the local timezone's offset from UTC as a delta."""
        gmtime = DateTime(*time.gmtime()[:6])
        return - (now() - gmtime)

    def __repr__(self):
        return "<DateTime object for '%d-%02d-%02d %02d:%02d:%05.2f' at %x>" % (
            self.year, self.month, self.day, self.hour, self.minute,
            self.second, id(self))

    def __cmp__(self, other):
        """Three-way comparison (Python 2 protocol): date first, then time."""
        if isinstance(other, DateTime):
            if self.absdate != other.absdate:
                if self.absdate < other.absdate:
                    return -1
                return 1
            if self.abstime != other.abstime:
                if self.abstime < other.abstime:
                    return -1
                return 1
            return 0
        if isinstance(other, (int, float)):
            # numbers sort before DateTime values
            return 1
        # None, strings and anything else sort after DateTime values
        return -1

    def __hash__(self):
        return hash(self.tuple())

    def __add__(self, other):
        """Add a DateTimeDelta or a (fractional) number of days."""
        abstime = self.abstime
        absdate = self.absdate

        if isinstance(other, DateTimeDelta):
            abstime = abstime + other.seconds
        elif isinstance(other, DateTime):
            raise TypeError("DateTime + DateTime is not supported")
        elif isinstance(other, (int, float)):
            abstime = abstime + other * 86400.0
        else:
            # give the other operand a chance, like the original did for
            # arbitrary instances
            radd = getattr(other.__class__, '__radd__', None)
            if radd is not None:
                return radd(other, self)
            raise TypeError("cannot add these two types")

        # renormalize so that 0.0 <= abstime < 86400.0
        if abstime >= 86400.0:
            # was: absdate += abstime / 86400.0, leaving a fractional
            # day count in absdate
            days = int(abstime / 86400.0)
            absdate = absdate + days
            abstime = abstime - 86400.0 * days
        elif abstime < 0.0:
            days = int((-abstime - 1) / 86400.0) + 1
            absdate = absdate - days
            abstime = abstime + 86400.0 * days

        if absdate < 1:
            raise RangeError("underflow while adding")

        return DateTimeFromAbsDateTime(absdate, abstime)

    def __radd__(self, other):
        return DateTime.__add__(other, self)

    def __sub__(self, other):
        """Subtract a delta/day count (-> DateTime) or another DateTime
        (-> DateTimeDelta)."""
        abstime = self.abstime
        absdate = self.absdate

        if isinstance(other, DateTimeDelta):
            abstime = abstime - other.seconds
        elif isinstance(other, DateTime):
            # date - date yields an elapsed-time delta
            return DateTimeDelta(absdate - other.absdate, 0.0, 0.0,
                                 abstime - other.abstime)
        elif isinstance(other, (int, float)):
            abstime = abstime - other * 86400.0
        else:
            rsub = getattr(other, '__rsub__', None)
            if rsub is not None:
                return rsub(self)
            raise TypeError("cannot subtract these two types")

        # renormalize so that 0.0 <= abstime < 86400.0
        if abstime >= 86400.0:
            # was: absdate += abstime / 86400.0 (fractional days)
            days = int(abstime / 86400.0)
            absdate = absdate + days
            abstime = abstime - 86400.0 * days
        elif abstime < 0.0:
            # was int((x) + 1), which fails to borrow a day when
            # -1 < abstime < 0; use the same formula as __add__
            days = int((-abstime - 1) / 86400.0) + 1
            absdate = absdate - days
            abstime = abstime + 86400.0 * days

        if absdate < 1:
            raise RangeError("underflow while adding")

        return DateTimeFromAbsDateTime(absdate, abstime)
+
# Constants
mjd0 = DateTime(1858, 11, 17)             # Modified Julian Day epoch
jdn0 = DateTime(-4713, 1, 1, 12, 0, 0.0)  # Julian Day Number epoch (noon)
+
+# Other DateTime constructors
+
def DateTimeFromCOMDate(comdate):
    """Create a DateTime from a COM/OLE serial date value.

    The integral part counts days from 1899-12-30; the fractional part
    encodes the time of day as a magnitude on both sides of the epoch.
    """
    whole_days = int(comdate)
    frac = (comdate - float(whole_days)) * 86400.0
    if frac < 0.0:
        frac = -frac
    dt = DateTimeFromAbsDateTime(whole_days + 693594, frac)
    dt.comdate = comdate
    return dt
+    
def DateTimeFromAbsDateTime(absdate, abstime):
    """Build a DateTime from an absolute date (days, 0001-01-01 == 1)
    and an absolute time (seconds since midnight).

    NOTE: relies on Python 2 integer division semantics for "/".
    """
    # Create the object without calling its default constructor
    dt = createEmptyObject(DateTime)

    # Init. the object
    abstime=1.0 * abstime
    # tolerate tiny negative rounding noise from upstream arithmetic
    if abstime < 0 and abstime > -0.001: abstime = 0.0
    if not (absdate > 0):
        raise RangeError, "absdate out of range (>0)"
    if not (abstime >= 0.0 and abstime <= 86400.0):
        raise RangeError, "abstime out of range (0.0 - 86400.0) <%s>" % abstime

    dt.absdate=absdate
    dt.abstime=abstime

    #calculate com date
    comdate = 1.0 * (dt.absdate - 693594)
    if comdate < 0.0:
        comdate = comdate - dt.abstime / 86400.0
    else:
        comdate = comdate + dt.abstime / 86400.0
    dt.comdate = comdate

    #calculate the date
    # first approximation using the mean Gregorian year length, then
    # refine iteratively below
    year = int((1.0 * absdate) / 365.2425)

    #newApproximation:
    while 1:
        # days before January 1st of `year` + 1
        yearoffset = year * 365 + year / 4 - year / 100 + year / 400
        if yearoffset >= absdate:
            # approximation overshot: try the previous year
            year = year - 1
            continue #goto newApproximation

        year = year + 1
        leap = (year % 4 == 0) and ((year % 100 != 0) or (year % 400 == 0))
        dayoffset = absdate - yearoffset
        if dayoffset > 365 and leap == 0:
            # day index beyond a normal year: re-approximate
            continue #goto newApproximation

        # locate the month containing dayoffset
        monthoffset = month_offset[leap]
        for month in range(1, 13):
            if monthoffset[month] >= dayoffset:
                break
        dt.year = year
        dt.month = month
        dt.day = dayoffset - month_offset[leap][month-1]
        dt.day_of_week = (dt.absdate - 1) % 7
        dt.day_of_year = dayoffset
        break
    
    #calculate the time
    inttime = int(abstime)
    hour = inttime / 3600
    minute = (inttime % 3600) / 60
    second = abstime - 1.0 * (hour*3600 + minute*60)
    dt.hour = hour;
    dt.minute = minute;
    dt.second = second;
    dt.days_in_month = days_in_month[leap][month - 1]
    dt.dst = -1
    dt.tz = "???"
    dt.is_leapyear = leap
    dt.yearoffset = yearoffset
    return dt
+
def now(
        time=time.time,float=float,localtime=time.localtime,
        round=round,int=int,DateTime=DateTime,floor=math.floor):
    """Return the current local date/time as a DateTime instance.

    The sub-second part of the clock reading, which localtime() drops,
    is added back onto the seconds component.
    """
    ticks = time()
    year, month, day, hour, minute, second = localtime(ticks)[:6]
    second = second + (ticks - floor(ticks))
    return DateTime(year, month, day, hour, minute, second)
+
def utc(
        time=time.time,float=float,gmtime=time.gmtime,
        round=round,int=int,DateTime=DateTime,floor=math.floor):
    """Return the current UTC date/time as a DateTime instance.

    Same as now(), but broken down with gmtime() instead of localtime().
    """
    ticks = time()
    year, month, day, hour, minute, second = gmtime(ticks)[:6]
    second = second + (ticks - floor(ticks))
    return DateTime(year, month, day, hour, minute, second)
+
# Aliases
Date = Timestamp = DateTime

# XXX Calendars are not supported:
def notSupported(*args,**kws):
    """Stub raised in place of C-version features missing from this port."""
    raise Error,'calendars are not supported by the Python version of mxDateTime'
JulianDateTime = notSupported
+
+### DateTimeDelta class
+               
class DateTimeDelta:

    """A duration, stored as a signed total number of seconds.

    The broken-down fields .day/.hour/.minute/.second always hold the
    magnitude of the duration; signed totals are available through
    .seconds/.minutes/.hours/.days.

    Fixes vs. the original port: __add__ silently returned None for
    unsupported operand types (now raises TypeError) and absvalues()
    returned a fractional day count with a zero remainder.
    """

    def __init__(self, days=0, hours=0, minutes=0, seconds=0):
        """Build a delta from (possibly fractional, possibly negative)
        day/hour/minute/second components."""
        seconds = seconds + (days * 86400.0 + hours * 3600.0 + minutes * 60.0)
        self.seconds = seconds
        # break the magnitude down into day/hour/minute/second parts
        # (int() auto-promotes on Python 2 where long() was used)
        if seconds < 0.0:
            seconds = -seconds
        day = int(seconds / 86400.0)
        seconds = seconds - (86400.0 * day)
        wholeseconds = int(seconds)
        hour = wholeseconds // 3600
        minute = (wholeseconds % 3600) // 60
        second = seconds - (hour * 3600.0 + minute * 60.0)
        self.day = day
        self.hour = hour
        self.minute = minute
        self.second = second
        # signed totals expressed in other units
        seconds = self.seconds
        self.minutes = seconds / 60.0
        self.hours = seconds / 3600.0
        self.days = seconds / 86400.0

    def __str__(self):
        """Format as '[-][day:]HH:MM:SS.ss' using the magnitude fields."""
        if self.seconds >= 0.0:
            sign = ''
        else:
            sign = '-'
        if self.day != 0:
            return '%s%s:%02d:%02d:%05.2f' % (
                sign, self.day, self.hour, self.minute, self.second)
        return '%s%02d:%02d:%05.2f' % (sign, self.hour, self.minute, self.second)

    def absvalues(self):
        """Return (whole days, remaining seconds), truncating toward zero.

        Fixed: the original used float division, yielding a fractional
        day count and an always-zero seconds part.
        """
        days = int(self.seconds / 86400)
        seconds = self.seconds - (days * 86400.0)
        return days, seconds

    def tuple(self):
        """Return the magnitude as a (day, hour, minute, second) tuple."""
        return (self.day, self.hour, self.minute, self.second)

    def strftime(self, format_string):
        """Not implemented for deltas."""
        raise NotImplementedError

    def __int__(self):
        return int(self.seconds)

    def __float__(self):
        return self.seconds

    def __cmp__(self, other, accuracy=0.0):
        """Three-way compare against a delta or a plain number of seconds.

        Differences up to `accuracy` seconds (and unsupported operand
        types) compare as equal, mirroring the original behaviour.
        """
        if isinstance(other, DateTimeDelta):
            diff = self.seconds - other.seconds
        elif isinstance(other, (int, float)):
            diff = self.seconds - other
        else:
            return 0
        if diff > accuracy:
            return 1
        if -diff > accuracy:
            return -1
        return 0

    def __getattr__(self, attr):
        # Only reached for attributes missing from the instance dict;
        # since __init__ always sets day/hour/minute/second, the negation
        # branch below is effectively unreachable (kept for compatibility).
        seconds = self.__dict__['seconds']
        if attr in ('hour', 'minute', 'second', 'day'):
            value = self.__dict__[attr]
            if seconds >= 0.0:
                return value
            return -value
        try:
            return self.__dict__[attr]
        except KeyError:
            raise AttributeError(attr)

    def __div__(self, other):
        """Divide by a number (scaling) or by another delta (ratio)."""
        if isinstance(other, (int, float)):
            return DateTimeDelta(0.0, 0.0, 0.0, self.seconds / other)
        if isinstance(other, DateTimeDelta):
            return DateTimeDelta(0.0, 0.0, 0.0, self.seconds / other.seconds)
        raise TypeError("bad operand types for /")

    # same protocol under Python 3 true division
    __truediv__ = __div__

    def __mul__(self, other):
        """Scale the duration by a plain number."""
        if isinstance(other, (int, float)):
            return DateTimeDelta(0.0, 0.0, 0.0, self.seconds * other)
        raise TypeError("cannot multiply these two types")

    def __rmul__(self, other):
        return self.__mul__(other)

    def __neg__(self):
        return DateTimeDelta(0.0, 0.0, 0.0, -self.seconds)

    def __repr__(self):
        # __str__ already produces the exact value formatting used here
        return "<DateTimeDelta object for '%s' at %x>" % (str(self), id(self))

    def __abs__(self):
        if self.seconds < 0:
            return -self
        return self

    def __nonzero__(self):
        return self.seconds != 0.0

    # Python 3 name for the same protocol
    __bool__ = __nonzero__

    def __add__(self, other):
        """Add a DateTime (yielding a DateTime) or another delta."""
        if isinstance(other, DateTime):
            return other + self
        if isinstance(other, DateTimeDelta):
            return DateTimeDelta(0.0, 0.0, 0.0, self.seconds + other.seconds)
        # was: silently fell through and returned None
        raise TypeError("cannot add these two types")

    # NOTE(review): __radd__ is still not implemented, as in the original.
+        
+# Other DateTimeDelta constructors
+
def TimeDelta(hour=0.0, minute=0.0, second=0.0):
    """Return a DateTimeDelta spanning the given time-of-day components.

    Fixed: the body referenced undefined names `hours`, `minutes` and
    `seconds` (the parameters are singular), raising NameError on every
    call.
    """
    return DateTimeDelta(0.0, hour, minute, second)

Time=TimeDelta
+
def DateTimeDeltaFromSeconds(seconds):
    """Return a DateTimeDelta spanning the given number of seconds."""
    return DateTimeDelta(0.0,0.0,0.0,seconds)

def DateTimeDeltaFromDays(days):
    """Return a DateTimeDelta spanning the given (fractional) number of days."""
    return DateTimeDelta(days)
+
### Types

# aliases matching the type names exported by the C implementation
DateTimeType = DateTime
DateTimeDeltaType = DateTimeDelta
+
### Functions

def cmp(a,b,acc):
    """Three-way compare two DateTime or two DateTimeDelta objects,
    treating differences of up to `acc` days as equality.

    NOTE(review): the sign convention looks inverted compared to the
    builtin cmp() (returns 1 when a is smaller beyond the accuracy
    window) -- TODO confirm against the C implementation before
    relying on it.
    """
    if isinstance(a,DateTime) and isinstance(b,DateTime):
        diff = a.absdays - b.absdays
        if (diff >= 0 and diff <= acc) or (diff < 0 and -diff <= acc):
            return 0
        elif diff < 0:
            return 1
        else:
            return -1

    elif isinstance(a,DateTimeDelta) and isinstance(b,DateTimeDelta):
        diff = a.days - b.days
        if (diff >= 0 and diff <= acc) or (diff < 0 and -diff <= acc):
            return 0
        elif diff < 0:
            return 1
        else:
            return -1

    else:
        raise TypeError,"objects must be DateTime[Delta] instances"
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/entities/__init__.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,432 @@
+"""base application's entities class implementation: `AnyEntity`
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from warnings import warn
+
+from logilab.common.deprecation import deprecated_function
+from logilab.common.decorators import cached
+
+from cubicweb import Unauthorized, typed_eid
+from cubicweb.common.utils import dump_class
+from cubicweb.common.entity import Entity
+from cubicweb.schema import FormatConstraint
+
+from cubicweb.interfaces import IBreadCrumbs
+
class AnyEntity(Entity):
    """an entity instance has e_schema automagically set on the class and
    instances have access to their issuing cursor
    """
    id = 'Any'
    # relation tags: map a relation (name, target type, role) to UI
    # categories controlling how it is edited/displayed (primary /
    # secondary / generated / generic, plus link-vs-create behaviour)
    __rtags__ = {
        'is' : ('generated', 'link'),
        'is_instance_of' : ('generated', 'link'),
        'identity' : ('generated', 'link'),

        # use primary and not generated for eid since it has to be an hidden
        # field in edition
        ('eid',                '*', 'subject'): 'primary',
        ('creation_date',      '*', 'subject'): 'generated',
        ('modification_date',  '*', 'subject'): 'generated',
        ('has_text',           '*', 'subject'): 'generated',

        ('require_permission', '*', 'subject') : ('generated', 'link'),
        ('owned_by',           '*', 'subject') : ('generated', 'link'),
        ('created_by',         '*', 'subject') : ('generated', 'link'),

        ('wf_info_for',        '*', 'subject') : ('generated', 'link'),
        ('wf_info_for',        '*', 'object')  : ('generated', 'link'),

        ('description',        '*', 'subject'): 'secondary',

        # XXX should be moved in their respective cubes
        ('filed_under',        '*', 'subject') : ('generic', 'link'),
        ('filed_under',        '*', 'object')  : ('generic', 'create'),
        # generated since there is a componant to handle comments
        ('comments',           '*', 'subject') : ('generated', 'link'),
        ('comments',           '*', 'object')  : ('generated', 'link'),
        }

    # declared interfaces, used by interface-based selectors
    __implements__ = (IBreadCrumbs,)
+    
+    @classmethod
+    def selected(cls, etype):
+        """the special Any entity is used as the default factory, so
+        the actual class has to be constructed at selection time once we
+        have an actual entity'type
+        """
+        if cls.id == etype:
+            return cls
+        usercls = dump_class(cls, etype)
+        usercls.id = etype
+        usercls.__initialize__()
+        return usercls
+    
    # attributes automatically fetched when entities of this type are
    # pre-loaded in bulk
    fetch_attrs = ('modification_date',)
    @classmethod
    def fetch_order(cls, attr, var):
        """class method used to control sort order when multiple entities of
        this type are fetched
        """
        # same policy as for unrelated-entity fetching by default
        return cls.fetch_unrelated_order(attr, var)
+    
+    @classmethod
+    def fetch_unrelated_order(cls, attr, var):
+        """class method used to control sort order when multiple entities of
+        this type are fetched to use in edition (eg propose them to create a
+        new relation on an edited entity).
+        """
+        if attr == 'modification_date':
+            return '%s DESC' % var
+        return None
+
+    @classmethod
+    def __initialize__(cls): 
+        super(ANYENTITY, cls).__initialize__() # XXX
+        eschema = cls.e_schema
+        eschema.format_fields = {}
+        # set a default_ATTR method for rich text format fields
+        for attr, formatattr in eschema.rich_text_fields():
+            if not hasattr(cls, 'default_%s' % formatattr):
+                setattr(cls, 'default_%s' % formatattr, cls._default_format)
+            eschema.format_fields[formatattr] = attr
+            
    def _default_format(self):
        # default value for <attr>_format fields: the preferred text
        # format from user/site properties (see __initialize__)
        return self.req.property_value('ui.default-text-format')
+
+    def use_fckeditor(self, attr):
+        """return True if fckeditor should be used to edit entity's attribute named
+        `attr`, according to user preferences
+        """
+        req = self.req
+        if req.property_value('ui.fckeditor') and self.has_format(attr):
+            if self.has_eid() or '%s_format' % attr in self:
+                return self.format(attr) == 'text/html'
+            return req.property_value('ui.default-text-format') == 'text/html'
+        return False
+    
+    # meta data api ###########################################################
+
+    def dc_title(self):
+        """return a suitable *unicode* title for this entity"""
+        for rschema, attrschema in self.e_schema.attribute_definitions():
+            if rschema.meta:
+                continue
+            value = self.get_value(rschema.type)
+            if value:
+                # make the value printable (dates, floats, bytes, etc.)
+                return self.printable_value(rschema.type, value, attrschema.type,
+                                            format='text/plain')
+        return u'%s #%s' % (self.dc_type(), self.eid)
+
    def dc_long_title(self):
        """return a more detailled title for this entity"""
        # no extra detail by default; subclasses may override
        return self.dc_title()
+    
+    def dc_description(self, format='text/plain'):
+        """return a suitable description for this entity"""
+        if hasattr(self, 'description'):
+            return self.printable_value('description', format=format)
+        return u''
+
+    def dc_authors(self):
+        """return a suitable description for the author(s) of the entity"""
+        try:
+            return ', '.join(u.name() for u in self.owned_by)
+        except Unauthorized:
+            return u''
+
+    def dc_creator(self):
+        """return a suitable description for the creator of the entity"""
+        if self.creator:
+            return self.creator.name()
+        return u''
+
    def dc_date(self, date_format=None):# XXX default to ISO 8601 ?
        """return latest modification date of this entity

        :param date_format: strftime-style format string; when None the
          application default is used -- TODO confirm
        """
        return self.format_date(self.modification_date, date_format=date_format)
+
    def dc_type(self, form=''):
        """return the display name for the type of this entity (translated)

        :param form: grammatical form passed to display_name (presumably
          eg 'plural') -- TODO confirm
        """
        return self.e_schema.display_name(self.req, form)
    # backward compatibility alias, scheduled for removal
    display_name = deprecated_function(dc_type) # require agueol > 0.8.1, asteretud > 0.10.0 for removal
+
    def dc_language(self):
        """return language used by this entity (translated)"""
        # check if entities has internationalizable attributes
        # XXX one is enough or check if all String attributes are internationalizable?
        for rschema, attrschema in self.e_schema.attribute_definitions():
            if rschema.rproperty(self.e_schema, attrschema,
                                 'internationalizable'):
                # translatable content: use the user's language preference
                return self.req._(self.req.user.property_value('ui.language'))
        # otherwise fall back to the site-wide language setting
        return self.req._(self.vreg.property_value('ui.language'))
+        
+    @property
+    def creator(self):
+        """return the EUser entity which has created this entity, or None if
+        unknown or if the curent user doesn't has access to this euser
+        """
+        try:
+            return self.created_by[0]
+        except (Unauthorized, IndexError):
+            return None
+
    def breadcrumbs(self, view=None, recurs=False):
        """Return the breadcrumb path for this entity: a list of ancestor
        entities (possibly ended by a title string) down to this entity.
        """
        path = [self]
        if hasattr(self, 'parent'):
            parent = self.parent()
            if parent is not None:
                try:
                    path = parent.breadcrumbs(view, True) + [self]
                except TypeError:
                    # backward compat: parent class still implements the
                    # old single-argument breadcrumbs()
                    warn("breadcrumbs method's now takes two arguments "
                         "(view=None, recurs=False), please update",
                         DeprecationWarning)
                    path = parent.breadcrumbs(view) + [self]
        if not recurs:
            # only the outermost call appends the trailing title element
            if view is None:
                if 'vtitle' in self.req.form:
                    # embeding for instance
                    path.append( self.req.form['vtitle'] )
            elif view.id != 'primary' and hasattr(view, 'title'):
                path.append( self.req._(view.title) )
        return path
+
+    # abstractions making the whole things (well, some at least) working ######
+    
    @classmethod
    def get_widget(cls, rschema, x='subject'):
        """return a widget to view or edit a relation

        :param rschema: relation schema, or relation name as a string
        :param x: role of this entity type in the relation ('subject' or
          'object')

        notice that when the relation support multiple target types, the widget
        is necessarily the same for all those types
        """
        # let ImportError propage if web par isn't available
        from cubicweb.web.widgets import widget
        if isinstance(rschema, basestring):
            rschema = cls.schema.rschema(rschema)
        # pick the first target type; the widget is shared by all of them
        if x == 'subject':
            tschema = rschema.objects(cls.e_schema)[0]
            wdg = widget(cls.vreg, cls, rschema, tschema, 'subject')
        else:
            tschema = rschema.subjects(cls.e_schema)[0]
            wdg = widget(cls.vreg, tschema, rschema, cls, 'object')
        return wdg
+        
+    def sortvalue(self, rtype=None):
+        """return a value which can be used to sort this entity or given
+        entity's attribute
+        """
+        if rtype is None:
+            return self.dc_title().lower()
+        value = self.get_value(rtype)
+        # do not restrict to `unicode` because Bytes will return a `str` value
+        if isinstance(value, basestring):
+            return self.printable_value(rtype, format='text/plain').lower()
+        return value
+
+    def after_deletion_path(self):
+        """return (path, parameters) which should be used as redirect
+        information when this entity is being deleted
+        """
+        return str(self.e_schema).lower(), {}
+
    def add_related_schemas(self):
        """this is actually used ui method to generate 'addrelated' actions from
        the schema.

        If you're using explicit 'addrelated' actions for an entity types, you
        should probably overrides this method to return an empty list else you
        may get some unexpected actions.

        Yields (rschema, teschema, role) triples for each relation and
        target type the current user is allowed to create from here.
        """
        req = self.req
        eschema = self.e_schema
        for role, rschemas in (('subject', eschema.subject_relations()),
                               ('object', eschema.object_relations())):
            for rschema in rschemas:
                # attribute relations can't yield 'addrelated' actions
                if rschema.is_final():
                    continue
                # check the relation can be added as well
                if role == 'subject'and not rschema.has_perm(req, 'add', fromeid=self.eid):
                    continue
                if role == 'object'and not rschema.has_perm(req, 'add', toeid=self.eid):
                    continue
                # check the target types can be added as well
                for teschema in rschema.targets(eschema, role):
                    # only propose targets configured in 'create' mode
                    if not self.relation_mode(rschema, teschema, role) == 'create':
                        continue
                    if teschema.has_local_role('add') or teschema.has_perm(req, 'add'):
                        yield rschema, teschema, role
+
    def relation_mode(self, rtype, targettype, role='subject'):
        """return a string telling if the given relation is usually created
        to a new entity ('create' mode) or to an existant entity ('link' mode)
        """
        # delegate to the relation tags registry built from __rtags__
        return self.rtags.get_mode(rtype, targettype, role)
+
+    # edition helper functions ################################################
+    
+    def relations_by_category(self, categories=None, permission=None):
+        """generate (relation schema, target schemas, role) for each relation
+        definition of this entity's schema, keeping only those whose rtags
+        intersect `categories` (if given) and for which the user has
+        `permission` (if given)
+        """
+        if categories is not None:
+            # normalize to a frozenset so intersection tests below work
+            if not isinstance(categories, (list, tuple, set, frozenset)):
+                categories = (categories,)
+            if not isinstance(categories, (set, frozenset)):
+                categories = frozenset(categories)
+        eschema, rtags  = self.e_schema, self.rtags
+        if self.has_eid():
+            eid = self.eid
+        else:
+            eid = None
+        for rschema, targetschemas, role in eschema.relation_definitions(True):
+            # skip 'identity' / 'has_text' -- presumably system maintained
+            # relations, not user editable
+            if rschema in ('identity', 'has_text'):
+                continue
+            # check category first, potentially lower cost than checking
+            # permission which may imply rql queries
+            if categories is not None:
+                targetschemas = [tschema for tschema in targetschemas
+                                 if rtags.get_tags(rschema.type, tschema.type, role).intersection(categories)]
+                if not targetschemas:
+                    continue
+            tags = rtags.get_tags(rschema.type, role=role)
+            if permission is not None:
+                # tag allowing to hijack the permission machinery when
+                # permission is not verifiable until the entity is actually
+                # created...
+                if eid is None and ('%s_on_new' % permission) in tags:
+                    yield (rschema, targetschemas, role)
+                    continue
+                if rschema.is_final():
+                    if not rschema.has_perm(self.req, permission, eid):
+                        continue
+                elif role == 'subject':
+                    if not ((eid is None and rschema.has_local_role(permission)) or
+                            rschema.has_perm(self.req, permission, fromeid=eid)):
+                        continue
+                    # on relation with cardinality 1 or ?, we need delete perm as well
+                    # if the relation is already set
+                    if (permission == 'add'
+                        and rschema.cardinality(eschema, targetschemas[0], role) in '1?'
+                        and self.has_eid() and self.related(rschema.type, role)
+                        and not rschema.has_perm(self.req, 'delete', fromeid=eid,
+                                                 toeid=self.related(rschema.type, role)[0][0])):
+                        continue
+                elif role == 'object':
+                    if not ((eid is None and rschema.has_local_role(permission)) or
+                            rschema.has_perm(self.req, permission, toeid=eid)):
+                        continue
+                    # on relation with cardinality 1 or ?, we need delete perm as well
+                    # if the relation is already set
+                    if (permission == 'add'
+                        and rschema.cardinality(targetschemas[0], eschema, role) in '1?'
+                        and self.has_eid() and self.related(rschema.type, role)
+                        and not rschema.has_perm(self.req, 'delete', toeid=eid,
+                                                 fromeid=self.related(rschema.type, role)[0][0])):
+                        continue
+            yield (rschema, targetschemas, role)
+
+    def srelations_by_category(self, categories=None, permission=None):
+        result = []
+        for rschema, ttypes, target in self.relations_by_category(categories,
+                                                                  permission):
+            if rschema.is_final():
+                continue
+            result.append( (rschema.display_name(self.req, target), rschema, target) )
+        return sorted(result)
+                
+    def attribute_values(self, attrname):
+        """return a sequence of values for the attribute or relation `attrname`.
+
+        When the entity has an eid (or the value is already set locally),
+        values come from the entity itself; for an entity being created, the
+        request form is searched first, then local values, then class /
+        schema defaults.
+        """
+        if self.has_eid() or attrname in self:
+            try:
+                values = self[attrname]
+            except KeyError:
+                values = getattr(self, attrname)
+            # actual relation return a list of entities
+            if isinstance(values, list):
+                return [v.eid for v in values]
+            return (values,)
+        # the entity is being created, try to find default value for
+        # this attribute
+        try:
+            values = self.req.form[attrname]
+        except KeyError:
+            try:
+                values = self[attrname] # copying
+            except KeyError:
+                # a 'default_<attrname>' class attribute takes precedence
+                # over the schema default; callables are called to get the
+                # actual value
+                values = getattr(self, 'default_%s' % attrname,
+                                 self.e_schema.default(attrname))
+                if callable(values):
+                    values = values()
+        if values is None:
+            values = ()
+        elif not isinstance(values, (list, tuple)):
+            values = (values,)
+        return values
+
+    def linked_to(self, rtype, target, remove=True):
+        """if entity should be linked to another using __linkto form param for
+        the given relation/target, return eids of related entities
+
+        This method is consuming matching link-to information from form params
+        if `remove` is True (by default).
+        """
+        try:
+            # per-instance cache; the double leading underscore makes the
+            # attribute name-mangled, hence private to this class
+            return self.__linkto[(rtype, target)]
+        except AttributeError:
+            self.__linkto = {}
+        except KeyError:
+            pass
+        linktos = list(self.req.list_form_param('__linkto'))
+        linkedto = []
+        # each __linkto form value has the shape 'rtype:eid:target'
+        for linkto in linktos[:]:
+            ltrtype, eid, lttarget = linkto.split(':')
+            if rtype == ltrtype and target == lttarget:
+                # delete __linkto from form param to avoid it being added as
+                # hidden input
+                if remove:
+                    linktos.remove(linkto)
+                    self.req.form['__linkto'] = linktos
+                linkedto.append(typed_eid(eid))
+        self.__linkto[(rtype, target)] = linkedto
+        return linkedto
+
+    def pre_web_edit(self):
+        """callback called by the web editcontroller when an entity will be
+        created/modified, to let a chance to do some entity specific stuff.
+
+        Do nothing by default.
+        """
+        pass
+    
+    # server side helpers #####################################################
+    
+    def notification_references(self, view):
+        """used to control References field of email send on notification
+        for this entity. `view` is the notification view.
+        
+        Should return a list of eids which can be used to generate message ids
+        of previously sent email
+        """
+        return ()
+
+# XXX:  store a reference to the AnyEntity class since it is hijacked in goa
+#       configuration and we need the actual reference to avoid infinite loops
+#       in mro
+ANYENTITY = AnyEntity
+
+def fetch_config(fetchattrs, mainattr=None, pclass=AnyEntity, order='ASC'):
+    if pclass is ANYENTITY:
+        pclass = AnyEntity # AnyEntity and ANYENTITY may be different classes
+    if pclass is not None:
+        fetchattrs += pclass.fetch_attrs
+    if mainattr is None:
+        mainattr = fetchattrs[0]
+    @classmethod
+    def fetch_order(cls, attr, var):
+        if attr == mainattr:
+            return '%s %s' % (var, order)
+        return None
+    return fetchattrs, fetch_order
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/entities/authobjs.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,139 @@
+from logilab.common.decorators import cached
+
+from cubicweb import Unauthorized
+from cubicweb.entities import AnyEntity, fetch_config
+
+class EGroup(AnyEntity):
+    """customized class for EGroup entities"""
+    id = 'EGroup'
+    fetch_attrs, fetch_order = fetch_config(['name'])
+    # 'create' mode: in_group is usually set by creating the related entity
+    __rtags__ = dict(in_group='create')
+
+    def db_key_name(self):
+        """XXX goa specific"""
+        return self.get('name')
+
+    
+class EUser(AnyEntity):
+    """customized class for EUser entities, giving access to the user's
+    groups and properties plus permission helpers
+    """
+    id = 'EUser'
+    fetch_attrs, fetch_order = fetch_config(['login', 'firstname', 'surname'])
+    
+    __rtags__ = { 'firstname'  : 'secondary',
+                  'surname'    : 'secondary',
+                  'last_login_time' : 'generated',
+                  'todo_by'    : 'create',
+                  'use_email'  : 'inlineview', # 'primary',
+                  'in_state'   : 'primary', 
+                  'in_group'   : 'primary', 
+                  ('owned_by', '*', 'object') : ('generated', 'link'),
+                  ('created_by','*','object') : ('generated', 'link'),
+                  }
+    
+    # used by repository to check if  the user can log in or not
+    AUTHENTICABLE_STATES = ('activated',)
+
+    # low level utilities #####################################################
+    def __init__(self, *args, **kwargs):
+        """accept optional `groups` and `properties` keyword arguments to
+        prefill the corresponding caches
+        """
+        groups = kwargs.pop('groups', None)
+        properties = kwargs.pop('properties', None)
+        super(EUser, self).__init__(*args, **kwargs)
+        if groups is not None:
+            self._groups = groups
+        if properties is not None:
+            self._properties = properties
+            
+    @property
+    def groups(self):
+        """set of the names of the groups the user belongs to (cached)"""
+        try:
+            return self._groups
+        except AttributeError:
+            self._groups = set(g.name for g in self.in_group)
+            return self._groups
+        
+    @property
+    def properties(self):
+        """dictionary mapping property pkey to value for this user (cached)"""
+        try:
+            return self._properties
+        except AttributeError:
+            self._properties = dict((p.pkey, p.value) for p in self.reverse_for_user)
+            return self._properties
+
+    def property_value(self, key):
+        """return the typed value of property `key` for this user, falling
+        back to the registry's value when unset or invalid
+        """
+        try:
+            # properties stored on the user aren't correctly typed
+            # (e.g. all values are unicode string)
+            return self.vreg.typed_value(key, self.properties[key])
+        except KeyError:
+            pass
+        except ValueError:
+            self.warning('incorrect value for eproperty %s of user %s', key, self.login)
+        return self.vreg.property_value(key)
+    
+    def matching_groups(self, groups):
+        """return the number of the given group(s) in which the user is
+
+        :type groups: str or iterable(str)
+        :param groups: a group name or an iterable on group names
+        """
+        if isinstance(groups, basestring):
+            groups = frozenset((groups,))
+        elif isinstance(groups, (tuple, list)):
+            groups = frozenset(groups)
+        return len(groups & self.groups)
+
+    def is_in_group(self, group):
+        """convenience / shortcut method to test if the user belongs to `group`
+        """
+        return self.matching_groups(group) == 1
+
+    def owns(self, eid):
+        """return a truthy result set when the entity `eid` is owned by this
+        user, or False when not allowed to check; results are cached per eid
+        """
+        if hasattr(self.req, 'unsafe_execute'):
+            # use unsafe_execute on the repository side, in case
+            # session's user doesn't have access to EUser
+            execute = self.req.unsafe_execute
+        else:
+            execute = self.req.execute
+        try:
+            return execute('Any X WHERE X eid %(x)s, X owned_by U, U eid %(u)s',
+                           {'x': eid, 'u': self.eid}, 'x')
+        except Unauthorized:
+            return False
+    owns = cached(owns, keyarg=1)
+
+    def has_permission(self, pname, contexteid=None):
+        """return a truthy result set when the user is in a group granted the
+        EPermission named `pname`, optionally in the context of entity
+        `contexteid`; False when the query is not allowed
+        """
+        rql = 'Any P WHERE P is EPermission, U eid %(u)s, U in_group G, '\
+              'P name %(pname)s, P require_group G'
+        kwargs = {'pname': pname, 'u': self.eid}
+        cachekey = None
+        if contexteid is not None:
+            rql += ', X require_permission P, X eid %(x)s'
+            kwargs['x'] = contexteid
+            cachekey = 'x'
+        try:
+            return self.req.execute(rql, kwargs, cachekey)
+        except Unauthorized:
+            return False
+    
+    # presentation utilities ##################################################
+    
+    def name(self):
+        """construct a name using firstname / surname or login if not defined"""
+        
+        if self.firstname and self.surname:
+            return self.req._('%(firstname)s %(surname)s') % {
+                'firstname': self.firstname, 'surname' : self.surname}
+        if self.firstname:
+            return self.firstname
+        return self.login
+
+    def dc_title(self):
+        return self.login
+
+    dc_long_title = name
+
+    def db_key_name(self):
+        """XXX goa specific"""
+        return self.get('login')
+
+from logilab.common.deprecation import class_renamed
+# backward compatibility: 'Euser' is the deprecated former name of EUser
+Euser = class_renamed('Euser', EUser)
+Euser.id = 'Euser'
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/entities/lib.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,166 @@
+"""entity classes for optional library entities
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from urlparse import urlsplit, urlunsplit
+from mx.DateTime import now
+
+from logilab.common.decorators import cached
+
+from cubicweb.common.entity import _marker
+from cubicweb.entities import AnyEntity, fetch_config
+
+def mangle_email(address):
+    try:
+        name, host = address.split('@', 1)
+    except ValueError:
+        return address
+    return '%s at %s' % (name, host.replace('.', ' dot '))
+
+class EmailAddress(AnyEntity):
+    """customized class for EmailAddress entities"""
+    id = 'EmailAddress'
+    fetch_attrs, fetch_order = fetch_config(['address', 'alias', 'canonical'])
+
+    widgets = {
+        'address' : "EmailWidget",
+        }
+
+    def dc_title(self):
+        """return '<alias> <address>' when an alias is set, else the
+        (possibly mangled) address
+        """
+        if self.alias:
+            return '%s <%s>' % (self.alias, self.display_address())
+        return self.display_address()
+    
+    @property
+    def email_of(self):
+        """entity using this address through use_email, or False when none"""
+        return self.reverse_use_email and self.reverse_use_email[0]
+    
+    @cached
+    def canonical_form(self):
+        """return the canonical address identical to this one: self when
+        already canonical, else the first canonical address found through
+        identical_to, or None
+        """
+        if self.canonical:
+            return self
+        rql = 'EmailAddress X WHERE X identical_to Y, X canonical TRUE, Y eid %(y)s'
+        cnrset = self.req.execute(rql, {'y': self.eid}, 'y')
+        if cnrset:
+            return cnrset.get_entity(0, 0)
+        return None
+
+    def related_emails(self, skipeids=None):
+        """yield email entities sent from or to this address, most recent
+        first; eids in `skipeids` are skipped and the set is updated as a
+        side effect
+        """
+        # XXX move to eemail
+        # check email relations are in the schema first
+        subjrels = self.e_schema.object_relations()
+        if not ('sender' in subjrels and 'recipients' in subjrels):
+            return
+        rql = 'DISTINCT Any X, S, D ORDERBY D DESC WHERE X sender Y or X recipients Y, X subject S, X date D, Y eid %(y)s'
+        rset = self.req.execute(rql, {'y': self.eid}, 'y')
+        if skipeids is None:
+            skipeids = set()
+        for i in xrange(len(rset)):
+            eid = rset[i][0]
+            if eid in skipeids:
+                continue
+            skipeids.add(eid)
+            yield rset.get_entity(i, 0)
+
+    def display_address(self):
+        """return the address, mangled when the 'mangle-emails' option is set"""
+        if self.vreg.config['mangle-emails']:
+            return mangle_email(self.address)
+        return self.address
+
+    def printable_value(self, attr, value=_marker, attrtype=None,
+                        format='text/html'):
+        """overridden to return displayable address when necessary"""
+        if attr == 'address':
+            return self.display_address()
+        return super(EmailAddress, self).printable_value(attr, value, attrtype, format)
+
+    def after_deletion_path(self):
+        """return (path, parameters) which should be used as redirect
+        information when this entity is being deleted
+        """
+        if self.email_of:
+            return self.email_of.rest_path(), {}
+        return super(EmailAddress, self).after_deletion_path()
+
+
+from logilab.common.deprecation import class_renamed
+# backward compatibility: deprecated former name of the EmailAddress class
+Emailaddress = class_renamed('Emailaddress', EmailAddress)
+Emailaddress.id = 'Emailaddress'
+
+
+class EProperty(AnyEntity):
+    id = 'EProperty'
+
+    fetch_attrs, fetch_order = fetch_config(['pkey', 'value'])
+
+    widgets = {
+        'pkey' : "PropertyKeyWidget",
+        'value' : "PropertyValueWidget",
+        }
+    
+    rest_attr = 'pkey'
+
+    def typed_value(self):
+        return self.vreg.typed_value(self.pkey, self.value)
+        
+    def dc_description(self):
+        return self.req._(self.vreg.property_info(self.pkey)['help'])
+
+    def after_deletion_path(self):
+        """return (path, parameters) which should be used as redirect
+        information when this entity is being deleted
+        """
+        return 'view', {}
+        
+
+class Bookmark(AnyEntity):
+    """customized class for Bookmark entities"""
+    id = 'Bookmark'
+    fetch_attrs, fetch_order = fetch_config(['title', 'path'])
+    widgets = {
+        'path' : "StringWidget",
+        }
+    __rtags__ = {'path': 'primary'}
+
+    def actual_url(self):
+        """return the url this bookmark links to, propagating the bookmark's
+        title as a 'vtitle' query parameter
+        """
+        url = self.req.build_url(self.path)
+        if self.title:
+            # urlsplit gives (scheme, netloc, path, query, fragment):
+            # index 3 is the query string
+            urlparts = list(urlsplit(url))
+            if urlparts[3]:
+                urlparts[3] += '&vtitle=%s' % self.req.url_quote(self.title)
+            else:
+                urlparts[3] = 'vtitle=%s' % self.req.url_quote(self.title)
+            url = urlunsplit(urlparts)
+        return url
+
+    def action_url(self):
+        """url of this bookmark's follow action (absolute url + '/follow')"""
+        return self.absolute_url() + '/follow'
+
+
+class Card(AnyEntity):
+    """customized class for Card entities"""
+    id = 'Card'
+    rest_attr = 'wikiid'
+    
+    fetch_attrs, fetch_order = fetch_config(['title'])
+
+    def dc_title(self):
+        return self.title
+
+    def dc_description(self, format='text/plain'):
+        return self.synopsis or u''
+
+class ECache(AnyEntity):
+    """Cache"""
+    id = 'ECache'
+    
+    fetch_attrs, fetch_order = fetch_config(['name'])
+
+    def touch(self):
+        """update the cache entity's timestamp to the current time"""
+        self.req.execute('SET X timestamp %(t)s WHERE X eid %(x)s', {'t': now(), 'x': self.eid}, 'x')
+
+    def valid(self, date):
+        """tell whether the cache is still valid for content dated `date`,
+        i.e. whether it has been touched since then
+        """
+        return date < self.timestamp
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/entities/schemaobjs.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,229 @@
+"""schema definition related entities
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from logilab.common.decorators import cached
+
+from cubicweb import ValidationError
+from cubicweb.schema import ERQLExpression, RRQLExpression
+
+from cubicweb.entities import AnyEntity, fetch_config
+
+
+class EEType(AnyEntity):
+    """customized class for EEType entities (entity types of the schema)"""
+    id = 'EEType'
+    fetch_attrs, fetch_order = fetch_config(['name'])
+    __rtags__ = {
+        ('final',         '*', 'subject'): 'generated',
+        
+        ('state_of',      '*', 'object'): 'create',
+        ('transition_of', '*', 'object'): 'create',
+        ('from_entity',   '*', 'object'): 'link',
+        ('to_entity',     '*', 'object'): 'link',
+        }
+    def dc_title(self):
+        """return the translated entity type name"""
+        return self.req._(self.name)
+    
+    def dc_long_title(self):
+        """return the title decorated with UML-like stereotypes ('meta',
+        'final') when applicable
+        """
+        stereotypes = []
+        _ = self.req._
+        if self.meta:
+            stereotypes.append(_('meta'))
+        if self.final:
+            stereotypes.append(_('final'))
+        if stereotypes:
+            return u'%s <<%s>>' % (self.dc_title(), ', '.join(stereotypes))
+        return self.dc_title()
+
+    def db_key_name(self):
+        """XXX goa specific"""
+        return self.get('name')
+
+
+class ERType(AnyEntity):
+    """customized class for ERType entities (relation types of the schema)"""
+    id = 'ERType'
+    fetch_attrs, fetch_order = fetch_config(['name'])
+    __rtags__ = {
+        ('final',         '*', 'subject'): 'generated',
+        
+        ('relation_type', '*', 'object') : 'create',
+        }
+    
+    def dc_title(self):
+        """return the translated relation type name"""
+        return self.req._(self.name)
+    
+    def dc_long_title(self):
+        """return the title decorated with UML-like stereotypes for the
+        relation type's boolean properties
+        """
+        stereotypes = []
+        _ = self.req._
+        if self.meta:
+            stereotypes.append(_('meta'))
+        if self.symetric:
+            stereotypes.append(_('symetric'))
+        if self.inlined:
+            stereotypes.append(_('inlined'))
+        if self.final:
+            stereotypes.append(_('final'))
+        if stereotypes:
+            return u'%s <<%s>>' % (self.dc_title(), ', '.join(stereotypes))
+        return self.dc_title()
+
+    def inlined_changed(self, inlined):
+        """check inlining is necessary and possible:
+        
+        * return False if nothing has changed
+        * raise ValidationError if inlining isn't possible
+        * eventually return True
+        """
+        rtype = self.name
+        rschema = self.schema.rschema(rtype)
+        if inlined == rschema.inlined:
+            return False
+        if inlined:
+            # inlining requires a subject cardinality of '1' or '?' on every
+            # relation definition of this type
+            for (stype, otype) in rschema.iter_rdefs():
+                card = rschema.rproperty(stype, otype, 'cardinality')[0]
+                if not card in '?1':
+                    msg = self.req._("can't set inlined=%(inlined)s, "
+                                     "%(stype)s %(rtype)s %(otype)s "
+                                     "has cardinality=%(card)s")
+                    raise ValidationError(self.eid, {'inlined': msg % locals()})
+        return True
+
+    def db_key_name(self):
+        """XXX goa specific"""
+        return self.get('name')
+
+
+class ENFRDef(AnyEntity):
+    """customized class for ENFRDef entities (non final relation definitions)"""
+    id = 'ENFRDef'
+    fetch_attrs = fetch_config(['cardinality'])[0]
+    __rtags__ = {
+        ('relation_type', 'ERType', 'subject') : 'inlineview',
+        ('from_entity', 'EEType', 'subject') : 'inlineview',
+        ('to_entity', 'EEType', 'subject') : 'inlineview',
+        }
+    
+    def dc_title(self):
+        """render the definition as '<subject type> <relation> <object type>'"""
+        return u'%s %s %s' % (
+            self.from_entity[0].name,
+            self.relation_type[0].name, 
+            self.to_entity[0].name)
+    
+    def dc_long_title(self):
+        """like dc_title, showing each side's cardinality (in brackets) when
+        it is not '1'
+        """
+        card = self.cardinality
+        scard, ocard = u'', u''
+        if card[0] != '1':
+            scard = '[%s]' % card[0]
+        if card[1] != '1':
+            ocard = '[%s]' % card[1]
+        return u'%s %s%s%s %s' % (
+            self.from_entity[0].name,
+            scard, self.relation_type[0].name, ocard,
+            self.to_entity[0].name)
+
+    def after_deletion_path(self):
+        """return (path, parameters) which should be used as redirect
+        information when this entity is being deleted
+        """
+        if self.relation_type:
+            return self.relation_type[0].rest_path(), {}
+        return super(ENFRDef, self).after_deletion_path()
+
+
+class EFRDef(ENFRDef):
+    id = 'EFRDef'
+    
+    def dc_long_title(self):
+        card = self.cardinality
+        scard = u''
+        if card[0] == '1':
+            scard = '+'
+        return u'%s %s%s %s' % (
+            self.from_entity[0].name,
+            scard, self.relation_type[0].name, 
+            self.to_entity[0].name)
+
+
+class EConstraint(AnyEntity):
+    id = 'EConstraint'
+    fetch_attrs, fetch_order = fetch_config(['value'])
+
+    def dc_title(self):
+        return '%s(%s)' % (self.cstrtype[0].name, self.value or u'')
+        
+    def after_deletion_path(self):
+        """return (path, parameters) which should be used as redirect
+        information when this entity is being deleted
+        """
+        if self.reverse_constrained_by:
+            return self.reverse_constrained_by[0].rest_path(), {}
+        return super(EConstraint, self).after_deletion_path()
+
+    @property
+    def type(self):
+        return self.cstrtype[0].name
+
+        
+class RQLExpression(AnyEntity):
+    """customized class for RQLExpression entities"""
+    id = 'RQLExpression'
+    fetch_attrs, fetch_order = fetch_config(['exprtype', 'mainvars', 'expression'])
+
+    widgets = {
+        'expression' : "StringWidget",
+        }
+
+    def dc_title(self):
+        """render as '<exprtype>(<expression>)'"""
+        return '%s(%s)' % (self.exprtype, self.expression or u'')
+
+    @property
+    def expression_of(self):
+        """return the first entity using this expression through one of the
+        permission / condition relations, or None
+        """
+        for rel in ('read_permission', 'add_permission', 'delete_permission',
+                    'update_permission', 'condition'):
+            values = getattr(self, 'reverse_%s' % rel)
+            if values:
+                return values[0]
+            
+    @cached
+    def _rqlexpr(self):
+        """build (once) the schema expression object matching exprtype"""
+        if self.exprtype == 'ERQLExpression':
+            return ERQLExpression(self.expression, self.mainvars, self.eid)
+        #if self.exprtype == 'RRQLExpression':
+        return RRQLExpression(self.expression, self.mainvars, self.eid)
+    
+    def check_expression(self, *args, **kwargs):
+        """delegate checking to the underlying rql expression object"""
+        return self._rqlexpr().check(*args, **kwargs)
+    
+    def after_deletion_path(self):
+        """return (path, parameters) which should be used as redirect
+        information when this entity is being deleted
+        """
+        if self.expression_of:
+            return self.expression_of.rest_path(), {}
+        return super(RQLExpression, self).after_deletion_path()
+
+
+class EPermission(AnyEntity):
+    """customized class for EPermission entities"""
+    id = 'EPermission'
+    fetch_attrs, fetch_order = fetch_config(['name', 'label'])
+
+
+    __rtags__ = {
+        'require_group' : 'primary',
+        }
+
+    def dc_title(self):
+        """return the translated permission name, with the label appended in
+        parens when set
+        """
+        if self.label:
+            return '%s (%s)' % (self.req._(self.name), self.label)
+        return self.req._(self.name)
+    
+    def after_deletion_path(self):
+        """return (path, parameters) which should be used as redirect
+        information when this entity is being deleted
+        """
+        permissionof = getattr(self, 'reverse_require_permission', ())
+        if len(permissionof) == 1:
+            return permissionof[0].rest_path(), {}
+        return super(EPermission, self).after_deletion_path()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/entities/test/data/bootstrap_packages	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/entities/test/data/schema.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,9 @@
+class Company(EntityType):
+    # minimal schema entity type with a single String attribute, used by tests
+    name = String()
+
+class Division(Company):
+    # inherits Company; __specializes_schema__ makes it a schema
+    # specialization of Company as well
+    __specializes_schema__ = True
+
+class SubDivision(Division):
+    # second level specialization of Company (through Division)
+    __specializes_schema__ = True
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/entities/test/unittest_base.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,329 @@
+# -*- coding: utf-8 -*-
+"""unit tests for cubicweb.entities.base module"""
+
+from mx.DateTime import now
+
+from logilab.common.testlib import unittest_main
+from logilab.common.decorators import clear_cache
+from logilab.common.interface import implements
+
+from cubicweb.devtools.apptest import EnvBasedTC
+
+from cubicweb import ValidationError
+from cubicweb.interfaces import IMileStone, IWorkflowable
+from cubicweb.entities import AnyEntity
+from cubicweb.entities.authobjs import EUser
+from cubicweb.web.widgets import AutoCompletionWidget
+
+
+class BaseEntityTC(EnvBasedTC):
+
+    def setup_database(self):
+        self.member = self.create_user('member')
+    
+                     
+    
+class MetadataTC(BaseEntityTC):
+
+    def test_creator(self):
+        self.login(u'member')
+        card = self.add_entity('Card', title=u"hello")
+        self.commit()
+        self.assertEquals(card.creator.eid, self.member.eid)
+        self.assertEquals(card.dc_creator(), u'member')
+
+    def test_type(self):
+        self.assertEquals(self.member.dc_type(), 'euser')
+
+    def test_custom_widget(self):
+        class EUser2(EUser):
+            widgets = {
+                'login' : 'AutoCompletionWidget',
+                }
+        clear_cache(self.vreg, 'etype_class')
+        self.vreg.register_vobject_class(EUser2)
+        p = self.entity('EUser U WHERE U login "member"')
+        self.failUnless(isinstance(p, EUser2))
+        w = p.get_widget('login')
+        self.failUnless(isinstance(w, AutoCompletionWidget))
+
+    def test_format_vocabulary(self):
+        card = self.add_entity('Card', title=u"hello")
+        self.assertEquals(card.default_content_format(), 'text/html')
+        self.execute('INSERT EProperty X: X pkey "ui.default-text-format", X value "text/rest", X for_user U WHERE U login "admin"')
+        self.commit()
+        self.assertEquals(card.default_content_format(), 'text/rest')
+        
+
+
+class EUserTC(BaseEntityTC):
+    def test_dc_title_and_name(self):
+        e = self.entity('EUser U WHERE U login "member"')
+        self.assertEquals(e.dc_title(), 'member')
+        self.assertEquals(e.name(), 'member')
+        self.execute(u'SET X firstname "bouah" WHERE X is EUser, X login "member"')
+        self.assertEquals(e.dc_title(), 'member')
+        self.assertEquals(e.name(), u'bouah')
+        self.execute(u'SET X surname "lôt" WHERE X is EUser, X login "member"')
+        self.assertEquals(e.dc_title(), 'member')
+        self.assertEquals(e.name(), u'bouah lôt')
+
+    
+class StateAndTransitionsTC(BaseEntityTC):
+        
+    def test_transitions(self):
+        user = self.entity('EUser X')
+        e = self.entity('State S WHERE S name "activated"')
+        trs = list(e.transitions(user))
+        self.assertEquals(len(trs), 1)
+        self.assertEquals(trs[0].name, u'deactivate')
+        self.assertEquals(trs[0].destination().name, u'deactivated')
+        self.assert_(user.can_pass_transition('deactivate'))
+        self.assert_(not user.can_pass_transition('activate'))
+        # test a std user gets no possible transition
+        self.login('member')
+        # fetch the entity using the new session
+        e = self.entity('State S WHERE S name "activated"')
+        trs = list(e.transitions(user))
+        self.assertEquals(len(trs), 0)
+        user = self.entity('EUser X')
+        self.assert_(not user.can_pass_transition('deactivate'))
+        self.assert_(not user.can_pass_transition('activate'))
+        
+    def test_transitions_with_dest_specfied(self):
+        user = self.entity('EUser X')
+        e = self.entity('State S WHERE S name "activated"')
+        e2 = self.entity('State S WHERE S name "deactivated"')
+        trs = list(e.transitions(user, e2.eid))
+        self.assertEquals(len(trs), 1)
+        self.assertEquals(trs[0].name, u'deactivate')
+        self.assertEquals(trs[0].destination().name, u'deactivated')
+        trs = list(e.transitions(user, e.eid))
+        self.assertEquals(len(trs), 0)
+    
+    def test_transitions_maybe_passed(self):
+        self.execute('INSERT RQLExpression X: X exprtype "ERQLExpression", '
+                     'X expression "X owned_by U", T condition X '
+                     'WHERE T name "deactivate"')
+        self._test_deactivated()
+        
+    def test_transitions_maybe_passed_using_has_update_perm(self):
+        self.execute('INSERT RQLExpression X: X exprtype "ERQLExpression", '
+                     'X expression "U has_update_permission X", T condition X '
+                     'WHERE T name "deactivate"')
+        self._test_deactivated()
+        
+        
+    def _test_deactivated(self):
+        ueid = self.create_user('toto').eid
+        self.create_user('tutu')
+        cnx = self.login('tutu')
+        cu = cnx.cursor()
+        self.assertRaises(ValidationError,
+                          cu.execute, 'SET X in_state S WHERE X eid %(x)s, S name "deactivated"',
+                          {'x': ueid}, 'x')
+        cnx.close()
+        cnx = self.login('toto')
+        cu = cnx.cursor()
+        cu.execute('SET X in_state S WHERE X eid %(x)s, S name "deactivated"',
+                   {'x': ueid}, 'x')
+        cnx.commit()
+        self.assertRaises(ValidationError,
+                          cu.execute, 'SET X in_state S WHERE X eid %(x)s, S name "activated"',
+                          {'x': ueid}, 'x')
+    
+
+    def test_transitions_selection(self):
+        """
+        ------------------------  tr1    -----------------
+        | state1 (Card, Bookmark) | ------> | state2 (Card) |
+        ------------------------         -----------------
+                  |  tr2    ------------------
+                  `------>  | state3 (Bookmark) |
+                            ------------------
+        """
+        state1 = self.add_entity('State', name=u'state1')
+        state2 = self.add_entity('State', name=u'state2')
+        state3 = self.add_entity('State', name=u'state3')
+        tr1 = self.add_entity('Transition', name=u'tr1')
+        tr2 = self.add_entity('Transition', name=u'tr2')
+        self.execute('SET X state_of Y WHERE X eid in (%s, %s), Y is EEType, Y name "Card"' %
+                      (state1.eid, state2.eid))
+        self.execute('SET X state_of Y WHERE X eid in (%s, %s), Y is EEType, Y name "Bookmark"' %
+                      (state1.eid, state3.eid))
+        self.execute('SET X transition_of Y WHERE X eid %s, Y name "Card"' % tr1.eid)
+        self.execute('SET X transition_of Y WHERE X eid %s, Y name "Bookmark"' % tr2.eid)
+        self.execute('SET X allowed_transition Y WHERE X eid %s, Y eid %s' %
+                      (state1.eid, tr1.eid))
+        self.execute('SET X allowed_transition Y WHERE X eid %s, Y eid %s' %
+                      (state1.eid, tr2.eid))
+        self.execute('SET X destination_state Y WHERE X eid %s, Y eid %s' %
+                      (tr1.eid, state2.eid))
+        self.execute('SET X destination_state Y WHERE X eid %s, Y eid %s' %
+                      (tr2.eid, state3.eid))
+        self.execute('SET X initial_state Y WHERE Y eid %s, X name "Card"' % state1.eid)
+        self.execute('SET X initial_state Y WHERE Y eid %s, X name "Bookmark"' % state1.eid)
+        card = self.add_entity('Card', title=u't1')
+        bookmark = self.add_entity('Bookmark', title=u'111', path=u'/view')
+        
+        transitions = list(state1.transitions(card))
+        self.assertEquals(len(transitions), 1)
+        self.assertEquals(transitions[0].name, 'tr1')
+        transitions = list(state1.transitions(bookmark))
+        self.assertEquals(len(transitions), 1)
+        self.assertEquals(transitions[0].name, 'tr2')
+        
+
+    def test_transitions_selection2(self):
+        """
+        ------------------------  tr1 (Bookmark)   -----------------------
+        | state1 (Card, Bookmark) | -------------> | state2 (Card,Bookmark) |
+        ------------------------                -----------------------
+                  |  tr2 (Card)                     |
+                  `---------------------------------/
+        """
+        state1 = self.add_entity('State', name=u'state1')
+        state2 = self.add_entity('State', name=u'state2')
+        tr1 = self.add_entity('Transition', name=u'tr1')
+        tr2 = self.add_entity('Transition', name=u'tr2')
+        self.execute('SET X state_of Y WHERE X eid in (%s, %s), Y is EEType, Y name "Card"' %
+                      (state1.eid, state2.eid))
+        self.execute('SET X state_of Y WHERE X eid in (%s, %s), Y is EEType, Y name "Bookmark"' %
+                      (state1.eid, state2.eid))
+        self.execute('SET X transition_of Y WHERE X eid %s, Y name "Card"' % tr1.eid)
+        self.execute('SET X transition_of Y WHERE X eid %s, Y name "Bookmark"' % tr2.eid)
+        self.execute('SET X allowed_transition Y WHERE X eid %s, Y eid %s' %
+                      (state1.eid, tr1.eid))
+        self.execute('SET X allowed_transition Y WHERE X eid %s, Y eid %s' %
+                      (state1.eid, tr2.eid))
+        self.execute('SET X destination_state Y WHERE X eid %s, Y eid %s' %
+                      (tr1.eid, state2.eid))
+        self.execute('SET X destination_state Y WHERE X eid %s, Y eid %s' %
+                      (tr2.eid, state2.eid))
+        self.execute('SET X initial_state Y WHERE Y eid %s, X name "Card"' % state1.eid)
+        self.execute('SET X initial_state Y WHERE Y eid %s, X name "Bookmark"' % state1.eid)
+        card = self.add_entity('Card', title=u't1')
+        bookmark = self.add_entity('Bookmark', title=u'111', path=u'/view')
+        
+        transitions = list(state1.transitions(card))
+        self.assertEquals(len(transitions), 1)
+        self.assertEquals(transitions[0].name, 'tr1')
+        transitions = list(state1.transitions(bookmark))
+        self.assertEquals(len(transitions), 1)
+        self.assertEquals(transitions[0].name, 'tr2')
+        
+
+class EmailAddressTC(BaseEntityTC):
+    def test_canonical_form(self):
+        eid1 = self.execute('INSERT EmailAddress X: X address "maarten.ter.huurne@philips.com"')[0][0]
+        eid2 = self.execute('INSERT EmailAddress X: X address "maarten@philips.com", X canonical TRUE')[0][0]
+        self.execute('SET X identical_to Y WHERE X eid %s, Y eid %s' % (eid1, eid2))
+        email1 = self.entity('Any X WHERE X eid %(x)s', {'x':eid1}, 'x')
+        email2 = self.entity('Any X WHERE X eid %(x)s', {'x':eid2}, 'x')
+        self.assertEquals(email1.canonical_form().eid, eid2)
+        self.assertEquals(email2.canonical_form(), email2)
+        eid3 = self.execute('INSERT EmailAddress X: X address "toto@logilab.fr"')[0][0]
+        email3 = self.entity('Any X WHERE X eid %s'%eid3)
+        self.assertEquals(email3.canonical_form(), None)
+
+    def test_mangling(self):
+        eid = self.execute('INSERT EmailAddress X: X address "maarten.ter.huurne@philips.com"')[0][0]
+        email = self.entity('Any X WHERE X eid %(x)s', {'x':eid}, 'x')
+        self.assertEquals(email.display_address(), 'maarten.ter.huurne@philips.com')
+        self.assertEquals(email.printable_value('address'), 'maarten.ter.huurne@philips.com')
+        self.vreg.config.global_set_option('mangle-emails', True)
+        self.assertEquals(email.display_address(), 'maarten.ter.huurne at philips dot com')
+        self.assertEquals(email.printable_value('address'), 'maarten.ter.huurne at philips dot com')
+        eid = self.execute('INSERT EmailAddress X: X address "syt"')[0][0]
+        email = self.entity('Any X WHERE X eid %(x)s', {'x':eid}, 'x')
+        self.assertEquals(email.display_address(), 'syt')
+        self.assertEquals(email.printable_value('address'), 'syt')
+
+
+class EUserTC(BaseEntityTC):
+    
+    def test_complete(self):
+        e = self.entity('EUser X WHERE X login "admin"')
+        e.complete()
+
+        
+    def test_matching_groups(self):
+        e = self.entity('EUser X WHERE X login "admin"')
+        self.failUnless(e.matching_groups('managers'))
+        self.failIf(e.matching_groups('xyz'))
+        self.failUnless(e.matching_groups(('xyz', 'managers')))
+        self.failIf(e.matching_groups(('xyz', 'abcd')))
+
+    def test_subject_in_state_vocabulary(self):
+        # on a new entity
+        e = self.etype_instance('EUser')
+        rschema = e.e_schema.subject_relation('in_state')
+        states = list(e.subject_in_state_vocabulary(rschema))
+        self.assertEquals(len(states), 1)
+        self.assertEquals(states[0][0], u'activated') # list of (combobox view, state eid)
+        # on an existing entity
+        e = self.entity('Any X WHERE X is EUser')
+        self.assertEquals(e.in_state[0].name, 'activated')
+        states = list(e.subject_in_state_vocabulary(rschema))
+        self.assertEquals(len(states), 1)
+        self.assertEquals(states[0][0], u'deactivated') # list of (combobox view, state eid)
+
+    def test_workflow_base(self):
+        e = self.create_user('toto')
+        self.assertEquals(e.state, 'activated')
+        activatedeid = self.execute('State X WHERE X name "activated"')[0][0]
+        deactivatedeid = self.execute('State X WHERE X name "deactivated"')[0][0]
+        e.change_state(deactivatedeid, u'deactivate 1')
+        self.commit()
+        e.change_state(activatedeid, u'activate 1')
+        self.commit()
+        e.change_state(deactivatedeid, u'deactivate 2')
+        self.commit()
+        # get a fresh user to avoid potential cache issues
+        e = self.entity('EUser X WHERE X eid %s' % e.eid)
+        self.assertEquals([tr.comment for tr in e.reverse_wf_info_for],
+                          [None, 'deactivate 1', 'activate 1', 'deactivate 2'])
+        self.assertEquals(e.latest_trinfo().comment, 'deactivate 2')
+
+
+class InterfaceTC(EnvBasedTC):
+
+    def test_nonregr_subclasses_and_mixins_interfaces(self):
+        class MyUser(EUser):
+            __implements__ = (IMileStone,)
+        self.vreg.register_vobject_class(MyUser)
+        self.failUnless(implements(EUser, IWorkflowable))
+        self.failUnless(implements(MyUser, IMileStone))
+        self.failUnless(implements(MyUser, IWorkflowable))
+
+
+class SpecializedEntityClassesTC(EnvBasedTC):
+
+    def select_eclass(self, etype):
+        # clear selector cache
+        clear_cache(self.vreg, 'etype_class')
+        return self.vreg.etype_class(etype)
+        
+    def test_etype_class_selection_and_specialization(self):
+        # no specific class for Subdivisions, the default one should be selected
+        eclass = self.select_eclass('SubDivision')
+        self.failUnless(eclass.__autogenerated__)
+        #self.assertEquals(eclass.__bases__, (AnyEntity,))
+        # build class from most generic to most specific and make
+        # sure the most specific is always selected
+        for etype in ('Company', 'Division', 'SubDivision'):
+            class Foo(AnyEntity):
+                id = etype
+            self.vreg.register_vobject_class(Foo)
+            eclass = self.select_eclass('SubDivision')
+            if etype == 'SubDivision':
+                self.failUnless(eclass is Foo)
+            else:
+                self.failUnless(eclass.__autogenerated__)
+                self.assertEquals(eclass.__bases__, (Foo,))
+        # check Division eclass is still selected for plain Division entities
+        eclass = self.select_eclass('Division')
+        self.assertEquals(eclass.id, 'Division')
+        
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/entities/wfobjs.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,118 @@
+"""workflow definition and history related entities
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from cubicweb.entities import AnyEntity, fetch_config
+
+
+class Transition(AnyEntity):
+    """customized class for Transition entities
+
+    provides a specific may_be_passed method to check if the transition may be
+    passed by the logged user
+    """
+    id = 'Transition'
+    fetch_attrs, fetch_order = fetch_config(['name'])
+    __rtags__ = {('destination_state',  '*', 'subject'):  'create',
+                 ('allowed_transition', '*', 'object') :  'create',
+                  }
+                 
+    def may_be_passed(self, eid, stateeid):
+        """return true if the logged user may pass this transition
+
+        `eid` is the eid of the object on which we may pass the transition
+        `stateeid` is the eid of the current object's state XXX unused
+        """
+        user = self.req.user
+        # check user is at least in one of the required groups if any
+        groups = frozenset(g.name for g in self.require_group)
+        if groups:
+            matches = user.matching_groups(groups)
+            if matches:
+                return matches
+            if 'owners' in groups and user.owns(eid):
+                return True
+        # check one of the rql expression conditions matches if any
+        if self.condition:
+            for rqlexpr in self.condition:
+                if rqlexpr.check_expression(self.req, eid):
+                    return True
+        if self.condition or groups:
+            return False
+        return True
+
+    def destination(self):
+        return self.destination_state[0]
+    
+    def after_deletion_path(self):
+        """return (path, parameters) which should be used as redirect
+        information when this entity is being deleted
+        """
+        if self.transition_of:
+            return self.transition_of[0].rest_path(), {'vid': 'workflow'}
+        return super(Transition, self).after_deletion_path()
+
+    
+class State(AnyEntity):
+    """customized class for State entities
+
+    provides a specific transitions method returning transitions that may be
+    passed by the current user for the given entity
+    """
+    id = 'State'
+    fetch_attrs, fetch_order = fetch_config(['name'])
+    rest_attr = 'eid'
+    
+    __rtags__ = {'destination_state' : 'create',
+                 'allowed_transition' : 'create'
+                 }
+    
+    def transitions(self, entity, desteid=None):
+        rql = ('Any T,N,DS where S allowed_transition T, S eid %(x)s, '
+               'T name N, T destination_state DS, '
+               'T transition_of ET, ET name %(et)s')
+        if desteid is not None:
+            rql += ', DS eid %(ds)s'
+        rset = self.req.execute(rql, {'x': self.eid, 'et': str(entity.e_schema),
+                                         'ds': desteid}, 'x')
+        for tr in rset.entities():
+            if tr.may_be_passed(entity.eid, self.eid):
+                yield tr
+                
+    def after_deletion_path(self):
+        """return (path, parameters) which should be used as redirect
+        information when this entity is being deleted
+        """
+        if self.state_of:
+            return self.state_of[0].rest_path(), {'vid': 'workflow'}
+        return super(State, self).after_deletion_path()
+
+    
+class TrInfo(AnyEntity):
+    """customized class for Transition information entities
+    """
+    id = 'TrInfo'
+    fetch_attrs, fetch_order = fetch_config(['creation_date', 'comment'],
+                                            pclass=None) # don't want modification_date
+    @property
+    def for_entity(self):
+        return self.wf_info_for and self.wf_info_for[0]
+    @property
+    def previous_state(self):
+        return self.from_state and self.from_state[0]
+    
+    @property
+    def new_state(self):
+        return self.to_state[0]
+
+    def after_deletion_path(self):
+        """return (path, parameters) which should be used as redirect
+        information when this entity is being deleted
+        """
+        if self.for_entity:
+            return self.for_entity.rest_path(), {}
+        return 'view', {}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/etwist/__init__.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,2 @@
+""" CW - nevow/twisted client """
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/etwist/request.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,127 @@
+"""Twisted request handler for CubicWeb
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from twisted.web2 import http, http_headers
+
+from mx.DateTime import DateTimeFromTicks
+
+from cubicweb.web import DirectResponse
+from cubicweb.web.request import CubicWebRequestBase
+from cubicweb.web.httpcache import GMTOFFSET
+
+def cleanup_files(dct, encoding):
+    d = {}
+    for k, infos in dct.items():
+        for (filename, mt, stream) in infos:
+            if filename:
+                # XXX: suppose that no file submitted <-> no filename
+                filename = unicode(filename, encoding)
+                mt = u'%s/%s' % (mt.mediaType, mt.mediaSubtype)
+                d[k] = (filename, mt, stream)
+    return d
+
+
+class CubicWebTwistedRequestAdapter(CubicWebRequestBase):
+    def __init__(self, req, vreg, https, base_url):
+        self._twreq = req
+        self._base_url = base_url
+        super(CubicWebTwistedRequestAdapter, self).__init__(vreg, https, req.args)
+        self.form.update(cleanup_files(req.files, self.encoding))
+        # prepare output headers
+        self.headers_out = http_headers.Headers()
+        self._headers = req.headers
+
+    def base_url(self):
+        """return the root url of the application"""
+        return self._base_url
+    
+    def http_method(self):
+        """returns 'POST', 'GET', 'HEAD', etc."""
+        return self._twreq.method
+    
+    def relative_path(self, includeparams=True):
+        """return the normalized path of the request (ie at least relative
+        to the application's root, but some other normalization may be needed
+        so that the returned path may be used to compare to generated urls
+
+        :param includeparams:
+           boolean indicating if GET form parameters should be kept in the path
+        """
+        path = self._twreq.uri[1:] # remove the root '/'
+        if not includeparams:
+            path = path.split('?', 1)[0]
+        return path
+
+    def get_header(self, header, default=None, raw=True):
+        """return the value associated with the given input header,
+        raise KeyError if the header is not set
+        """
+        if raw:
+            return self._twreq.headers.getRawHeaders(header, [default])[0]
+        return self._twreq.headers.getHeader(header, default)
+
+    def set_header(self, header, value, raw=True):
+        """set an output HTTP header"""
+        if raw:
+            # adding encoded header is important, else page content
+            # will be reconverted back to unicode and, apart from inefficiency, this
+            # may cause decoding problem (e.g. when downloading a file)
+            self.headers_out.setRawHeaders(header, [str(value)])
+        else:
+            self.headers_out.setHeader(header, value)
+
+    def add_header(self, header, value):
+        """add an output HTTP header"""
+        # adding encoded header is important, else page content
+        # will be reconverted back to unicode and, apart from inefficiency, this
+        # may cause decoding problem (e.g. when downloading a file)
+        self.headers_out.addRawHeader(header, str(value))
+
+    def remove_header(self, header):
+        """remove an output HTTP header"""
+        self.headers_out.removeHeader(header)
+
+    def _validate_cache(self):
+        """raise a `DirectResponse` exception if a cached page along the way
+        exists and is still usable
+        """
+        if self.get_header('Cache-Control') in ('max-age=0', 'no-cache'):
+            # Expires header seems to be required by IE7
+            self.add_header('Expires', 'Sat, 01 Jan 2000 00:00:00 GMT')
+            return
+        try:
+            http.checkPreconditions(self._twreq, _PreResponse(self))
+        except http.HTTPError, ex:
+            self.info('valid http cache, no actual rendering')
+            raise DirectResponse(ex.response)
+        # Expires header seems to be required by IE7
+        self.add_header('Expires', 'Sat, 01 Jan 2000 00:00:00 GMT')
+
+    def header_accept_language(self):
+        """returns an ordered list of preferred languages"""
+        acceptedlangs = self.get_header('Accept-Language', raw=False) or {}
+        for lang, _ in sorted(acceptedlangs.iteritems(), key=lambda x: x[1],
+                              reverse=True):
+            lang = lang.split('-')[0]
+            yield lang
+
+    def header_if_modified_since(self):
+        """If the HTTP header If-modified-since is set, return the equivalent
+        mx date time value (GMT), else return None
+        """
+        mtime = self.get_header('If-modified-since', raw=False)
+        if mtime:
+            # :/ twisted returns a localized time stamp
+            return DateTimeFromTicks(mtime) + GMTOFFSET
+        return None
+
+
+class _PreResponse(object):
+    def __init__(self, request):
+        self.headers = request.headers_out
+        self.code = 200
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/etwist/server.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,367 @@
+"""twisted server for CubicWeb web applications
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+import sys
+import select
+
+from mx.DateTime import today, RelativeDate
+
+from twisted.application import service, strports
+from twisted.internet import reactor, task, threads
+from twisted.internet.defer import maybeDeferred
+from twisted.web2 import channel, http, server, iweb
+from twisted.web2 import static, resource, responsecode
+
+from cubicweb import ObjectNotFound
+from cubicweb.web import (AuthenticationError, NotFound, Redirect, 
+                       RemoteCallFailed, DirectResponse, StatusResponse,
+                       ExplicitLogin)
+from cubicweb.web.application import CubicWebPublisher
+
+from cubicweb.etwist.request import CubicWebTwistedRequestAdapter
+
+
def start_task(interval, func):
    """schedule `func` to be called every `interval` seconds by the
    twisted reactor.

    :param interval: delay in seconds between two calls
    :param func: zero-argument callable to invoke
    :return: the underlying ``task.LoopingCall``, so callers may stop or
      inspect the task (the original discarded it; returning it is
      backward compatible since existing callers ignore the result)
    """
    lc = task.LoopingCall(func)
    lc.start(interval)
    return lc
+
def start_looping_tasks(repo):
    """start each of the repository's registered looping tasks under the
    twisted reactor (this function is monkey-patched onto the repository
    class in CubicWebRootResource.__init__ for reactor integration)
    """
    for interval, func in repo._looping_tasks:
        repo.info('starting twisted task %s with interval %.2fs',
                  func.__name__, interval)
        # default arguments bind repo/func at definition time, avoiding the
        # late-binding closure pitfall inside this loop
        def catch_error_func(repo=repo, func=func):
            # catch Exception rather than a bare except so that
            # KeyboardInterrupt / SystemExit can still propagate and shut
            # the server down instead of being logged and swallowed
            try:
                func()
            except Exception:
                repo.exception('error in looping task')
        start_task(interval, catch_error_func)
    # ensure no tasks will be further added
    repo._looping_tasks = ()
+    
+
class LongTimeExpiringFile(static.File):
    """overrides static.File and sets a far future ``Expires`` date
    on the resource.

    versions handling is done by serving static files by different
    URLs for each version. For instance::

      http://localhost:8080/data-2.48.2/cubicweb.css
      http://localhost:8080/data-2.49.0/cubicweb.css
      etc.

    """
    def renderHTTP(self, request):
        def setExpireHeader(response):
            response = iweb.IResponse(response)
            # Don't provide additional resource information to error responses
            if response.code < 400:
                # the HTTP RFC recommends not going further than 1 year ahead
                expires = today() + RelativeDate(months=6)
                # NOTE(review): setHeader is given an epoch-seconds int --
                # presumably what twisted.web2 expects for 'Expires'; confirm
                response.headers.setHeader('Expires', int(expires.ticks()))
            return response
        d = maybeDeferred(super(LongTimeExpiringFile, self).renderHTTP, request)
        return d.addCallback(setExpireHeader)
+
+
+class CubicWebRootResource(resource.PostableResource):
+    addSlash = False
+    
+    def __init__(self, config, debug=None):
+        self.appli = CubicWebPublisher(config, debug=debug)
+        self.debugmode = debug
+        self.config = config
+        self.base_url = config['base-url'] or config.default_base_url()
+        self.versioned_datadir = 'data%s' % config.instance_md5_version()
+        assert self.base_url[-1] == '/'
+        self.https_url = config['https-url']
+        assert not self.https_url or self.https_url[-1] == '/'
+        # when we have an in-memory repository, clean unused sessions every XX
+        # seconds and properly shutdown the server
+        if config.repo_method == 'inmemory':
+            reactor.addSystemEventTrigger('before', 'shutdown',
+                                          self.shutdown_event)
+            # monkey path start_looping_task to get proper reactor integration
+            self.appli.repo.__class__.start_looping_tasks = start_looping_tasks
+            if config.pyro_enabled():
+                # if pyro is enabled, we have to register to the pyro name
+                # server, create a pyro daemon, and create a task to handle pyro
+                # requests
+                self.pyro_daemon = self.appli.repo.pyro_register()
+                self.pyro_listen_timeout = 0.02
+                start_task(1, self.pyro_loop_event)
+            self.appli.repo.start_looping_tasks()
+        try:
+            self.url_rewriter = self.appli.vreg.select_component('urlrewriter')
+        except ObjectNotFound:
+            self.url_rewriter = None
+        interval = min(config['cleanup-session-time'] or 120,
+                       config['cleanup-anonymous-session-time'] or 720) / 2.
+        start_task(interval, self.appli.session_handler.clean_sessions)
+        
+    def shutdown_event(self):
+        """callback fired when the server is shutting down to properly
+        clean opened sessions
+        """
+        self.appli.repo.shutdown()
+
+    def pyro_loop_event(self):
+        """listen for pyro events"""
+        try:
+            self.pyro_daemon.handleRequests(self.pyro_listen_timeout)
+        except select.error:
+            return
+        
+    def locateChild(self, request, segments):
+        """Indicate which resource to use to process down the URL's path"""
+        if segments:
+            if segments[0] == 'https':
+                segments = segments[1:]
+            if len(segments) >= 2:
+                if segments[0] in (self.versioned_datadir, 'data'):
+                    # Anything in data/ is treated as static files
+                    datadir = self.config.locate_resource(segments[1])
+                    if datadir is None:
+                        return None, []
+                    self.info('static file %s from %s', segments[-1], datadir)
+                    if segments[0] == 'data':
+                        return static.File(str(datadir)), segments[1:]
+                    else:
+                        return LongTimeExpiringFile(datadir), segments[1:]
+                elif segments[0] == 'fckeditor':
+                    fckeditordir = self.config.ext_resources['FCKEDITOR_PATH']
+                    return static.File(fckeditordir), segments[1:]
+        # Otherwise we use this single resource
+        return self, ()
+    
+    def render(self, request):
+        """Render a page from the root resource"""
+        # reload modified files (only in development or debug mode)
+        if self.config.mode == 'dev' or self.debugmode:
+            self.appli.vreg.register_objects(self.config.vregistry_path())
+        if self.config['profile']: # default profiler don't trace threads
+            return self.render_request(request)
+        else:
+            return threads.deferToThread(self.render_request, request)
+            
+    def render_request(self, request):
+        origpath = request.path
+        host = request.host
+        # dual http/https access handling: expect a rewrite rule to prepend
+        # 'https' to the path to detect https access
+        if origpath.split('/', 2)[1] == 'https':
+            origpath = origpath[6:]
+            request.uri = request.uri[6:]
+            https = True
+            baseurl = self.https_url or self.base_url 
+        else:
+            https = False
+            baseurl = self.base_url
+        req = CubicWebTwistedRequestAdapter(request, self.appli.vreg, https, baseurl)
+        if req.authmode == 'http':
+            # activate realm-based auth
+            realm = self.config['realm']
+            req.set_header('WWW-Authenticate', [('Basic', {'realm' : realm })], raw=False)
+        try:
+            self.appli.connect(req)
+        except AuthenticationError:
+            return self.request_auth(req)
+        except Redirect, ex:
+            return self.redirect(req, ex.location)
+        if https and req.cnx.anonymous_connection:
+            # don't allow anonymous on https connection
+            return self.request_auth(req)            
+        if self.url_rewriter is not None:
+            # XXX should occurs before authentication?
+            try:
+                path = self.url_rewriter.rewrite(host, origpath)
+            except Redirect, ex:
+                return self.redirect(req, ex.location)
+            request.uri.replace(origpath, path, 1)
+        else:
+            path = origpath
+        if not path or path == "/":
+            path = 'view'
+        try:
+            result = self.appli.publish(path, req)
+        except DirectResponse, ex:
+            return ex.response
+        except StatusResponse, ex:
+            return http.Response(stream=ex.content, code=ex.status,
+                                 headers=req.headers_out or None)
+        except RemoteCallFailed, ex:
+            req.set_header('content-type', 'application/json')
+            return http.Response(stream=ex.dumps(),
+                                 code=responsecode.INTERNAL_SERVER_ERROR)
+        except NotFound:
+            result = self.appli.notfound_content(req)
+            return http.Response(stream=result, code=responsecode.NOT_FOUND,
+                                 headers=req.headers_out or None)
+        except ExplicitLogin:  # must be before AuthenticationError
+            return self.request_auth(req)
+        except AuthenticationError:
+            if self.config['auth-mode'] == 'cookie':
+                # in cookie mode redirecting to the index view is enough :
+                # either anonymous connection is allowed and the page will
+                # be displayed or we'll be redirected to the login form
+                msg = req._('you have been logged out')
+                if req.https:
+                    req._base_url =  self.base_url
+                    req.https = False
+                url = req.build_url('view', vid='index', __message=msg)
+                return self.redirect(req, url)
+            else:
+                # in http we have to request auth to flush current http auth
+                # information
+                return self.request_auth(req, loggedout=True)
+        except Redirect, ex:
+            return self.redirect(req, ex.location)
+        if not result:
+            # no result, something went wrong...
+            self.error('no data (%s)', req)
+            # 500 Internal server error
+            return self.redirect(req, req.build_url('error'))
+        # request may be referenced by "onetime callback", so clear its entity
+        # cache to avoid memory usage
+        req.drop_entity_cache()
+        return http.Response(stream=result, code=responsecode.OK,
+                             headers=req.headers_out or None)
+
+    def redirect(self, req, location):
+        req.headers_out.setHeader('location', str(location))
+        self.debug('redirecting to %s', location)
+        # 303 See other
+        return http.Response(code=303, headers=req.headers_out)
+        
+    def request_auth(self, req, loggedout=False):
+        if self.https_url and req.base_url() != self.https_url:
+            req.headers_out.setHeader('location', self.https_url + 'login')
+            return http.Response(code=303, headers=req.headers_out)            
+        if self.config['auth-mode'] == 'http':
+            code = responsecode.UNAUTHORIZED
+        else:
+            code = responsecode.FORBIDDEN
+        if loggedout:
+            if req.https:
+                req._base_url =  self.base_url
+                req.https = False
+            content = self.appli.loggedout_content(req)
+        else:
+            content = self.appli.need_login_content(req)
+        return http.Response(code, req.headers_out, content)
+
+    
# entry point used by the generated <appid>-<cfgname>.py file, itself run
# through "twistd -oy"
def main(appid, cfgname):
    """Start a cubicweb twisted server for an application.

    :param appid: application's identifier
    :param cfgname: name of the configuration to use (twisted or all-in-one)
    :return: the twisted ``service.Application`` object, as expected by twistd
    """
    from cubicweb.cwconfig import CubicWebConfiguration
    from cubicweb.etwist import twconfig # trigger configuration registration
    config = CubicWebConfiguration.config_for(appid, cfgname)
    # XXX why calling init_available_cubes here ?
    config.init_available_cubes()
    # create the site and application objects
    # NOTE(review): '-n' detection relies on twistd's own command line being
    # visible in sys.argv -- fragile, confirm before relying on it
    if '-n' in sys.argv: # debug mode
        cubicweb = CubicWebRootResource(config, debug=True)
    else:
        cubicweb = CubicWebRootResource(config)
    #toplevel = vhost.VHostURIRewrite(base_url, cubicweb)
    toplevel = cubicweb
    website = server.Site(toplevel)
    application = service.Application("cubicweb")
    # serve it via standard HTTP on port set in the configuration
    s = strports.service('tcp:%04d' % (config['port'] or 8080),
                         channel.HTTPFactory(website))
    s.setServiceParent(application)
    return application
+
+
# NOTE(review): 'failure' appears unused in this module -- confirm before
# removing the import
from twisted.python import failure
from twisted.internet import defer
from twisted.web2 import fileupload

# XXX set max file size to 100Mo: put max upload size in the configuration
# line below for twisted >= 8.0, default param value for earlier version
resource.PostableResource.maxSize = 100*1024*1024 
def parsePOSTData(request, maxMem=100*1024, maxFields=1024,
                  maxSize=100*1024*1024):
    """replacement for twisted.web2.server.parsePOSTData: parse url-encoded
    or multipart form data from the request stream into request.args (and
    request.files for uploads), returning a deferred

    unknown content types raise a 400 Bad Request instead of being ignored
    """
    # nothing to parse on an empty body or when no content type is given
    if request.stream.length == 0:
        return defer.succeed(None)
    
    ctype = request.headers.getHeader('content-type')

    if ctype is None:
        return defer.succeed(None)

    def updateArgs(data):
        # url-encoded: only form fields, no files
        args = data
        request.args.update(args)

    def updateArgsAndFiles(data):
        # multipart: both form fields and uploaded files
        args, files = data
        request.args.update(args)
        request.files.update(files)

    def error(f):
        # malformed bodies become a 400 Bad Request
        f.trap(fileupload.MimeFormatError)
        raise http.HTTPError(responsecode.BAD_REQUEST)
    
    if ctype.mediaType == 'application' and ctype.mediaSubtype == 'x-www-form-urlencoded':
        d = fileupload.parse_urlencoded(request.stream, keep_blank_values=True)
        d.addCallbacks(updateArgs, error)
        return d
    elif ctype.mediaType == 'multipart' and ctype.mediaSubtype == 'form-data':
        boundary = ctype.params.get('boundary')
        if boundary is None:
            return defer.fail(http.HTTPError(
                http.StatusResponse(responsecode.BAD_REQUEST,
                                    "Boundary not specified in Content-Type.")))
        d = fileupload.parseMultipartFormData(request.stream, boundary,
                                              maxMem, maxFields, maxSize)
        d.addCallbacks(updateArgsAndFiles, error)
        return d
    else:
        raise http.HTTPError(responsecode.BAD_REQUEST)

# install our version in place of twisted's
server.parsePOSTData = parsePOSTData
+
+
+from logging import getLogger
+from cubicweb import set_log_methods
+set_log_methods(CubicWebRootResource, getLogger('cubicweb.twisted'))
+
+
+
def _gc_debug():
    """debugging helper: print a summary of objects currently tracked by the
    garbage collector, to hunt request / appobject leaks (counts live
    CubicWebTwistedRequestAdapter and VObject instances, then the 20 most
    common other classes)
    """
    import gc
    from pprint import pprint
    from cubicweb.vregistry import VObject
    gc.collect()
    count = 0
    acount = 0
    ocount = {}
    for obj in gc.get_objects():
        if isinstance(obj, CubicWebTwistedRequestAdapter):
            count += 1
        elif isinstance(obj, VObject):
            acount += 1
        else:
            try:
                ocount[obj.__class__]+= 1
            except KeyError:
                ocount[obj.__class__] = 1
            except AttributeError:
                # some objects have no __class__ attribute, skip them
                pass
    print 'IN MEM REQUESTS', count
    print 'IN MEM APPOBJECTS', acount
    # 20 most instantiated classes, most frequent first
    ocount = sorted(ocount.items(), key=lambda x: x[1], reverse=True)[:20]
    pprint(ocount)
    print 'UNREACHABLE', gc.garbage
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/etwist/twconfig.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,96 @@
+"""twisted server configurations:
+
+* the "twisted" configuration to get a web application running in a standalone
+  twisted web server which talk to a repository server using Pyro
+  
+* the "all-in-one" configuration to get a web application running in a twisted
+  web server integrating a repository server in the same process (only available
+  if the repository part of the software is installed
  if the repository part of the software is installed)
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from os.path import join
+
+from cubicweb.web.webconfig import WebConfiguration, merge_options, Method
+
class TwistedConfiguration(WebConfiguration):
    """web application (in a twisted web server) client of a RQL server"""
    name = 'twisted'

    # configuration options specific to the twisted server, merged with the
    # generic web configuration options
    options = merge_options((
        # ctl configuration
        ('host',
         {'type' : 'string',
          'default': None,
          'help': 'host name if not correctly detectable through gethostname',
          'group': 'main', 'inputlevel': 1,
          }),
        ('port',
         {'type' : 'int',
          'default': None,
          'help': 'http server port number (default to 8080)',
          'group': 'main', 'inputlevel': 0,
          }),
        ('pid-file',
         {'type' : 'string',
          'default': Method('default_pid_file'),
          'help': 'repository\'s pid file',
          'group': 'main', 'inputlevel': 2,
          }),
        ('uid',
         {'type' : 'string',
          'default': None,
          'help': 'if this option is set, use the specified user to start \
the repository rather than the user running the command',
          'group': 'main', 'inputlevel': 0,
          }),
        ('session-time',
         {'type' : 'int',
          'default': 30*60,
          'help': 'session expiration time, default to 30 minutes',
          'group': 'main', 'inputlevel': 1,
          }),
        ('profile',
         {'type' : 'string',
          'default': None,
          'help': 'profile code and use the specified file to store stats if this option is set',
          'group': 'main', 'inputlevel': 2,
          }),
        ('pyro-server',
         {'type' : 'yn',
          # pyro is only a recommends by default, so don't activate it here
          'default': False, 
          'help': 'run a pyro server',
          'group': 'main', 'inputlevel': 1,
          }),
        ) + WebConfiguration.options)
    
    def server_file(self):
        """return the path of the generated twisted server file for this
        application (the file handed to ``twistd -oy``)
        """
        return join(self.apphome, '%s-%s.py' % (self.appid, self.name))

    def default_base_url(self):
        """return the default base url, built from the configured host (or
        this machine's host name) and port (or 8080)
        """
        from socket import gethostname
        return 'http://%s:%s/' % (self['host'] or gethostname(), self['port'] or 8080)
+
try:
    from cubicweb.server.serverconfig import ServerConfiguration

    class AllInOneConfiguration(TwistedConfiguration, ServerConfiguration):
        """repository and web application in the same twisted process"""
        name = 'all-in-one'
        # the repository lives in this very process, no pyro connection needed
        repo_method = 'inmemory'
        options = merge_options(TwistedConfiguration.options
                                + ServerConfiguration.options)

        # application objects are searched in both the web and server registries
        cubicweb_vobject_path = TwistedConfiguration.cubicweb_vobject_path | ServerConfiguration.cubicweb_vobject_path
        cube_vobject_path = TwistedConfiguration.cube_vobject_path | ServerConfiguration.cube_vobject_path
        def pyro_enabled(self):
            """tell if pyro is activated for the in memory repository"""
            return self['pyro-server']

# the server part of cubicweb is not installed: only the 'twisted'
# configuration is available
except ImportError:
    pass
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/etwist/twctl.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,83 @@
+"""cubicweb-clt handlers for twisted
+"""
+
+import sys
+
+from cubicweb.toolsutils import CommandHandler
+from cubicweb.web.webctl import WebCreateHandler
+
+# trigger configuration registration
+import cubicweb.etwist.twconfig # pylint: disable-msg=W0611
+
+
class TWCreateHandler(WebCreateHandler):
    """'create' command handler for the twisted configuration: in addition
    to the regular web bootstrap, generate the twisted server file that
    twistd will load to start the application
    """
    cfgname = 'twisted'

    def bootstrap(self, cubes, inputlevel=0):
        """bootstrap this configuration"""
        print '** twisted configuration'
        mainpyfile = self.config.server_file()
        mainpy = open(mainpyfile, 'w')
        mainpy.write('''
from cubicweb.etwist import server
application = server.main(%r, %r)
''' % (self.config.appid, self.config.name))
        mainpy.close()
        print 'application\'s twisted file %s generated' % mainpyfile
        super(TWCreateHandler, self).bootstrap(cubes, inputlevel)
+
+
class TWStartHandler(CommandHandler):
    """'start' command handler for the twisted configuration: builds the
    twistd shell command line used to launch the application server
    """
    cmdname = 'start'
    cfgname = 'twisted'

    def start_command(self, config, debug):
        """return the shell command used to start the application

        :param config: the application's configuration, providing option
          values and the path of the generated twisted server file
        :param debug: when true, run twistd in the foreground (-n) and keep
          logging on the console instead of the configured log file
        """
        command = ['%s `which twistd`' % sys.executable]
        for ctl_opt, server_opt in (('pid-file', 'pidfile'),
                                    ('uid', 'uid'),
                                    ('log-file', 'logfile',)):
            value = config[ctl_opt]
            # skip unset options; in debug mode, log to the console instead
            # of the log file
            if not value or (debug and ctl_opt == 'log-file'):
                continue
            command.append('--%s %s' % (server_opt, value))
        if debug:
            command.append('-n')
        if config['profile']:
            command.append('-p %s --savestats' % config['profile'])
        command.append('-oy')
        # use the `config` given as argument, consistently with the rest of
        # this method (previously used self.config here, a latent
        # inconsistency should the two ever differ)
        command.append(config.server_file())
        return ' '.join(command)
+
+
class TWStopHandler(CommandHandler):
    """'stop' command handler for the twisted configuration (no specific
    behaviour beyond the generic CommandHandler)
    """
    cmdname = 'stop'
    cfgname = 'twisted'
+    
+    
try:
    from cubicweb.server import serverctl

    class AllInOneCreateHandler(serverctl.RepositoryCreateHandler, TWCreateHandler):
        """configuration to get a web application running in a twisted web
        server integrating a repository server in the same process
        """
        cfgname = 'all-in-one'

        def bootstrap(self, cubes, inputlevel=0):
            """bootstrap this configuration"""
            # create both the repository and the twisted server file
            serverctl.RepositoryCreateHandler.bootstrap(self, cubes, inputlevel)
            TWCreateHandler.bootstrap(self, cubes, inputlevel)
            
    class AllInOneStartHandler(TWStartHandler):
        """'start' handler for the all-in-one configuration"""
        cmdname = 'start'
        cfgname = 'all-in-one'
        subcommand = 'cubicweb-twisted'

    class AllInOneStopHandler(serverctl.RepositoryStopHandler):
        """'stop' handler for the all-in-one configuration: also shuts the
        in-process repository down
        """
        cmdname = 'stop'
        cfgname = 'all-in-one'
        subcommand = 'cubicweb-twisted'

# the server part of cubicweb is not installed: all-in-one commands are
# not available
except ImportError:
    pass
+    
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/gettext.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,492 @@
+"""Internationalization and localization support.
+
+This module provides internationalization (I18N) and localization (L10N)
+support for your Python programs by providing an interface to the GNU gettext
+message catalog library.
+
+I18N refers to the operation by which a program is made aware of multiple
+languages.  L10N refers to the adaptation of your program, once
+internationalized, to the local language and cultural habits.
+
+"""
+
+# This module represents the integration of work, contributions, feedback, and
+# suggestions from the following people:
+#
+# Martin von Loewis, who wrote the initial implementation of the underlying
+# C-based libintlmodule (later renamed _gettext), along with a skeletal
+# gettext.py implementation.
+#
+# Peter Funk, who wrote fintl.py, a fairly complete wrapper around intlmodule,
+# which also included a pure-Python implementation to read .mo files if
+# intlmodule wasn't available.
+#
+# James Henstridge, who also wrote a gettext.py module, which has some
+# interesting, but currently unsupported experimental features: the notion of
+# a Catalog class and instances, and the ability to add to a catalog file via
+# a Python API.
+#
+# Barry Warsaw integrated these modules, wrote the .install() API and code,
+# and conformed all C and Python code to Python's coding standards.
+#
+# Francois Pinard and Marc-Andre Lemburg also contributed valuably to this
+# module.
+#
+# J. David Ibanez implemented plural forms. Bruno Haible fixed some bugs.
+#
+# TODO:
+# - Lazy loading of .mo files.  Currently the entire catalog is loaded into
+#   memory, but that's probably bad for large translated programs.  Instead,
+#   the lexical sort of original strings in GNU .mo files should be exploited
+#   to do binary searches and lazy initializations.  Or you might want to use
+#   the undocumented double-hash algorithm for .mo files with hash tables, but
+#   you'll need to study the GNU gettext code to do this.
+#
+# - Support Solaris .mo file formats.  Unfortunately, we've been unable to
+#   find this format documented anywhere.
+
+
+import copy, os, re, struct, sys
+from errno import ENOENT
+
+
+__all__ = ['NullTranslations', 'GNUTranslations', 'Catalog',
+           'find', 'translation', 'install', 'textdomain', 'bindtextdomain',
+           'dgettext', 'dngettext', 'gettext', 'ngettext',
+           ]
+
+_default_localedir = os.path.join(sys.prefix, 'share', 'locale')
+
+
+def test(condition, true, false):
+    """
+    Implements the C expression:
+
+      condition ? true : false
+
+    Required to correctly interpret plural forms.
+    """
+    if condition:
+        return true
+    else:
+        return false
+
+
def c2py(plural):
    """Gets a C expression as used in PO files for plural forms and returns a
    Python lambda function that implements an equivalent expression.

    The returned callable maps an integer n to the plural-form index.
    """
    # Security check, allow only the "n" identifier
    # (eval below is only reached once the expression is known to contain
    # no identifier other than 'n')
    from StringIO import StringIO
    import token, tokenize
    tokens = tokenize.generate_tokens(StringIO(plural).readline)
    try:
        danger = [x for x in tokens if x[0] == token.NAME and x[1] != 'n']
    except tokenize.TokenError:
        raise ValueError, \
              'plural forms expression error, maybe unbalanced parenthesis'
    else:
        if danger:
            raise ValueError, 'plural forms expression could be dangerous'

    # Replace some C operators by their Python equivalents
    plural = plural.replace('&&', ' and ')
    plural = plural.replace('||', ' or ')

    # C's '!x' becomes 'not x' (but '!=' must be left alone)
    expr = re.compile(r'\!([^=])')
    plural = expr.sub(' not \\1', plural)

    # Regular expression and replacement function used to transform
    # "a?b:c" to "test(a,b,c)".
    expr = re.compile(r'(.*?)\?(.*?):(.*)')
    def repl(x):
        return "test(%s, %s, %s)" % (x.group(1), x.group(2),
                                     expr.sub(repl, x.group(3)))

    # Code to transform the plural expression, taking care of parentheses
    stack = ['']
    for c in plural:
        if c == '(':
            stack.append('')
        elif c == ')':
            if len(stack) == 1:
                # Actually, we never reach this code, because unbalanced
                # parentheses get caught in the security check at the
                # beginning.
                raise ValueError, 'unbalanced parenthesis in plural form'
            s = expr.sub(repl, stack.pop())
            stack[-1] += '(%s)' % s
        else:
            stack[-1] += c
    plural = expr.sub(repl, stack.pop())

    return eval('lambda n: int(%s)' % plural)
+
+
+
+def _expand_lang(locale):
+    from locale import normalize
+    locale = normalize(locale)
+    COMPONENT_CODESET   = 1 << 0
+    COMPONENT_TERRITORY = 1 << 1
+    COMPONENT_MODIFIER  = 1 << 2
+    # split up the locale into its base components
+    mask = 0
+    pos = locale.find('@')
+    if pos >= 0:
+        modifier = locale[pos:]
+        locale = locale[:pos]
+        mask |= COMPONENT_MODIFIER
+    else:
+        modifier = ''
+    pos = locale.find('.')
+    if pos >= 0:
+        codeset = locale[pos:]
+        locale = locale[:pos]
+        mask |= COMPONENT_CODESET
+    else:
+        codeset = ''
+    pos = locale.find('_')
+    if pos >= 0:
+        territory = locale[pos:]
+        locale = locale[:pos]
+        mask |= COMPONENT_TERRITORY
+    else:
+        territory = ''
+    language = locale
+    ret = []
+    for i in range(mask+1):
+        if not (i & ~mask):  # if all components for this combo exist ...
+            val = language
+            if i & COMPONENT_TERRITORY: val += territory
+            if i & COMPONENT_CODESET:   val += codeset
+            if i & COMPONENT_MODIFIER:  val += modifier
+            ret.append(val)
+    ret.reverse()
+    return ret
+
+
+
class NullTranslations:
    """Translation class implementing the gettext API over no catalog at
    all: every lookup is delegated to an optional fallback translation, or
    returns the message unchanged. Also serves as the base class defining
    the protocol (_parse hook, fallback chaining, install).
    """
    def __init__(self, fp=None):
        self._info = {}
        self._charset = None
        self._fallback = None
        if fp is not None:
            self._parse(fp)

    def _parse(self, fp):
        """hook for subclasses: parse a catalog file (no-op here)"""
        pass

    def add_fallback(self, fallback):
        """append `fallback` at the end of the fallback chain"""
        if self._fallback:
            self._fallback.add_fallback(fallback)
            return
        self._fallback = fallback

    def gettext(self, message):
        """return the 'translation' of `message` (the message itself,
        unless a fallback translates it)"""
        if self._fallback is None:
            return message
        return self._fallback.gettext(message)

    def ngettext(self, msgid1, msgid2, n):
        """plural-aware variant: `msgid1` when n == 1, else `msgid2`"""
        if self._fallback is not None:
            return self._fallback.ngettext(msgid1, msgid2, n)
        if n == 1:
            return msgid1
        return msgid2

    def ugettext(self, message):
        """like gettext but always returns a unicode string"""
        if self._fallback is None:
            return unicode(message)
        return self._fallback.ugettext(message)

    def ungettext(self, msgid1, msgid2, n):
        """like ngettext but always returns a unicode string"""
        if self._fallback is not None:
            return self._fallback.ungettext(msgid1, msgid2, n)
        if n == 1:
            return unicode(msgid1)
        return unicode(msgid2)

    def info(self):
        """return the catalog metadata dictionary"""
        return self._info

    def charset(self):
        """return the catalog charset, None when unknown"""
        return self._charset

    def install(self, unicode=False):
        """install this catalog's gettext (or ugettext when `unicode` is
        true) as the builtin `_` function"""
        import __builtin__
        if unicode:
            __builtin__.__dict__['_'] = self.ugettext
        else:
            __builtin__.__dict__['_'] = self.gettext
+
+
+class GNUTranslations(NullTranslations):
+    # Magic number of .mo files
+    LE_MAGIC = 0x950412deL
+    BE_MAGIC = 0xde120495L
+
+    def _parse(self, fp):
+        """Override this method to support alternative .mo formats."""
+        unpack = struct.unpack
+        filename = getattr(fp, 'name', '')
+        # Parse the .mo file header, which consists of 5 little endian 32
+        # bit words.
+        self._catalog = catalog = {}
+        self.plural = lambda n: int(n != 1) # germanic plural by default
+        buf = fp.read()
+        buflen = len(buf)
+        # Are we big endian or little endian?
+        magic = unpack('<I', buf[:4])[0]
+        if magic == self.LE_MAGIC:
+            version, msgcount, masteridx, transidx = unpack('<4I', buf[4:20])
+            ii = '<II'
+        elif magic == self.BE_MAGIC:
+            version, msgcount, masteridx, transidx = unpack('>4I', buf[4:20])
+            ii = '>II'
+        else:
+            raise IOError(0, 'Bad magic number', filename)
+        # Now put all messages from the .mo file buffer into the catalog
+        # dictionary.
+        for i in xrange(0, msgcount):
+            mlen, moff = unpack(ii, buf[masteridx:masteridx+8])
+            mend = moff + mlen
+            tlen, toff = unpack(ii, buf[transidx:transidx+8])
+            tend = toff + tlen
+            if mend < buflen and tend < buflen:
+                msg = buf[moff:mend]
+                tmsg = buf[toff:tend]
+            else:
+                raise IOError(0, 'File is corrupt', filename)
+            # See if we're looking at GNU .mo conventions for metadata
+            if mlen == 0:
+                # Catalog description
+                # don't handle multi-lines fields here, and skip
+                # lines which don't look like a header description
+                # (e.g. "header: value")
+                lastk = k = None
+                for item in tmsg.splitlines():
+                    item = item.strip()
+                    if not item or not ':' in item:
+                        continue
+                    k, v = item.split(':', 1)
+                    k = k.strip().lower()
+                    v = v.strip()
+                    self._info[k] = v
+                    if k == 'content-type':
+                        self._charset = v.split('charset=')[1]
+                    elif k == 'plural-forms':
+                        v = v.split(';')
+                        plural = v[1].split('plural=')[1]
+                        self.plural = c2py(plural)
+            # Note: we unconditionally convert both msgids and msgstrs to
+            # Unicode using the character encoding specified in the charset
+            # parameter of the Content-Type header.  The gettext documentation
+            # strongly encourages msgids to be us-ascii, but some applications
+            # require alternative encodings (e.g. Zope's ZCML and ZPT).  For
+            # traditional gettext applications, the msgid conversion will
+            # cause no problems since us-ascii should always be a subset of
+            # the charset encoding.  We may want to fall back to 8-bit msgids
+            # if the Unicode conversion fails.
+            if msg.find('\x00') >= 0:
+                # Plural forms
+                msgid1, msgid2 = msg.split('\x00')
+                tmsg = tmsg.split('\x00')
+                if self._charset:
+                    msgid1 = unicode(msgid1, self._charset)
+                    tmsg = [unicode(x, self._charset) for x in tmsg]
+                for i in range(len(tmsg)):
+                    catalog[(msgid1, i)] = tmsg[i]
+            else:
+                if self._charset:
+                    msg = unicode(msg, self._charset)
+                    tmsg = unicode(tmsg, self._charset)
+                catalog[msg] = tmsg
+            # advance to next entry in the seek tables
+            masteridx += 8
+            transidx += 8
+
+    def gettext(self, message):
+        missing = object()
+        tmsg = self._catalog.get(message, missing)
+        if tmsg is missing:
+            if self._fallback:
+                return self._fallback.gettext(message)
+            return message
+        # Encode the Unicode tmsg back to an 8-bit string, if possible
+        if self._charset:
+            return tmsg.encode(self._charset)
+        return tmsg
+
+    def ngettext(self, msgid1, msgid2, n):
+        try:
+            tmsg = self._catalog[(msgid1, self.plural(n))]
+            if self._charset:
+                return tmsg.encode(self._charset)
+            return tmsg
+        except KeyError:
+            if self._fallback:
+                return self._fallback.ngettext(msgid1, msgid2, n)
+            if n == 1:
+                return msgid1
+            else:
+                return msgid2
+
+    def ugettext(self, message):
+        missing = object()
+        tmsg = self._catalog.get(message, missing)
+        if tmsg is missing:
+            if self._fallback:
+                return self._fallback.ugettext(message)
+            return unicode(message)
+        return tmsg
+
+    def ungettext(self, msgid1, msgid2, n):
+        try:
+            tmsg = self._catalog[(msgid1, self.plural(n))]
+        except KeyError:
+            if self._fallback:
+                return self._fallback.ungettext(msgid1, msgid2, n)
+            if n == 1:
+                tmsg = unicode(msgid1)
+            else:
+                tmsg = unicode(msgid2)
+        return tmsg
+
+
+# Locate a .mo file using the gettext strategy
+def find(domain, localedir=None, languages=None, all=0):
+    # Get some reasonable defaults for arguments that were not supplied
+    if localedir is None:
+        localedir = _default_localedir
+    if languages is None:
+        languages = []
+        for envar in ('LANGUAGE', 'LC_ALL', 'LC_MESSAGES', 'LANG'):
+            val = os.environ.get(envar)
+            if val:
+                languages = val.split(':')
+                break
+        if 'C' not in languages:
+            languages.append('C')
+    # now normalize and expand the languages
+    nelangs = []
+    for lang in languages:
+        for nelang in _expand_lang(lang):
+            if nelang not in nelangs:
+                nelangs.append(nelang)
+    # select a language
+    if all:
+        result = []
+    else:
+        result = None
+    for lang in nelangs:
+        if lang == 'C':
+            break
+        mofile = os.path.join(localedir, lang, 'LC_MESSAGES', '%s.mo' % domain)
+        if os.path.exists(mofile):
+            if all:
+                result.append(mofile)
+            else:
+                return mofile
+    return result
+
+
+
+# a mapping between absolute .mo file path and Translation object
+_translations = {}
+
+def translation(domain, localedir=None, languages=None,
+                class_=None, fallback=False):
+    if class_ is None:
+        class_ = GNUTranslations
+    mofiles = find(domain, localedir, languages, all=1)
+    if not mofiles:
+        if fallback:
+            return NullTranslations()
+        raise IOError(ENOENT, 'No translation file found for domain', domain)
+    # TBD: do we need to worry about the file pointer getting collected?
+    # Avoid opening, reading, and parsing the .mo file after it's been done
+    # once.
+    result = None
+    for mofile in mofiles:
+        key = os.path.abspath(mofile)
+        t = _translations.get(key)
+        if t is None:
+            t = _translations.setdefault(key, class_(open(mofile, 'rb')))
+        # Copy the translation object to allow setting fallbacks.
+        # All other instance data is shared with the cached object.
+        t = copy.copy(t)
+        if result is None:
+            result = t
+        else:
+            result.add_fallback(t)
+    return result
+
+
+def install(domain, localedir=None, unicode=False):
+    translation(domain, localedir, fallback=True).install(unicode)
+
+
+
+# a mapping b/w domains and locale directories
+_localedirs = {}
+# current global domain, `messages' used for compatibility w/ GNU gettext
+_current_domain = 'messages'
+
+
+def textdomain(domain=None):
+    global _current_domain
+    if domain is not None:
+        _current_domain = domain
+    return _current_domain
+
+
+def bindtextdomain(domain, localedir=None):
+    global _localedirs
+    if localedir is not None:
+        _localedirs[domain] = localedir
+    return _localedirs.get(domain, _default_localedir)
+
+
+def dgettext(domain, message):
+    try:
+        t = translation(domain, _localedirs.get(domain, None))
+    except IOError:
+        return message
+    return t.gettext(message)
+
+
+def dngettext(domain, msgid1, msgid2, n):
+    try:
+        t = translation(domain, _localedirs.get(domain, None))
+    except IOError:
+        if n == 1:
+            return msgid1
+        else:
+            return msgid2
+    return t.ngettext(msgid1, msgid2, n)
+
+
+def gettext(message):
+    return dgettext(_current_domain, message)
+
+
+def ngettext(msgid1, msgid2, n):
+    return dngettext(_current_domain, msgid1, msgid2, n)
+
+
+# dcgettext() has been deemed unnecessary and is not implemented.
+
+# James Henstridge's Catalog constructor from GNOME gettext.  Documented usage
+# was:
+#
+#    import gettext
+#    cat = gettext.Catalog(PACKAGE, localedir=LOCALEDIR)
+#    _ = cat.gettext
+#    print _('Hello World')
+
+# The resulting catalog object currently doesn't support access through a
+# dictionary API, which was supported (but apparently unused) in GNOME
+# gettext.
+
+Catalog = translation
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/__init__.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,202 @@
+"""cubicweb on google appengine
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+
+from datetime import datetime, time, date
+from mx.DateTime import DateTime, Date, Time
+
+def mx2datetime(mxobj, yamstype):
+    """converts a mx date object (DateTime, Date or Time) into a
+    regular python datetime object
+    """
+    #if yamstype == 'Datetime':
+    # don't use date, db model doesn't actually support it, only datetime
+    return datetime(mxobj.year, mxobj.month, mxobj.day,
+                    mxobj.hour, mxobj.minute, int(mxobj.second))
+#     elif yamstype == 'Date':
+#         return date(mxobj.year, mxobj.month, mxobj.day)
+#     # XXX don't support time either, what should we do here ?
+#     return time(mxobj.hour, mxobj.minute, int(mxobj.second))
+
+def datetime2mx(datetimeobj, yamstype=None):
+    """converts a regular python datetime object into a
+    mx date object (DateTime, Date or Time)
+    """
+    if yamstype is None:
+        yamstype = guess_yamstype_for_date(datetimeobj)
+    assert yamstype is not None
+    if yamstype == 'Datetime':
+        # don't use date, db model doesn't actually support it, only datetime
+        return DateTime(datetimeobj.year, datetimeobj.month, datetimeobj.day,
+                        datetimeobj.hour, datetimeobj.minute, int(datetimeobj.second))
+    elif yamstype == 'Date':
+        return Date(datetimeobj.year, datetimeobj.month, datetimeobj.day)
+    # XXX don't support time either, what should we do here ?
+    return Time(datetimeobj.hour, datetimeobj.minute, int(datetimeobj.second))
+
+
+def guess_yamstype_for_date(datetimeobj):
+    """guesses yams correct type according to `datetimeobj`'s type"""
+    if isinstance(datetimeobj, datetime):
+        return 'Datetime'
+    elif isinstance(datetimeobj, date):
+        return 'Date'
+    elif isinstance(datetimeobj, time):
+        return 'Time'
+    return None
+
+
+def use_mx_for_dates(func):
+    """decorator to convert func's return value into mx objects
+    instead of datetime objects
+    """
+    def wrapper(*args, **kwargs):
+        value = func(*args, **kwargs)
+        yamstype = guess_yamstype_for_date(value)
+        if yamstype is None:
+            return value
+        return datetime2mx(value, yamstype)
+    return wrapper
+
+
+try:
+    # WARNING: do not import the google's db module here since it will take
+    #          precedence over our own db submodule
+    from google.appengine.api.datastore import Key, Get, Query
+    from google.appengine.api.datastore_errors import BadKeyError
+except ImportError:
+    # not in google app environment
+    pass
+else:
+
+    import os    
+    _SS = os.environ.get('SERVER_SOFTWARE')
+    if _SS is None:
+        MODE = 'test'
+    elif _SS.startswith('Dev'):
+        MODE = 'dev'
+    else:
+        MODE = 'prod'
+
+    from cubicweb.server import SOURCE_TYPES
+    from cubicweb.goa.gaesource import GAESource
+    SOURCE_TYPES['gae'] = GAESource
+
+    
+    def do_monkey_patch():
+
+        # monkey patch yams Bytes validator since it should take a bytes string with gae
+        # and not a StringIO
+        def check_bytes(eschema, value):
+            """check value is a bytes string"""
+            return isinstance(value, str)
+        from yams import constraints
+        constraints.BASE_CHECKERS['Bytes'] = check_bytes
+
+        def rql_for_eid(eid):
+            return 'Any X WHERE X eid "%s"' % eid
+        from cubicweb.common import uilib
+        uilib.rql_for_eid = rql_for_eid
+
+        def typed_eid(eid):
+            try:
+                return str(Key(eid))
+            except BadKeyError:
+                raise ValueError(eid)
+        import cubicweb
+        cubicweb.typed_eid = typed_eid
+
+        # XXX monkey patch cubicweb.schema.CubicWebSchema to have string eid with
+        #     optional cardinality (since eid is set after the validation)
+        
+        import re
+        from yams import buildobjs as ybo
+        
+        def add_entity_type(self, edef):
+            edef.name = edef.name.encode()
+            assert re.match(r'[A-Z][A-Za-z0-9]*[a-z]+[0-9]*$', edef.name), repr(edef.name)
+            eschema = super(CubicWebSchema, self).add_entity_type(edef)
+            if not eschema.is_final():
+                # automatically add the eid relation to non final entity types 
+                rdef = ybo.RelationDefinition(eschema.type, 'eid', 'Bytes',
+                                              cardinality='?1', uid=True)
+                self.add_relation_def(rdef)
+                rdef = ybo.RelationDefinition(eschema.type, 'identity', eschema.type)
+                self.add_relation_def(rdef)
+            self._eid_index[eschema.eid] = eschema
+            return eschema
+        
+        from cubicweb.schema import CubicWebSchema
+        CubicWebSchema.add_entity_type = add_entity_type
+
+
+        # don't reset vreg on repository set_schema
+        from cubicweb.server import repository
+        orig_set_schema = repository.Repository.set_schema
+        def set_schema(self, schema, resetvreg=True):
+            orig_set_schema(self, schema, False)
+        repository.Repository.set_schema = set_schema
+        # deactivate function ensuring relation cardinality consistency
+        repository.del_existing_rel_if_needed = lambda *args: None
+
+        def get_cubes(self):
+            """return the list of top level cubes used by this instance"""
+            config = self.config
+            cubes = config['included-cubes'] + config['included-yams-cubes']
+            return config.expand_cubes(cubes)
+        repository.Repository.get_cubes = get_cubes
+        
+        from rql import RQLHelper
+        RQLHelper.simplify = lambda x,r: None
+
+        # activate entity caching on the server side
+
+        def set_entity_cache(self, entity):
+            self._query_data.setdefault('_eid_cache', {})[entity.eid] = entity
+
+        def entity_cache(self, eid):
+            return self._query_data['_eid_cache'][eid]
+
+        def drop_entity_cache(self, eid=None):
+            if eid is None:
+                self._query_data['_eid_cache'] = {}
+            elif '_eid_cache' in self._query_data:
+                self._query_data['_eid_cache'].pop(eid, None)
+
+        def datastore_get(self, key):
+            if isinstance(key, basestring):
+                key = Key(key)
+            try:
+                gentity = self._query_data['_key_cache'][key]
+                #self.critical('cached %s', gentity)
+            except KeyError:
+                gentity = Get(key)
+                #self.critical('Get %s', gentity)
+                self._query_data.setdefault('_key_cache', {})[key] = gentity
+            return gentity
+
+        def clear_datastore_cache(self, key=None):
+            if key is None:
+                self._query_data['_key_cache'] = {}
+            else:
+                if isinstance(key, basestring):
+                    key = Key(key)
+                self._query_data['_key_cache'].pop(key, None)
+
+        from cubicweb.server.session import Session
+        Session.set_entity_cache = set_entity_cache
+        Session.entity_cache = entity_cache
+        Session.drop_entity_cache = drop_entity_cache
+        Session.datastore_get = datastore_get
+        Session.clear_datastore_cache = clear_datastore_cache
+
+        from docutils.frontend import OptionParser
+        # avoid a call to expanduser which is not available under gae
+        def get_standard_config_files(self):
+            return self.standard_config_files
+        OptionParser.get_standard_config_files = get_standard_config_files
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/appobjects/components.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,100 @@
+"""overrides some base views for cubicweb on google appengine
+
+:organization: Logilab
+:copyright: 2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from os.path import join
+
+from logilab.mtconverter import html_escape
+from logilab.common.decorators import cached
+
+from cubicweb import typed_eid
+from cubicweb.schema import display_name
+from cubicweb.common.view import StartupView, EntityView
+from cubicweb.common.selectors import (onelinerset_selector, searchstate_selector,
+                                    accept_selector)
+from cubicweb.web import Redirect
+from cubicweb.web.views import vid_from_rset
+from cubicweb.goa.db import rset_from_objs
+
+from google.appengine.api import datastore, mail
+
+from main import APPLROOT
+
+
+class SearchForAssociationView(EntityView):
+    """view called by the edition view when the user asks
+    to search for something to link to the edited eid
+    """
+    id = 'search-associate'
+    
+    __selectors__ = (onelinerset_selector, searchstate_selector, accept_selector)
+    accepts = ('Any',)
+    search_states = ('linksearch',)
+
+    def cell_call(self, row, col):
+        entity = self.entity(0, 0)
+        role, eid, rtype, etype = self.req.search_state[1]
+        assert entity.eid == typed_eid(eid)
+        rset = entity.unrelated(rtype, etype, role, ordermethod='fetch_order')
+        vid = vid_from_rset(self.req, rset, self.schema)
+        self.w(u'<div id="search-associate-content">')
+        self.pagination(self.req, rset, w=self.w)
+        self.wview(vid, rset)
+        self.w(u'</div>')
+
+
+class SchemaImageView(StartupView):
+    id = 'schemagraph'
+    binary = True
+    content_type = 'image/png'
+    def call(self):
+        """display global schema information"""        
+        skipmeta = not int(self.req.form.get('withmeta', 0))
+        if skipmeta:
+            url = self.build_url('data/schema.png')
+        else:
+            url = self.build_url('data/metaschema.png')
+        raise Redirect(url)
+
+
+from cubicweb.web.views.baseviews import MetaDataView
+
+class GAEMetaDataView(MetaDataView):
+    show_eid = False
+
+
+from cubicweb.web.views.startup import ManageView
+
+def entity_types_no_count(self, eschemas):
+    """return a list of formatted links to get a list of entities of
+    each entity's type
+    """
+    req = self.req
+    for eschema in eschemas:
+        if eschema.is_final() or not (eschema.has_perm(req, 'read') or
+                                      eschema.has_local_role('read')):
+            continue
+        etype = eschema.type
+        label = display_name(req, etype, 'plural')
+        view = self.vreg.select_view('list', req, req.etype_rset(etype))
+        url = view.url()
+        etypelink = u'&nbsp;<a href="%s">%s</a>' % (html_escape(url), label)
+        yield (label, etypelink, self.add_entity_link(eschema, req))
+
+ManageView.entity_types = entity_types_no_count
+
+
+from cubicweb.web.views.basecontrollers import SendMailController
+
+def sendmail(self, recipient, subject, body):
+    sender = '%s <%s>' % (
+        self.req.user.dc_title() or self.config['sender-name'],
+        self.req.user.get_email() or self.config['sender-addr'])
+    mail.send_mail(sender=sender, to=recipient,
+                   subject=subject, body=body)
+
+SendMailController.sendmail = sendmail
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/appobjects/dbmgmt.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,185 @@
+"""special management views to manage repository content (initialization and
+restoration).
+
+:organization: Logilab
+:copyright: 2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from os.path import exists, join, abspath
+from pickle import loads, dumps
+
+from logilab.common.decorators import cached
+from logilab.mtconverter import html_escape
+
+from cubicweb.common.view import StartupView
+from cubicweb.web import Redirect
+from cubicweb.goa.dbinit import fix_entities, init_persistent_schema, insert_versions
+
+from google.appengine.api.datastore import Entity, Key, Get, Put, Delete
+from google.appengine.api.datastore_types import Blob
+from google.appengine.api.datastore_errors import EntityNotFoundError
+
+
+def _get_status(name, create=True):
+    key = Key.from_path('EApplicationStatus', name)
+    try:
+        status = Get(key)
+    except EntityNotFoundError:
+        if create:
+            status = Entity('EApplicationStatus', name=name)
+        else:
+            status = None
+    return status
+
+
+class AuthInfo(StartupView):
+    """special management view to get cookie values to give to laxctl commands
+    which are doing datastore administration requests
+    """
+    id = 'authinfo'
+    require_groups = ('managers',)
+
+    def call(self):
+        cookie = self.req.get_cookie()
+        values = []
+        if self.config['use-google-auth']:
+            for param in ('ACSID', 'dev_appserver_login'):
+                morsel = cookie.get(param)
+                if morsel:
+                    values.append('%s=%s' % (param, morsel.value))
+                    break
+        values.append('__session=%s' % cookie['__session'].value)
+        self.w(u"<p>pass this flag to the client: --cookie='%s'</p>"
+               % html_escape('; '.join(values)))
+        
+        
+
+class ContentInit(StartupView):
+    """special management view to initialize content of a repository,
+    step by step to avoid exceeding quotas
+    """
+    id = 'contentinit'
+    require_groups = ('managers',)
+
+    def server_session(self):
+        ssession = self.config.repo_session(self.req.cnx.sessionid)
+        ssession.set_pool()
+        return ssession
+
+    def end_core_step(self, msg, status, stepid):
+        status['cpath'] = ''
+        status['stepid'] = stepid
+        Put(status)
+        self.msg(msg)
+        
+    def call(self):
+        status = _get_status('creation')
+        if status.get('finished'):
+            self.redirect('process already completed')
+        config = self.config
+        # execute cubicweb's post<event> script
+        #mhandler.exec_event_script('post%s' % event)
+        # execute cubes'post<event> script if any
+        paths = [p for p in config.cubes_path() + [config.apphome]
+                 if exists(join(p, 'migration'))]
+        paths = [abspath(p) for p in (reversed(paths))]
+        cpath = status.get('cpath')
+        if cpath is None and status.get('stepid') is None:
+            init_persistent_schema(self.server_session(), self.schema)
+            self.end_core_step(u'inserted schema entities', status, 0)
+            return
+        if cpath == '' and status.get('stepid') == 0:
+            fix_entities(self.schema)
+            self.end_core_step(u'fixed bootstrap groups and users', status, 1)
+            return
+        if cpath == '' and status.get('stepid') == 1:
+            insert_versions(self.server_session(), self.config)
+            self.end_core_step(u'inserted software versions', status, None)
+            return
+        for i, path in enumerate(paths):
+            if not cpath or cpath == path:
+                self.info('running %s', path)
+                stepid = status.get('stepid')
+                context = status.get('context')
+                if context is not None:
+                    context = loads(context)
+                else:
+                    context = {}
+                stepid = self._migrhandler.exec_event_script(
+                    'postcreate', path, 'stepable_postcreate', stepid, context)
+                if stepid is None: # finished for this script
+                    # reset script state
+                    context = stepid = None
+                    # next time, go to the next script
+                    self.msg(u'finished postcreate for %s' % path)
+                    try:
+                        path = paths[i+1]
+                        self.continue_link()
+                    except IndexError:
+                        status['finished'] = True
+                        path = None
+                        self.redirect('process completed')
+                else:
+                    if context.get('stepidx'):
+                        self.msg(u'created %s entities for step %s of %s' % (
+                            context['stepidx'], stepid, path))
+                    else:
+                        self.msg(u'finished postcreate step %s for %s' % (
+                            stepid, path))
+                    context = Blob(dumps(context))
+                    self.continue_link()
+                status['context'] = context
+                status['stepid'] = stepid
+                status['cpath'] = path
+                break
+        else:
+            if not cpath:
+                # nothing to be done
+                status['finished'] = True
+                self.redirect('process completed')
+            else:
+                # Note the error: is expected by the laxctl command line tool,
+                # deal with this if internationalization is introduced
+                self.msg(u'error: strange creation state, can\'t find %s'
+                         % cpath)
+                self.w(u'<div>click <a href="%s?vid=contentclear">here</a> to '
+                       '<b>delete all datastore content</b> so process can be '
+                       'reinitialized</div>' % html_escape(self.req.base_url()))
+        Put(status)
+        
+    @property
+    @cached
+    def _migrhandler(self):
+        return self.config.migration_handler(self.schema, interactive=False,
+                                             cnx=self.req.cnx,
+                                             repo=self.config.repository())
+
+    def msg(self, msg):
+        self.w(u'<div class="message">%s</div>' % html_escape(msg))
+    def redirect(self, msg):
+        raise Redirect(self.req.build_url('', msg))
+    def continue_link(self):
+        self.w(u'<a href="%s">continue</a><br/>' % html_escape(self.req.url()))
+
+        
+class ContentClear(StartupView):
+    id = 'contentclear'
+    require_groups = ('managers',)
+    skip_etypes = ('EGroup', 'EUser')
+    
+    def call(self):
+        # XXX should use unsafe_execute with all hooks deactivated
+        # XXX step by catching datastore errors?
+        for eschema in self.schema.entities():
+            if eschema.is_final() or eschema in self.skip_etypes:
+                continue
+            self.req.execute('DELETE %s X' % eschema)
+            self.w(u'deleted all %s entities<br/>' % eschema)
+        status = _get_status('creation', create=False)
+        if status:
+            Delete(status)
+        self.w(u'done<br/>')
+        self.w(u'click <a href="%s?vid=contentinit">here</a> to start the data '
+               'initialization process<br/>' % self.req.base_url())
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/appobjects/gauthservice.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,37 @@
+"""authentication using google authentication service
+
+:organization: Logilab
+:copyright: 2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from cubicweb.common.registerers import priority_registerer
+from cubicweb.web.views.basecomponents import UserLink
+from cubicweb.web.views.actions import LogoutAction
+
+from google.appengine.api import users
+
+
+class use_google_auth_registerer(priority_registerer):
+    """register object if use-google-auth is true"""
+    
+    def do_it_yourself(self, registered):
+        if not hasattr(self.config, 'has_resource'):
+            return
+        return super(use_google_auth_registerer, self).do_it_yourself(registered)
+
+
+class GAEUserLink(UserLink):
+    __registerer__ = use_google_auth_registerer
+
+    def anon_user_link(self):
+        self.w(self.req._('anonymous'))
+        self.w(u'&nbsp;[<a class="logout" href="%s">%s</a>]'
+               % (users.create_login_url(self.req.url()), self.req._('login')))
+
+class GAELogoutAction(LogoutAction):
+    __registerer__ = use_google_auth_registerer
+
+    def url(self):
+        return users.create_logout_url(self.req.build_url('logout') )
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/appobjects/sessions.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,271 @@
+"""persistent sessions stored in big table
+
+:organization: Logilab
+:copyright: 2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+
+XXX TODO:
+* cleanup persistent session
+* use user as ancestor?
+"""
+__docformat__ = "restructuredtext en"
+
+from pickle import loads, dumps
+from time import localtime, strftime
+
+from logilab.common.decorators import cached, clear_cache
+
+from cubicweb import UnknownEid, BadConnectionId
+from cubicweb.dbapi import Connection, ConnectionProperties, repo_connect
+from cubicweb.server.session import Session
+from cubicweb.web import InvalidSession
+from cubicweb.web.application import AbstractSessionManager
+from cubicweb.web.application import AbstractAuthenticationManager
+
+from google.appengine.api.datastore import Key, Entity, Get, Put, Delete, Query
+from google.appengine.api.datastore_errors import EntityNotFoundError
+from google.appengine.api.datastore_types import Blob
+
+try:
+    del Connection.__del__
+except AttributeError:
+    pass # already deleted
+
+
+class GAEAuthenticationManager(AbstractAuthenticationManager):
+    """authenticate user associated to a request and check session validity,
+    using google authentication service
+    """
+
+    def __init__(self, *args, **kwargs):
+        super(GAEAuthenticationManager, self).__init__(*args, **kwargs)
+        self._repo = self.config.repository(vreg=self.vreg)
+        
+    def authenticate(self, req, _login=None, _password=None):
+        """authenticate user and return an established connection for this user
+        
+        :raise ExplicitLogin: if authentication is required (no authentication
+        info found or wrong user/password)
+        """
+        if _login is not None:
+            login, password = _login, _password
+        else:
+            login, password = req.get_authorization()
+        # remove possibly cached cursor coming from closed connection
+        clear_cache(req, 'cursor')
+        cnxprops = ConnectionProperties(self.vreg.config.repo_method,
+                                        close=False, log=False)
+        cnx = repo_connect(self._repo, login, password, cnxprops=cnxprops)
+        self._init_cnx(cnx, login, password)
+        # associate the connection to the current request
+        req.set_connection(cnx)
+        return cnx
+
+    def _init_cnx(self, cnx, login, password):
+        cnx.anonymous_connection = self.config.is_anonymous_user(login)
+        cnx.vreg = self.vreg
+        cnx.login = login
+        cnx.password = password
+
+
+class GAEPersistentSessionManager(AbstractSessionManager):
+    """manage session data associated to a session identifier"""
+
+    def __init__(self, *args, **kwargs):
+        super(GAEPersistentSessionManager, self).__init__(*args, **kwargs)
+        self._repo = self.config.repository(vreg=self.vreg)
+        
+    def get_session(self, req, sessionid):
+        """return existing session for the given session identifier"""
+        # search a record for the given session
+        key = Key.from_path('CubicWebSession', 'key_' + sessionid, parent=None)
+        try:
+            record = Get(key)
+        except EntityNotFoundError:
+            raise InvalidSession()
+        repo = self._repo
+        if self.has_expired(record):
+            repo._sessions.pop(sessionid, None)
+            Delete(record)
+            raise InvalidSession()
+        # associate it with a repository session
+        try:
+            reposession = repo._get_session(sessionid)
+            user = reposession.user
+            # touch session to avoid closing our own session when sessions are
+            # cleaned (touch is done on commit/rollback on the server side, too
+            # late in that case)
+            reposession._touch()
+        except BadConnectionId:
+            # can't found session in the repository, this probably mean the
+            # session is not yet initialized on this server, hijack the repo
+            # to create it
+            # use an internal connection
+            ssession = repo.internal_session()
+            # try to get a user object
+            try:
+                user = repo.authenticate_user(ssession, record['login'],
+                                              record['password'])
+            finally:
+                ssession.close()
+            reposession = Session(user, self._repo, _id=sessionid)
+            self._repo._sessions[sessionid] = reposession
+        cnx = Connection(self._repo, sessionid)
+        return self._get_proxy(req, record, cnx, user)
+
+    def open_session(self, req):
+        """open and return a new session for the given request"""
+        cnx = self.authmanager.authenticate(req)
+        # avoid rebuilding a user
+        user = self._repo._get_session(cnx.sessionid).user
+        # build persistent record for session data
+        record = Entity('CubicWebSession', name='key_' + cnx.sessionid)
+        record['login'] = cnx.login
+        record['password'] = cnx.password
+        record['anonymous_connection'] = cnx.anonymous_connection
+        Put(record)
+        return self._get_proxy(req, record, cnx, user)
+    
+    def close_session(self, proxy):
+        """close session on logout or on invalid session detected (expired out,
+        corrupted...)
+        """
+        proxy.close()
+
+    def current_sessions(self):
+        for record in Query('CubicWebSession').Run():
+            yield ConnectionProxy(record)
+            
+    def _get_proxy(self, req, record, cnx, user):
+        proxy = ConnectionProxy(record, cnx, user)
+        user.req = req
+        req.set_connection(proxy, user)
+        return proxy
+
+
+class ConnectionProxy(object):
+    
+    def __init__(self, record, cnx=None, user=None):
+        self.__record = record
+        self.__cnx = cnx
+        self.__user = user
+        self.__data = None
+        self.__is_dirty = False
+        self.sessionid = record.key().name()[4:] # remove 'key_' prefix
+        
+    def __repr__(self):
+        sstr = '<ConnectionProxy %s' % self.sessionid
+        if self.anonymous_connection:
+            sstr += ' (anonymous)'
+        elif self.__user:
+            sstr += ' for %s' % self.__user.login
+        sstr += ', last used %s>' % strftime('%T', localtime(self.last_usage_time))
+        return sstr
+        
+    def __getattribute__(self, name):
+        try:
+            return super(ConnectionProxy, self).__getattribute__(name)
+        except AttributeError: # not defined on the proxy: delegate to the wrapped Connection
+            return getattr(self.__cnx, name)
+
+    def _set_last_usage_time(self, value):
+        self.__is_dirty = True
+        self.__record['last_usage_time'] = value
+    def _get_last_usage_time(self):
+        return self.__record['last_usage_time']
+    
+    last_usage_time = property(_get_last_usage_time, _set_last_usage_time)
+
+    @property
+    def anonymous_connection(self):
+        # use get() for bw compat if sessions without anonymous information are
+        # found. Set default to True to limit lifetime of those sessions.
+        return self.__record.get('anonymous_connection', True)
+        
+    @property
+    @cached
+    def data(self):
+        if self.__record.get('data') is not None:
+            try:
+                return loads(self.__record['data'])
+            except:
+                self.__is_dirty = True
+                self.exception('corrupted session data for session %s',
+                               self.__cnx)
+        return {}
+        
+    def get_session_data(self, key, default=None, pop=False):
+        """return value associated to `key` in session data"""
+        if pop:
+            try:
+                value = self.data.pop(key)
+                self.__is_dirty = True
+                return value
+            except KeyError:
+                return default
+        else:
+            return self.data.get(key, default)
+        
+    def set_session_data(self, key, value):
+        """set value associated to `key` in session data"""
+        self.data[key] = value
+        self.__is_dirty = True
+        
+    def del_session_data(self, key):
+        """remove value associated to `key` in session data"""
+        try:
+            del self.data[key]
+            self.__is_dirty = True
+        except KeyError:
+            pass    
+            
+    def commit(self):
+        if self.__is_dirty:
+            self.__save()
+        self.__cnx.commit()
+
+    def rollback(self):
+        self.__save()
+        self.__cnx.rollback()
+
+    def close(self):
+        if self.__cnx is not None:
+            self.__cnx.close()
+        Delete(self.__record)
+        
+    def __save(self):
+        if self.__is_dirty:
+            self.__record['data'] = Blob(dumps(self.data))
+            Put(self.__record)
+            self.__is_dirty = False
+
+    def user(self, req=None, props=None):
+        """return the User object associated to this connection"""
+        return self.__user
+        
+
+import logging
+from cubicweb import set_log_methods
+set_log_methods(ConnectionProxy, logging.getLogger('cubicweb.web.goa.session'))
+
+
+from cubicweb.common.view import StartupView
+from cubicweb.web import application
+
+class SessionsCleaner(StartupView):
+    id = 'cleansessions'
+    require_groups = ('managers',)
+    
+    def call(self):
+        # clean web session
+        session_manager = application.SESSION_MANAGER
+        nbclosed, remaining = session_manager.clean_sessions()
+        self.w(u'<div class="message">')
+        self.w(u'%s web sessions closed<br/>\n' % nbclosed)
+        # clean repository sessions
+        repo = self.config.repository(vreg=self.vreg)
+        nbclosed = repo.clean_sessions()
+        self.w(u'%s repository sessions closed<br/>\n' % nbclosed)
+        self.w(u'%s remaining sessions<br/>\n' % remaining)
+        self.w(u'</div>')
+        
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/bin/laxctl	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+
+import sys
+import os.path as osp
+
+APPLROOT = osp.abspath(osp.join(osp.dirname(osp.abspath(__file__)), '..'))
+if APPLROOT not in sys.path:
+    sys.path.insert(0, APPLROOT)
+CUBES_DIR = osp.join(APPLROOT, 'cw-cubes')
+if CUBES_DIR not in sys.path:
+    sys.path.insert(1, CUBES_DIR)
+    
+try:
+    import custom
+except ImportError, exc:
+    print exc
+    sys.exit(2)
+
+from tools.laxctl import run
+run()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/db.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,460 @@
+"""provide replacement classes for gae db module, so that a gae model can be
+used as base for a cubicweb application by simply replacing ::
+
+  from google.appengine.ext import db
+
+by
+
+  from cubicweb.goa import db
+
+The db.model api should be fully featured by replacement classes, with the
+following differences:
+
+* all methods returning `google.appengine.ext.db.Model` instance(s) will return
+  `cubicweb.goa.db.Model` instance instead (though you should see almost no
+  difference since those instances have the same api)
+  
+* class methods returning model instance take a `req` as first argument, unless
+  they are called through an instance, representing the current request
+  (accessible through `self.req` on almost all objects)
+  
+* XXX no instance.<modelname>_set attributes, use instance.reverse_<attr name>
+      instead
+* XXX reference property always return a list of objects, not the instance
+* XXX name/collection_name argument of properties constructor are ignored
+* XXX ListProperty
+
+:organization: Logilab
+:copyright: 2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from datetime import datetime
+from copy import deepcopy
+
+from logilab.common.decorators import cached, iclassmethod
+
+from cubicweb import RequestSessionMixIn, Binary, entities
+from cubicweb.rset import ResultSet
+from cubicweb.common.entity import metaentity
+from cubicweb.server.utils import crypt_password
+from cubicweb.goa import use_mx_for_dates, mx2datetime, MODE
+from cubicweb.goa.dbinit import init_relations
+
+from google.appengine.api.datastore import Get, Put, Key, Entity, Query
+from google.appengine.api.datastore import NormalizeAndTypeCheck, RunInTransaction
+from google.appengine.api.datastore_types import Text, Blob
+from google.appengine.api.datastore_errors import BadKeyError
+
+# XXX remove this dependency
+from google.appengine.ext import db 
+
+
+def rset_from_objs(req, objs, attrs=('eid',), rql=None, args=None):
+    """return a ResultSet instance for list of objects"""
+    if objs is None:
+        objs = ()
+    elif isinstance(objs, Entity):
+        objs = (objs,)
+    if rql is None:
+        rql = 'Any X'
+    rows = []
+    description = []
+    rset = ResultSet(rows, rql, args, description=description)
+    vreg = req.vreg
+    for i, obj in enumerate(objs):
+        line = []
+        linedescr = []
+        eschema = vreg.schema.eschema(obj.kind())
+        for j, attr in enumerate(attrs):
+            if attr == 'eid':
+                value = obj.key()
+                obj.row, obj.col = i, j
+                descr = eschema.type
+                value = str(value)
+            else:
+                value = obj[attr]
+                descr = str(eschema.destination(attr))
+            line.append(value)
+            linedescr.append(descr)            
+        rows.append(line)
+        description.append(linedescr)
+        for j, attr in enumerate(attrs):
+            if attr == 'eid':
+                entity = vreg.etype_class(eschema.type)(req, rset, i, j)
+                rset._get_entity_cache_ = {(i, j): entity}        
+    rset.rowcount = len(rows)
+    req.decorate_rset(rset)    
+    return rset
+
+
+def needrequest(wrapped):
+    def wrapper(cls, *args, **kwargs):
+        req = kwargs.pop('req', None)
+        if req is None and args and isinstance(args[0], RequestSessionMixIn):
+            args = list(args)
+            req = args.pop(0)
+        if req is None:
+            req = getattr(cls, 'req', None)
+            if req is None:
+                raise Exception('either call this method on an instance or '
+                                'specify the req argument')
+        return wrapped(cls, req, *args, **kwargs)
+    return iclassmethod(wrapper)
+
+    
+class gaedbmetaentity(metaentity):
+    """metaclass for goa.db.Model classes: filter entity / db model part,
+    put aside the db model part for later creation of db model class.
+    """
+    def __new__(mcs, name, bases, classdict):
+        if not 'id' in classdict:
+            classdict['id'] = name
+        entitycls = super(gaedbmetaentity, mcs).__new__(mcs, name, bases, classdict)
+        return entitycls
+
+
+TEST_MODELS = {}
+
+def extract_dbmodel(entitycls):
+    if MODE == 'test' and entitycls in TEST_MODELS:
+        dbclassdict = TEST_MODELS[entitycls]
+    else:
+        dbclassdict = {}
+        for attr, value in entitycls.__dict__.items():
+            if isinstance(value, db.Property) or isinstance(value, ReferencePropertyStub):
+                dbclassdict[attr] = value
+                # don't remove attr from entitycls, this make tests fail, and it's anyway
+                # overwritten by descriptor at class initialization time
+                #delattr(entitycls, attr)
+    if MODE == 'test':
+        TEST_MODELS[entitycls] = dbclassdict
+        dbclassdict = deepcopy(dbclassdict)
+        for propname, prop in TEST_MODELS[entitycls].iteritems():
+            if getattr(prop, 'reference_class', None) is db._SELF_REFERENCE:
+                dbclassdict[propname].reference_class = db._SELF_REFERENCE
+    return dbclassdict
+
+
+class Model(entities.AnyEntity):
+    id = 'Any'
+    __metaclass__ = gaedbmetaentity
+    
+    row = col = 0
+    
+    @classmethod
+    def __initialize__(cls):
+        super(Model, cls).__initialize__()
+        cls._attributes = frozenset(rschema for rschema in cls.e_schema.subject_relations()
+                                    if rschema.is_final())
+    
+    def __init__(self, *args, **kwargs):
+        # db.Model prototype:
+        #   __init__(self, parent=None, key_name=None, **kw)
+        #
+        # Entity prototype:
+        #   __init__(self, req, rset, row=None, col=0)
+        if args and isinstance(args[0], RequestSessionMixIn) or 'req' in kwargs:
+            super(Model, self).__init__(*args, **kwargs)
+            self._gaeinitargs = None
+        else:
+            super(Model, self).__init__(None, None)
+            # if Model instances are given in kwargs, turn them into db model
+            for key, val in kwargs.iteritems():
+                if key in self.e_schema.subject_relations() and not self.e_schema.schema[key].is_final():
+                    if isinstance(kwargs, (list, tuple)):
+                        val = [isinstance(x, Model) and x._dbmodel or x for x in val]
+                    elif isinstance(val, Model):
+                        val = val._dbmodel
+                    kwargs[key] = val.key()
+            self._gaeinitargs = (args, kwargs)
+            
+    def __repr__(self):
+        return '<ModelEntity %s %s %s at %s>' % (
+            self.e_schema, self.eid, self.keys(), id(self))
+
+    __getattribute__ = use_mx_for_dates(entities.AnyEntity.__getattribute__)
+
+    def _cubicweb_to_datastore(self, attr, value):
+        attr = attr[2:] # remove 's_' / 'o_' prefix
+        if attr in self._attributes:
+            tschema = self.e_schema.destination(attr)
+            if tschema in ('Datetime', 'Date', 'Time'):
+                value = mx2datetime(value, tschema)
+            elif tschema == 'String':
+                if len(value) > 500:
+                    value = Text(value)                
+            elif tschema == 'Password':
+                # if value is a Binary instance, this mean we got it
+                # from a query result and so it is already encrypted
+                if isinstance(value, Binary):
+                    value = value.getvalue()
+                else:
+                    value = crypt_password(value)
+            elif tschema == 'Bytes':
+                if isinstance(value, Binary):
+                    value = value.getvalue()
+                value = Blob(value)
+        else:
+            value = Key(value)
+        return value
+
+    def _to_gae_dict(self, convert=True):
+        gaedict = {}
+        for attr, value in self.iteritems():
+            attr = 's_' + attr
+            if value is not None and convert:
+                value = self._cubicweb_to_datastore(attr, value)
+            gaedict[attr] = value
+        return gaedict
+    
+    def to_gae_model(self):
+        dbmodel = self._dbmodel
+        dbmodel.update(self._to_gae_dict())
+        return dbmodel
+
+    @property
+    @cached
+    def _dbmodel(self): 
+        if self.has_eid():
+            assert self._gaeinitargs is None
+            try:
+                return self.req.datastore_get(self.eid)
+            except AttributeError: # self.req is not a server session
+                return Get(self.eid)
+        self.set_defaults()
+        values = self._to_gae_dict(convert=False)
+        parent = key_name = _app = None
+        if self._gaeinitargs is not None:
+            args, kwargs = self._gaeinitargs
+            args = list(args)
+            if args:
+                parent = args.pop(0)
+            if args:
+                key_name = args.pop(0)
+            if args:
+                _app = args.pop(0)
+            assert not args
+            if 'parent' in kwargs:
+                assert parent is None
+                parent = kwargs.pop('parent')
+            if 'key_name' in kwargs:
+                assert key_name is None
+                key_name = kwargs.pop('key_name')
+            if '_app' in kwargs:
+                assert _app is None
+                _app = kwargs.pop('_app')
+            
+            for key, value in kwargs.iteritems():
+                if key in self._attributes:
+                    values['s_'+key] = value
+        else:
+            kwargs = None
+        if key_name is None:
+            key_name = self.db_key_name()
+            if key_name is not None:
+                key_name = 'key_' + key_name
+        for key, value in values.iteritems():
+            if value is None:
+                continue
+            values[key] = self._cubicweb_to_datastore(key, value)
+        entity = Entity(self.id, parent, _app, key_name)
+        entity.update(values)
+        init_relations(entity, self.e_schema)
+        return entity
+
+    def db_key_name(self):
+        """override this method to control datastore key name that should be
+        used at entity creation.
+
+        Note that if this function return something else than None, the returned
+        value will be prefixed by 'key_' to build the actual key name.
+        """
+        return None
+    
+    def metainformation(self):
+        return {'type': self.id, 'source': {'uri': 'system'}, 'extid': None}
+       
+    def view(self, vid, __registry='views', **kwargs):
+        """shortcut to apply a view on this entity"""
+        return self.vreg.render(__registry, vid, self.req, rset=self.rset,
+                                row=self.row, col=self.col, **kwargs)
+
+    @classmethod
+    def _rest_attr_info(cls):
+        mainattr, needcheck = super(Model, cls)._rest_attr_info()
+        if needcheck:
+            return 'eid', False
+        return mainattr, needcheck
+    
+    @use_mx_for_dates
+    def get_value(self, name):
+        try:
+            value = self[name]
+        except KeyError:
+            if not self.has_eid():
+                return None
+            value = self._dbmodel.get('s_'+name)
+            if value is not None:
+                if isinstance(value, Text):
+                    value = unicode(value)
+                elif isinstance(value, Blob):
+                    value = Binary(str(value))
+            self[name] = value
+        return value
+
+    def has_eid(self):
+        if self.eid is None:
+            return False
+        try:
+            Key(self.eid)
+            return True
+        except BadKeyError:
+            return False
+        
+    def complete(self, skip_bytes=True):
+        pass
+
+    def unrelated(self, rtype, targettype, role='subject', limit=None,
+                  ordermethod=None):
+        # XXX dumb implementation
+        if limit is not None:
+            objs = Query(str(targettype)).Get(limit)
+        else:
+            objs = Query(str(targettype)).Run()
+        return rset_from_objs(self.req, objs, ('eid',),
+                              'Any X WHERE X is %s' % targettype)
+    
+    def key(self):
+        return Key(self.eid)
+
+    def put(self, req=None):
+        if req is not None and self.req is None:
+            self.req = req
+        dbmodel = self.to_gae_model()
+        key = Put(dbmodel)
+        self.set_eid(str(key))
+        if self.req is not None and self.rset is None:
+            self.rset = rset_from_objs(self.req, dbmodel, ('eid',),
+                                       'Any X WHERE X eid %(x)s', {'x': self.eid})
+            self.row = self.col = 0
+        return dbmodel
+    
+    @needrequest
+    def get(cls, req, keys):
+        # if check if this is a dict.key call
+        if isinstance(cls, Model) and keys in cls._attributes:
+            return super(Model, cls).get(keys)
+        rset = rset_from_objs(req, Get(keys), ('eid',),
+                              'Any X WHERE X eid IN %(x)s', {'x': keys})
+        return list(rset.entities())
+
+    @needrequest
+    def get_by_id(cls, req, ids, parent=None):
+        if isinstance(parent, Model):
+            parent = parent.key()
+        ids, multiple = NormalizeAndTypeCheck(ids, (int, long))
+        keys = [Key.from_path(cls.kind(), id, parent=parent)
+                for id in ids]
+        rset = rset_from_objs(req, Get(keys))
+        return list(rset.entities())
+
+    @classmethod
+    def get_by_key_name(cls, req, key_names, parent=None):
+        if isinstance(parent, Model):
+            parent = parent.key()
+        key_names, multiple = NormalizeAndTypeCheck(key_names, basestring)
+        keys = [Key.from_path(cls.kind(), name, parent=parent)
+                for name in key_names]
+        rset = rset_from_objs(req, Get(keys))
+        return list(rset.entities())
+
+    @classmethod
+    def get_or_insert(cls, req, key_name, **kwds):
+        def txn():
+            entity = cls.get_by_key_name(key_name, parent=kwds.get('parent'))
+            if entity is None:
+                entity = cls(key_name=key_name, **kwds)
+                entity.put()
+            return entity
+        return RunInTransaction(txn)
+
+    @classmethod
+    def all(cls, req):
+        rset = rset_from_objs(req, Query(cls.id).Run())
+        return list(rset.entities())
+
+    @classmethod
+    def gql(cls, req, query_string, *args, **kwds):
+        raise NotImplementedError('use rql')
+
+    @classmethod
+    def kind(cls):
+        return self.id
+
+    @classmethod
+    def properties(cls):
+        raise NotImplementedError('use eschema')
+
+    def dynamic_properties(self):
+        raise NotImplementedError('use eschema')
+        
+    def is_saved(self):
+        return self.has_eid()
+
+    def parent(self):
+        parent = self._dbmodel.parent()
+        if not parent is None:
+            rset = rset_from_objs(self.req, (parent,), ('eid',),
+                                  'Any X WHERE X eid %(x)s', {'x': parent.key()})
+            parent = rset.get_entity(0, 0)
+        return parent
+
+    def parent_key(self):
+        return self.parent().key()
+
+    def to_xml(self):
+        return self._dbmodel.ToXml()
+
+# hijack AnyEntity class
+entities.AnyEntity = Model
+
+BooleanProperty = db.BooleanProperty
+URLProperty = db.URLProperty
+DateProperty = db.DateProperty
+DateTimeProperty = db.DateTimeProperty
+TimeProperty = db.TimeProperty
+StringProperty = db.StringProperty
+TextProperty = db.TextProperty
+BlobProperty = db.BlobProperty
+IntegerProperty = db.IntegerProperty
+FloatProperty = db.FloatProperty
+ListProperty = db.ListProperty
+SelfReferenceProperty = db.SelfReferenceProperty 
+UserProperty = db.UserProperty
+
+
+class ReferencePropertyStub(object):
+    def __init__(self, cls, args, kwargs):
+        self.cls = cls
+        self.args = args
+        self.kwargs = kwargs
+        self.required = False
+        self.__dict__.update(kwargs)
+        self.creation_counter = db.Property.creation_counter
+        db.Property.creation_counter += 1
+
+    @property
+    def data_type(self):
+        class FakeDataType(object):
+            @staticmethod
+            def kind():
+                return self.cls.__name__
+        return FakeDataType
+
+def ReferenceProperty(cls, *args, **kwargs):
+    if issubclass(cls, db.Model):
+        cls = db.class_for_kind(cls.__name__)
+        return db.ReferenceProperty(cls, *args, **kwargs)
+    return ReferencePropertyStub(cls, args, kwargs)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/dbinit.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,108 @@
+"""some utility functions for datastore initialization.
+
+:organization: Logilab
+:copyright: 2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from google.appengine.api.datastore import Key, Entity, Put, Get, Query
+from google.appengine.api import datastore_errors
+
+_GROUP_CACHE = {} # XXX use memcache
+
+def _get_group(groupname):
+    try:
+        return _GROUP_CACHE[groupname]
+    except KeyError:
+        key = Key.from_path('EGroup', 'key_' + groupname, parent=None)
+        try:
+            group = Get(key)
+        except datastore_errors.EntityNotFoundError:
+            raise Exception('can\'t find required group %s, is your application '
+                            'correctly initialized (eg did you run the '
+                            'initialization script) ?' % groupname)
+        _GROUP_CACHE[groupname] = group
+        return group
+
+
+def create_user(login, password, groups):
+    """create a cubicweb user"""
+    from cubicweb.server.utils import crypt_password
+    user = Entity('EUser', name=login)
+    user['s_login'] = unicode(login)
+    user['s_upassword'] = crypt_password(password)
+    set_user_groups(user, groups)
+    Put(user)
+    return user
+
+def create_groups():
+    """create initial cubicweb groups"""
+    for groupname in ('managers', 'users', 'guests'):
+        group = Entity('EGroup', name='key_' + groupname)
+        group['s_name'] = unicode(groupname)
+        Put(group)
+        _GROUP_CACHE[groupname] = group
+
+def set_user_groups(user, groups):
+    """set user in the given groups (given as strings). The given user entity
+    (datastore.Entity) is not put back to the datastore, this is the caller's
+    responsibility.
+    """
+    groups = [_get_group(g) for g in groups]
+    user['s_in_group'] = [g.key() for g in groups] or None
+    for group in groups:
+        try:
+            group['o_in_group'].append(user.key()) # AttributeError when the stored value is None
+        except (KeyError, AttributeError):
+            group['o_in_group'] = [user.key()]
+        Put(group)
+
+def init_relations(gaeentity, eschema):
+    """set None for every subject relations which is not yet defined"""
+    for rschema in eschema.subject_relations():
+        if rschema in ('identity', 'has_text'):
+            continue
+        dsrelation = 's_' + rschema.type
+        if not dsrelation in gaeentity:
+            gaeentity[dsrelation] = None
+    for rschema in eschema.object_relations():
+        if rschema == 'identity':
+            continue
+        dsrelation = 'o_' + rschema.type
+        if not dsrelation in gaeentity:
+            gaeentity[dsrelation] = None
+    
+def fix_entities(schema):
+    for etype in ('EUser', 'EGroup'):
+        eschema = schema.eschema(etype)
+        for gaeentity in Query(etype).Run():
+            init_relations(gaeentity, eschema)
+            # XXX o_is on EEType entity
+            gaeentity['s_is'] = Key.from_path('EEType', 'key_' + etype, parent=None)
+            Put(gaeentity)
+    
+def init_persistent_schema(ssession, schema):
+    execute = ssession.unsafe_execute
+    rql = ('INSERT EEType X: X name %(name)s, X description %(descr)s,'
+           'X final FALSE, X meta %(meta)s')
+    eschema = schema.eschema('EEType')
+    execute(rql, {'name': u'EEType', 'descr': unicode(eschema.description),
+                  'meta': eschema.meta})
+    for eschema in schema.entities():
+        if eschema.is_final() or eschema == 'EEType':
+            continue
+        execute(rql, {'name': unicode(eschema), 'meta': eschema.meta,
+                      'descr': unicode(eschema.description)})
+
+def insert_versions(ssession, config):
+    execute = ssession.unsafe_execute
+    # insert versions
+    execute('INSERT EProperty X: X pkey %(pk)s, X value%(v)s',
+            {'pk': u'system.version.cubicweb',
+             'v': unicode(config.cubicweb_version())})
+    for cube in config.cubes():
+        execute('INSERT EProperty X: X pkey %(pk)s, X value%(v)s',
+                {'pk': u'system.version.%s' % cube,
+                 'v': unicode(config.cube_version(cube))})
+    
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/dbmyams.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,217 @@
+"""extends yams to be able to load google appengine's schemas
+
+MISSING FEATURES:
+ - ListProperty, StringList, EmailProperty, etc. (XXX)
+ - ReferenceProperty.verbose_name, collection_name, etc. (XXX)
+
+XXX proprify this knowing we'll use goa.db
+"""
+
+from os.path import join
+from datetime import datetime, date, time
+
+from google.appengine.ext import db
+from google.appengine.api import datastore_types
+
+from yams.schema2sql import eschema_attrs
+from yams.constraints import SizeConstraint
+from yams.reader import PyFileReader
+from yams.buildobjs import (String, Int, Float, Boolean, Date, Time, Datetime,
+                            Interval, Password, Bytes, ObjectRelation,
+                            SubjectRelation, RestrictedEntityType)
+from yams.buildobjs import metadefinition, EntityType
+
+from cubicweb.schema import CubicWebSchemaLoader
+from cubicweb.goa import db as goadb
+
+# db.Model -> yams ############################################################
+
+DBM2Y_TYPESMAP = {
+    basestring: String,
+    datastore_types.Text: String,
+    int: Int,
+    float: Float,
+    bool: Boolean,
+    time: Time,
+    date: Date,
+    datetime: Datetime,
+    datastore_types.Blob: Bytes,
+    }
+
+
+def dbm2y_default_factory(prop, **kwargs):
+    """just wraps the default types map to set
+    basic constraints like `required`, `default`, etc.
+    """
+    yamstype = DBM2Y_TYPESMAP[prop.data_type]
+    if 'default' not in kwargs:
+        default = prop.default_value()
+        if default is not None:
+            kwargs['default'] = default
+    if prop.required:
+        kwargs['required'] = True
+    return yamstype(**kwargs)
+
+def dbm2y_string_factory(prop):
+    """like dbm2y_default_factory but also deals with `maxsize` and `vocabulary`"""
+    kwargs = {}
+    if prop.data_type is basestring:
+        kwargs['maxsize'] = 500
+    if prop.choices is not None:
+        kwargs['vocabulary'] = prop.choices
+    return dbm2y_default_factory(prop, **kwargs)
+
+def dbm2y_date_factory(prop):
+    """like dbm2y_default_factory but also deals with today / now definition"""
+    kwargs = {}
+    if prop.auto_now_add:
+        if prop.data_type is datetime:
+            kwargs['default'] = 'now'
+        else:
+            kwargs['default'] = 'today'
+    # XXX no equivalent to Django's `auto_now`
+    return dbm2y_default_factory(prop, **kwargs)
+
+    
+def dbm2y_relation_factory(etype, prop, multiple=False):
+    """called if `prop` is a `db.ReferenceProperty`"""
+    if multiple:
+        cardinality = '**'
+    elif prop.required:
+        cardinality = '1*'
+    else:
+        cardinality = '?*'
+    # XXX deal with potential kwargs of ReferenceProperty.__init__()
+    try:
+        return SubjectRelation(prop.data_type.kind(), cardinality=cardinality)
+    except AttributeError, ex:
+        # hack, data_type is still _SELF_REFERENCE_MARKER
+        return SubjectRelation(etype, cardinality=cardinality)
+    
+    
+DBM2Y_FACTORY = {
+    basestring: dbm2y_string_factory,
+    datastore_types.Text: dbm2y_string_factory,
+    int: dbm2y_default_factory,
+    float: dbm2y_default_factory,
+    bool: dbm2y_default_factory,
+    time: dbm2y_date_factory,
+    date: dbm2y_date_factory,
+    datetime: dbm2y_date_factory,
+    datastore_types.Blob: dbm2y_default_factory,
+    }
+
+
+class GaeSchemaLoader(CubicWebSchemaLoader):
+    """Google appengine schema loader class"""
+    def __init__(self, *args, **kwargs):
+        self.use_gauthservice = kwargs.pop('use_gauthservice', False)
+        super(GaeSchemaLoader, self).__init__(*args, **kwargs)
+        self.defined = {}
+        self.created = []
+        self._instantiate_handlers()
+        
+    def finalize(self, register_base_types=False):
+        return self._build_schema('google-appengine', register_base_types)
+
+    def load_dbmodel(self, name, props):
+        clsdict = {}
+        ordered_props = sorted(props.items(),
+                               key=lambda x: x[1].creation_counter)
+        for pname, prop in ordered_props:
+            if isinstance(prop, db.ListProperty):
+                if not issubclass(prop.item_type, db.Model):
+                    self.error('ignoring list property with %s item type'
+                               % prop.item_type)
+                    continue
+                rdef = dbm2y_relation_factory(name, prop, multiple=True)
+            else:
+                try:
+                    if isinstance(prop, (db.ReferenceProperty,
+                                         goadb.ReferencePropertyStub)):
+                        rdef = dbm2y_relation_factory(name, prop)
+                    else:
+                        rdef = DBM2Y_FACTORY[prop.data_type](prop)
+                except KeyError, ex:
+                    import traceback
+                    traceback.print_exc()
+                    self.error('ignoring property %s (keyerror on %s)' % (pname, ex))
+                    continue
+            rdef.creation_rank = prop.creation_counter
+            clsdict[pname] = rdef
+        edef = metadefinition(name, (EntityType,), clsdict)
+        self.add_definition(self, edef())
+
+    def error(self, msg):
+        print 'ERROR:', msg
+
+    def import_yams_schema(self, ertype, schemamod):
+        erdef = self.pyreader.import_erschema(ertype, schemamod)
+
+    def import_yams_cube_schema(self, templpath):
+        for filepath in self.get_schema_files(templpath):
+            self.handle_file(filepath)
+        
+    @property
+    def pyreader(self):
+        return self._live_handlers['.py']
+        
+import os
+from cubicweb import CW_SOFTWARE_ROOT
+
+if os.environ.get('APYCOT_ROOT'):
+    SCHEMAS_LIB_DIRECTORY = join(os.environ['APYCOT_ROOT'],
+                                 'local', 'share', 'cubicweb', 'schemas')
+else:
+    SCHEMAS_LIB_DIRECTORY = join(CW_SOFTWARE_ROOT, 'schemas')
+
+def load_schema(config, schemaclasses=None, extrahook=None):
+    """high level method to load all the schema for a lax application"""
+    # IMPORTANT NOTE: dbmodel schemas must be imported **BEFORE**
+    # the loader is instantiated because this is where the dbmodels
+    # are registered in the yams schema
+    for compname in config['included-cubes']:
+        comp = __import__('%s.schema' % compname)
+    loader = GaeSchemaLoader(use_gauthservice=config['use-google-auth'], db=db)
+    loader.lib_directory = SCHEMAS_LIB_DIRECTORY
+    if schemaclasses is not None:
+        for cls in schemaclasses:
+            loader.load_dbmodel(cls.__name__, goadb.extract_dbmodel(cls))
+    elif config['schema-type'] == 'dbmodel':
+        import schema as appschema
+        for objname, obj in vars(appschema).items():
+            if isinstance(obj, type) and issubclass(obj, goadb.Model) and obj.__module__ == appschema.__name__:
+                loader.load_dbmodel(obj.__name__, goadb.extract_dbmodel(obj))
+    for erschema in ('EGroup', 'EEType', 'ERType', 'RQLExpression',
+                     'is_', 'is_instance_of',
+                     'read_permission', 'add_permission',
+                     'delete_permission', 'update_permission'):
+        loader.import_yams_schema(erschema, 'bootstrap')  
+    loader.handle_file(join(SCHEMAS_LIB_DIRECTORY, 'base.py'))
+    cubes = config['included-yams-cubes']
+    for cube in reversed(config.expand_cubes(cubes)):
+        config.info('loading cube %s', cube)
+        loader.import_yams_cube_schema(config.cube_dir(cube))
+    if config['schema-type'] == 'yams':
+        loader.import_yams_cube_schema('.')
+    if extrahook is not None:
+        extrahook(loader)
+    if config['use-google-auth']:
+        loader.defined['EUser'].remove_relation('upassword')
+        loader.defined['EUser'].permissions['add'] = ()
+        loader.defined['EUser'].permissions['delete'] = ()
+    for etype in ('EGroup', 'RQLExpression'):
+        read_perm_rel = loader.defined[etype].get_relations('read_permission').next()
+        read_perm_rel.cardinality = '**'
+    # XXX not yet ready for EUser workflow
+    loader.defined['EUser'].remove_relation('in_state')
+    loader.defined['EUser'].remove_relation('wf_info_for')
+    # remove RQLConstraint('NOT O name "owners"') on EUser in_group EGroup
+    # since "owners" group is not persistent with gae
+    loader.defined['EUser'].get_relations('in_group').next().constraints = []
+    # return the full schema including the cubes' schema
+    for ertype in loader.defined.values():
+        if getattr(ertype, 'inlined', False):
+            ertype.inlined = False
+    return loader.finalize()
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/doc/FAQ.en.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,5 @@
+==============================
+LAX Frequently Asked Questions
+==============================
+
+[WRITE ME]
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/doc/README_LAX.fr.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,75 @@
+Qu'est-ce que ``LAX`` ?
+=======================
+
+``LAX`` (Logilab Application engine eXtension) est un framework 
+d'application web qui facilite les développements faits pour
+``Google AppEngine``.
+
+``LAX`` est un portage de la partie web de la plate-forme
+applicative développée par Logilab depuis 2001. Cette plate-forme 
+publie des données que la partie stockage tire de bases SQL, 
+d'annuaires LDAP et de systèmes de gestion de version. Depuis mai 
+2008, elle fonctionne sur le "datastore" de ``Google AppEngine``.
+
+``LAX`` est pour le moment en version alpha.
+
+Django/GAE vs. LAX/GAE
+=======================
+
+NotImplementedError()
+
+
+Téléchargement des sources
+==========================
+
+- Les sources de ``Google AppEngine`` peuvent être obtenues à l'adresse
+  suivante : http://code.google.com/appengine/downloads.html
+
+- Les sources de ``LAX`` se trouvent à l'adresse suivante :
+  http://lax.logilab.org/
+
+
+Installation
+============
+
+Les sources de ``Google AppEngine`` doivent être décompressées et le
+répertoire `google` qui s'y trouve doit être accessible par la variable
+d'environnement ``PYTHONPATH``. Correctement définir le ``PYTHONPATH`` 
+n'est pas nécessaire pour le lancement de l'application elle-même mais 
+pour l'utilisation des scripts fournis par ``LAX`` ou pour l'exécution 
+des tests unitaires.
+
+Une fois décompactée, l'archive ``lax-0.1.0-alpha.tar.gz``, on obtient
+l'arborescence suivante::
+  
+  .
+  |-- app.yaml
+  |-- custom.py
+  |-- data
+  |-- cubicweb/
+  |-- i18n/
+  |-- logilab/
+  |-- main.py
+  |-- mx/
+  |-- rql/
+  |-- schema.py
+  |-- simplejson/
+  |-- tools/
+  |   |-- generate_schema_img.py
+  |   `-- i18ncompile.py
+  |-- views.py
+  |-- yams/
+  `-- yapps/
+
+  
+On retrouve le squelette d'une application web de ``Google AppEngine``
+(fichiers ``app.yaml``, ``main.py``en particulier) avec les dépendances
+supplémentaires nécessaires à l'utilisation du framework ``LAX``
+
+
+Lancement de l'application de base
+==================================
+
+python /path/to/google_appengine/dev_appserver.py /path/to/lax
+
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/doc/devmanual_fr/advanced_notes.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,5 @@
+
+La différence entre la classe `AppRsetObject` et la classe `AppObject` est que
+les instances de la première sont sélectionnées pour une requête et un "result
+set", alors que les secondes ne sont sélectionnées qu'en fonction de leur
+identifiant.
Binary file goa/doc/devmanual_fr/archi_globale.dia has changed
Binary file goa/doc/devmanual_fr/archi_globale.png has changed
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/doc/devmanual_fr/chap_autres_composants_ui.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,14 @@
+Autres composants de l'interface web
+====================================
+
+Actions
+-------
+XXXFILLME
+
+Component, VComponent
+---------------------
+XXXFILLME
+
+EProperty
+---------
+XXXFILLME
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/doc/devmanual_fr/chap_bases_framework_erudi.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,226 @@
+Fondements du framework CubicWeb
+================================
+
+Le moteur web de cubicweb consiste en quelques classes gérant un ensemble d'objets
+chargés dynamiquement au lancement de cubicweb. Ce sont ces objets dynamiques, issus
+du modèle ou de la librairie, qui construisent le site web final. Les différents
+composants dynamiques sont par exemple : 
+
+* coté client et serveur
+
+ - les définitions d'entités, contenant la logique permettant la manipulation des
+   données de l'application
+
+* coté client
+
+  - les *vues* , ou encore plus spécifiquement 
+
+    - les boites
+    - l'en-tête et le pied de page
+    - les formulaires
+    - les gabarits de pages
+
+  - les *actions*
+  - les *controleurs*
+
+* coté serveur
+
+  - les crochets de notification
+  - les vues de notification
+
+Les différents composants du moteur sont :
+
+* un frontal web (seul twisted disponible pour le moment), transparent du point
+  de vue des objets dynamiques
+* un objet encapsulant la configuration
+* un `vregistry` (`cubicweb.cwvreg`) contenant les objets chargés dynamiquements
+
+
+Détail de la procédure d'enregistrement
+---------------------------------------
+Au démarrage le `vregistry` ou base de registres inspecte un certain nombre de
+répertoires à la recherche de définitions de classes "compatibles". Après une
+procédure d'enregistrement les objets sont affectés dans différents registres
+afin d'être ensuite sélectionnés dynamiquement pendant le fonctionnement de
+l'application.
+
+La classe de base de tout ces objets est la classe `AppRsetObject` (module
+`cubicweb.common.appobject`). 
+
+
+API Python/RQL
+--------------
+
+Inspiré de la db-api standard, avec un object Connection possédant les méthodes
+cursor, rollback et commit principalement. La méthode importante est la méthode
+`execute` du curseur :
+
+`execute(rqlstring, args=None, eid_key=None, build_descr=True)`
+
+:rqlstring: la requête rql à éxécuter (unicode)
+:args: si la requête contient des substitutions, un dictionnaire contenant les
+       valeurs à utiliser
+:eid_key: 
+   un détail d'implémentation du cache de requêtes RQL fait que si une substitution est
+   utilisée pour introduire un eid *levant des ambiguités dans la résolution de
+   type de la requête*, il faut spécifier par cet argument la clé correspondante
+   dans le dictionnaire
+
+C'est l'objet Connection qui possède les méthodes classiques `commit` et
+`rollback`. Vous ne *devriez jamais avoir à les utiliser* lors du développement
+d'interface web sur la base du framework CubicWeb étant donné que la fin de la
+transaction est déterminée par celui-ci en fonction du succès d'éxécution de la
+requête. 
+
+NOTE : lors de l'éxécution de requêtes de modification (SET,INSERT,DELETE), si une
+requête génère une erreur liée à la sécurité, un rollback est systématiquement
+effectuée sur la transaction courante.
+
+
+La classe `Request` (`cubicweb.web`)
+------------------------------------
+Une instance de requête est créée lorsque une requête HTTP est transmise au
+serveur web. Elle contient des informations telles que les paramètres de
+formulaires, l'utilisateur connecté, etc. 
+
+**De manière plus générale une requête représente une demande d'un utilisateur,
+que se soit par HTTP ou non (on parle également de requête rql coté serveur par
+exemple)**
+
+Une instance de la classe `Request` possède les attributs :
+
+* `user`, instance de `cubicweb.common.utils.User` correspondant à l'utilisateur
+  connecté 
+* `form`, dictionnaire contenant les valeurs de formulaire web
+* `encoding`, l'encodage de caractère à utiliser dans la réponse
+
+Mais encore :
+
+:Gestion des données de session:        
+  * `session_data()`, retourne un dictionnaire contenant l'intégralité des
+    données de la session
+  * `get_session_data(key, default=None)`, retourne la valeur associée à
+    la clé ou la valeur `default` si la clé n'est pas définie
+  * `set_session_data(key, value)`, associe une valeur à une clé
+  * `del_session_data(key)`,  supprime la valeur associé à une clé
+    
+
+:Gestion de cookie:
+  * `get_cookie()`, retourne un dictionnaire contenant la valeur de l'entête
+    HTTP 'Cookie'
+  * `set_cookie(cookie, key, maxage=300)`, ajoute un en-tête HTTP `Set-Cookie`,
+    avec une durée de vie 5 minutes par défaut (`maxage` = None donne un cookie
+    *de session* expirant quand l'utilisateur ferme son navigateur)
+  * `remove_cookie(cookie, key)`, fait expirer une valeur
+
+:Gestion d'URL:
+  * `url()`, retourne l'url complète de la requête HTTP
+  * `base_url()`, retourne l'url de la racine de l'application
+  * `relative_path()`, retourne chemin relatif de la requête
+
+:Et encore...:
+  * `set_content_type(content_type, filename=None)`, place l'en-tête HTTP
+    'Content-Type'
+  * `get_header(header)`, retourne la valeur associé à un en-tête HTTP
+    arbitraire de la requête
+  * `set_header(header, value)`, ajoute un en-tête HTTP arbitraire dans la
+    réponse 
+  * `cursor()` retourne un curseur RQL sur la session
+  * `execute(*args, **kwargs)`, raccourci vers .cursor().execute()
+  * `property_value(key)`, gestion des propriétés (`EProperty`)
+  * le dictionnaire `data` pour stocker des données pour partager de
+    l'information entre les composants *durant l'éxécution de la requête*.
+
+A noter que cette classe est en réalité abstraite et qu'une implémentation
+concrète sera fournie par le *frontend* web utilisé (en l'occurrence *twisted*
+aujourd'hui). Enfin pour les vues ou autres qui sont éxécutés coté serveur,
+la majeure partie de l'interface de `Request` est définie sur la session
+associée au client. 
+
+
+La classe `AppObject`
+---------------------
+
+En général :
+
+* on n'hérite pas directement de cette classe mais plutôt d'une classe
+  plus spécifique comme par exemple `AnyEntity`, `EntityView`, `AnyRsetView`,
+  `Action`...
+
+* pour être enregistrable, une classe fille doit définir son registre (attribut
+  `__registry__`) et son identifiant (attribut `id`). Généralement on n'a pas à
+  s'occuper du registre, uniquement de l'identifiant `id` :) 
+
+On trouve un certain nombre d'attributs et de méthodes définis dans cette classe
+et donc commune à tous les objets de l'application :
+
+A l'enregistrement, les attributs suivants sont ajoutés dynamiquement aux
+*classes* filles:
+
+* `vreg`, le `vregistry` de l'application
+* `schema`, le schéma de l'application
+* `config`, la configuration de l'application
+
+On trouve également sur les instances les attributs :
+
+* `req`, instance de `Request`
+* `rset`, le "result set" associé à l'objet le cas échéant
+* `cursor`, curseur rql sur la session
+
+
+:Gestion d'URL:
+  * `build_url(method=None, **kwargs)`, retourne une URL absolue construites à
+    partir des arguments donnés. Le *controleur* devant gérer la réponse
+    peut-être spécifié via l'argument spécial `method` (le branchement est
+    théoriquement bien effectué automatiquement :).
+
+  * `datadir_url()`, retourne l'url du répertoire de données de l'application
+    (contenant les fichiers statiques tels que les images, css, js...)
+
+  * `base_url()`, raccourci sur `req.base_url()`
+
+  * `url_quote(value)`, version *unicode safe* de de la fonction `urllib.quote`
+
+:Manipulation de données:
+
+  * `etype_rset(etype, size=1)`, raccourci vers `vreg.etype_rset()`
+
+  * `eid_rset(eid, rql=None, descr=True)`, retourne un objet result set pour
+    l'eid donné
+  * `entity(row, col=0)`, retourne l'entité correspondant à la position données
+    du "result set" associé à l'objet
+
+  * `complete_entity(row, col=0, skip_bytes=True)`, équivalent à `entity` mais
+    appelle également la méthode `complete()` sur l'entité avant de la retourner
+
+:Formattage de données:
+  * `format_date(date, date_format=None, time=False)`
+  * `format_time(time)`,
+
+:Et encore...:
+
+  * `external_resource(rid, default=_MARKER)`, accède à une valeur définie dans
+    le fichier de configuration `external_resource`
+    
+  * `tal_render(template, variables)`, 
+
+
+**NOTE IMPORTANTE**
+Lorsqu'on hérite d'`AppObject` (même indirectement), il faut **toujours**
+utiliser **super()** pour récupérer les méthodes et attributs des classes
+parentes, et pas passer par l'identifiant de classe parente directement.
+(sous peine de tomber sur des bugs bizarres lors du rechargement automatique
+des vues). Par exemple, plutôt que d'écrire::
+
+      class Truc(PrimaryView):
+          def f(self, arg1):
+              PrimaryView.f(self, arg1)
+
+Il faut écrire::
+      
+      class Truc(PrimaryView):
+          def f(self, arg1):
+              super(Truc, self).f(arg1)
+
+
+XXX FILLME diagramme interaction application/controller/template/view
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/doc/devmanual_fr/chap_configuration_instance.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,160 @@
+Configuration d'une instance
+============================
+
+À la création d'une instance, un fichier de configuration est généré dans ::
+
+   $(CW_REGISTRY)/<instance>/<nom configuration>.conf
+
+par exemple ::
+
+   /etc/cubicweb.d/jpl/all-in-one.conf
+
+C'est un simple fichier texte au format INI. Dans la description suivante,
+chaque nom d'option est préfixé de sa section et suivi de sa valeur par défaut
+le cas échéant, e.g. "`<section>.<option>` [valeur]".
+
+
+Configuration du serveur web
+----------------------------
+:`web.auth-mode` [cookie]: 
+   mode d'authentification, cookie ou http
+:`web.realm`: 
+   realm de l'application en mode d'authentification http
+:`web.http-session-time` [0]:
+   délai d'inactivité d'une session HTTP avant sa fermeture automatique. Durée
+   en secondes, 0 signifiant pas d'expiration (ou plus exactement lors de la
+   fermeture du navigateur du client)
+
+:`main.anonymous-user`, `main.anonymous-password`:
+   login et mot de passe à utiliser pour se connecter au serveur RQL lors des
+   connexions HTTP anonymes. Il faut que le compte EUser associé existe.
+
+:`main.base-url`:
+   url de base du site, à utiliser pour générer les urls des pages web
+
+Configuration https
+```````````````````
+Il est possible de rendre un site accessible en http pour les connexions 
+anonymes et en https pour les utilisateurs authentifiés. Il faut pour cela
+utiliser apache (par ex.) pour la redirection et la variable `main.https-url` du
+fichier de configuration.
+
+:Exemple:
+
+  pour une redirection apache d'un site accessible via `http://localhost/demo`
+  et `https://localhost/demo` et qui tourne en réalité sur le port 8080, il 
+  faut avoir pour la version http : ::
+
+    RewriteCond %{REQUEST_URI} ^/demo
+    RewriteRule ^/demo$ /demo/
+    RewriteRule ^/demo/(.*) http://127.0.0.1:8080/$1 [L,P]
+  
+  et pour la version https : ::
+
+    RewriteCond %{REQUEST_URI} ^/demo
+    RewriteRule ^/demo$ /demo/
+    RewriteRule ^/demo/(.*) http://127.0.0.1:8080/https/$1 [L,P]
+
+
+  et on aura dans le fichier all-in-one.conf de l'instance : ::
+
+    base-url = http://localhost/demo
+    https-url = `https://localhost/demo`
+
+Configuration de l'interface web
+--------------------------------
+:`web.embed-allowed`:
+   expression régulière correspondant aux sites pouvant être "incorporé" dans
+   le site (controleur 'embed')
+:`web.submit-url`:
+   url à laquelle les bugs rencontrés dans l'application peuvent être posté
+
+
+Configuration du serveur RQL
+----------------------------
+:`main.host`:
+   nom de l'hôte s'il ne peut être détecté correctement
+:`main.pid-file`:
+   fichier où sera écrit le pid du serveur
+:`main.uid`:
+   compte utilisateur à utiliser pour le lancement du serveur quand il est
+   lancé en root par init
+:`main.session-time [30*60]`:
+   temps d'expiration d'une session RQL
+:`main.query-log-file`:
+   fichier dans lequel écrire toutes les requêtes RQL éxécutées par le serveur
+
+
+Configuration Pyro pour l'instance
+-----------------------------------
+Coté serveur web :
+
+:`pyro-client.pyro-application-id`: 
+   identifiant pyro du serveur RQL (e.g. le nom de l'instance)
+
+Coté serveur RQL :
+
+:`pyro-server.pyro-port`:
+   numéro de port pyro. Si aucune valeur n'est spécifiée, un port est attribué
+   automatiquement.
+
+Coté serveur RQL et serveur web :
+
+:`pyro-name-server.pyro-ns-host`:
+   nom de l'hôte hébergeant le serveur de nom pyro. Si aucune valeur n'est
+   spécifié, il est localisé par une requête de broadcast
+:`pyro-name-server.pyro-ns-group` [cubicweb]:
+   groupe pyro sous lequel enregistrer l'application
+
+
+Configuration courriel
+----------------------
+Coté serveur RQL et serveur web :
+
+:`email.mangle-emails [no]`:
+   indique si les adresses email doivent être affichées telle quelle ou
+   transformée
+
+Coté serveur RQL :
+
+:`email.smtp-host [mail]`:
+   nom de l'hôte hébergeant le serveur SMTP à utiliser pour le courriel sortant
+:`email.smtp-port [25]`:
+   port du serveur SMTP à utiliser pour le courriel sortant
+:`email.sender-name`:
+   nom à utiliser pour les courriels sortant de l'application
+:`email.sender-addr`:
+   adresse à utiliser pour les courriels sortant de l'application
+:`email.default-dest-addrs`:
+   adresses de destination par défaut, si utilisé par la configuration de la 
+   diffusion du modèle (séparées par des virgules)
+:`email.supervising-addrs`:
+   addresses de destination des courriels de supervision (séparées par des 
+   virgules)
+
+
+Configuration journalisation
+----------------------------
+:`main.log-threshold`:
+   niveau de filtrage des messages (DEBUG, INFO, WARNING, ERROR)
+:`main.log-file`:
+   fichier dans lequel écrire les messages
+
+
+Configuration Eproperties
+-------------------------
+D'autres paramètres de configuration sont sous la forme d'entités `EProperty`
+dans la base de données. Il faut donc les éditer via l'interface web ou par des
+requêtes rql.
+
+:`ui.encoding`:
+   encodage de caractères à utiliser pour l'interface web
+:`navigation.short-line-size`: # XXX should be in ui
+   nombre de caractères maximum pour les affichages "courts"
+:`navigation.page-size`:
+   nombre d'entités maximum à afficher par page de résultat
+:`navigation.related-limit`:
+   nombre d'entités liées maximum à afficher sur la vue primaire d'une entité
+:`navigation.combobox-limit`:
+   nombre d'entités non liées maximum à afficher sur les listes déroulantes de
+   la vue d'édition d'une entité
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/doc/devmanual_fr/chap_definition_schema.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,19 @@
+Définition du modèle de données (*schéma*)
+==========================================
+
+Le schéma est l'élément central d'une application d'CubicWeb, définissant le modèle
+de données manipulé. Il est généralement défini à partir de type d'entités
+existants dans la librairie et d'autres spécifiques, généralement décrites dans
+un ou plusieurs fichiers python dans le sous-répertoire `schema` du modèle.
+
+A ce niveau il est important de noter la différence entre type de relation et
+définition de relation : un type de relation est uniquement un nom de relation
+avec éventuellement quelques propriétés supplémentaires (voir plus bas), alors
+qu'une définition de relation est un triplet complet "<type d'entité sujet>
+<type de relation> <type d'entité objet>". Eventuellement un type de relation
+sera créé implicitement si aucun n'est associé à une définition de relation du
+schema.
+
+.. include:: sect_stdlib_schemas.txt
+.. include:: sect_definition_schema.txt
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/doc/devmanual_fr/chap_definition_workflows.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,16 @@
+Définition de workflow
+======================
+On peut mettre une condition rql ou/et un groupe auquel doit appartenir l'utilisateur.
+
+Si on met à la fois un(ou plusieurs) groupe et une condition RQL, il faut que les deux soient respectés.
+
+Si on met plusieurs groupes, il faut que l'utilisateur soit dans un des groupes.
+
+Pour la condition RQL sur une transition, on peut y mettre les substitutions suivantes :
+
+* `%(eid)s`, eid de l'objet
+* `%(ueid)s`, eid de l'utilisateur qui fait la requête
+* `%(seid)s`, eid de l'état courant de l'objet
+
+Dans le script de création d'un workflow, penser à mettre `_()` autour des noms d'états et de transitions
+pour que ceux-ci soient pris en compte par les scripts de gestion des catalogues i18n.
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/doc/devmanual_fr/chap_fondements_erudi.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,71 @@
+Fondements CubicWeb
+===================
+
+Concepts et vocabulaire
+-----------------------
+
+*schéma*
+  le schéma définit le modèle de données d'une application sous forme d'entités
+  et de relations, grâce au package `yams`_. C'est l'élément central d'une
+  application. Il est initialement défini sur le système de fichiers et est
+  stocké dans la base de données lors de la création d'une instance. CubicWeb 
+  fournit un certain nombres de types d'entités inclus systématiquement 
+  car nécessaire au noyau CubicWeb et une librairie de composants devant être
+  inclus explicitement le cas échéant.
+
+*source*
+  une source de données est un conteneur de données quelconque (SGBD, annuaire
+  LDAP...) intégré par l'entrepôt CubicWeb. Un entrepôt possède au moins une source
+  dite "system" contenant le schéma de l'application, l'index plein-texte et
+  d'autres informations vitales au système.
+
+*composant*
+  un composant est un modèle regroupant un ou plusieurs types de données et/ou
+  des vues afin de fournir une fonctionalité précise, ou une application CubicWeb
+  complète utilisant éventuellement d'autres composants. Les différents
+  composants disponibles sur une machine sont installés dans
+  `/usr/share/cubicweb/templates`
+
+*result set*
+  objet encapsulant les résultats d'une requête RQL et des informations sur
+  cette requête.
+
+.. _`Python Remote Object`: http://pyro.sourceforge.net/
+.. _`yams`: http://www.logilab.org/project/name/yams/
+
+
+Structure générale d'une application LAX
+----------------------------------------
+
+Un composant complexe est structuré selon le modèle suivant :
+
+::
+    
+  .
+  |-- app.yaml
+  |-- custom.py
+  |-- data
+  |-- cubicweb/
+  |-- i18n/
+  |-- logilab/
+  |-- main.py
+  |-- mx/
+  |-- rql/
+  |-- schema.py
+  |-- simplejson/
+  |-- tools/
+  |   |-- generate_schema_img.py
+  |   `-- i18ncompile.py
+  |-- views.py
+  |-- yams/
+  `-- yapps/
+        
+
+où :
+
+* ``schema.py`` contient la définition du schéma
+* ``views.py`` contient les définitions des vues
+* ``i18n`` contient les catalogues de messages pour les langues supportées (coté
+  serveur et interface web) 
+* ``data`` contient des fichiers de données arbitraires servis statiquement
+  (images, css, fichiers javascripts)... (coté interface web uniquement)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/doc/devmanual_fr/chap_i18n.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,66 @@
+Internationalisation
+====================
+
+Le système d'internationalisation de l'interface web de cubicweb est basé sur le
+système `GNU gettext`_.
+
+.. _`GNU gettext`: http://www.gnu.org/software/gettext/
+
+Messages à internationaliser
+----------------------------
+
+Marquage des messages à internationaliser
+`````````````````````````````````````````
+Les chaînes de caractères à internationaliser sont marquées par l'appel à la
+fonction `_` *OU* par la méthode équivalente de la requête dans le code python ou
+dans les expressions python de template TAL. 
+
+Dans les templates cubicweb-tal, il est également possible d'insérer une chaine à
+traduire via les balises `i18n:content` et  `i18n:replace`.
+
+De plus des messages correspondant aux entités/relations utilisés par le schéma
+de l'application seront automatiquement ajoutés.
+
+Renvoi d'un message internationalisé lors de la construction d'une page
+```````````````````````````````````````````````````````````````````````
+La fonction *built-in* `_` ne doit servir qu'**à marquer les messages à
+traduire**, non pas à récupérer une traduction. Il faut pour cela utiliser la
+méthode `_` de l'objet requête, sans quoi vous récupérerez l'identifiant de
+message au lieu de sa traduction dans la langue propre à la requête.
+
+
+Gestion des catalogues de traduction
+------------------------------------
+Une fois l'application rendue internationalisable coté code, reste à gérer les
+catalogues de traductions. cubicweb-ctl intègre pour cela les commandes suivantes : 
+
+* `i18nlibupdate`, met à jour les catalogues de messages *de la librairie
+  cubicweb*. Sauf si vous développez sur le framework (et non votre propre
+  application), vous ne devriez pas avoir à utiliser cette commande
+
+* `i18nupdate`, met à jour les catalogues de messages *du composant* (ou de tous
+  les composants). A la suite de cette commande, vous devez mettre à jour les
+  fichiers de traduction *.po* dans le sous-répertoire "i18n" de votre
+  template. Évidemment les traductions précédentes toujours utilisées ont été
+  conservées.
+
+* `i18ncompile`, recompile les catalogues de messages *d'une instance* (ou de
+  toutes les instances) après mise à jour des catalogues de son composant. Cela
+  est effectué automatiquement lors d'une création ou d'une mise à jour. Les
+  catalogues de messages compilés se trouvent dans le répertoire
+  "i18n/<lang>/LC_MESSAGES/cubicweb.mo" de l'application où `lang` est
+  l'identifiant de la langue sur 2 lettres ('en' ou 'fr' par exemple)
+
+
+Le cas classique
+````````````````
+Vous avez ajouté et/ou modifié des messages d'un composant utilisé par votre
+application (en ajoutant une nouvelle vue ou en ayant modifié le schéma par
+exemple) :
+
+1. `cubicweb-ctl i18nupdate <composant>`
+2. éditer les fichiers <composant>/xxx.po pour y ajouter les traductions
+   manquantes (`msgstr` vide) 
+3. `hg ci -m "updated i18n catalogs"`
+4. `cubicweb-ctl i18ncompile <monapplication>`
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/doc/devmanual_fr/chap_manipulation_donnees.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,122 @@
+Manipulation des données stockées
+=================================
+
+Les classes `Entity` et `AnyEntity`
+-----------------------------------
+Pour fournir un comportement spécifique à un type d'entité, il suffit de définir
+une classe héritant de la classe `cubicweb.entities.AnyEntity`. En général il faut
+définir ces classes dans un module du package `entities` d'une application pour
+qu'elles soient disponibles à la fois coté serveur et coté client.
+
+La classe `AnyEntity` est une classe chargée dynamiquement héritant de la classe
+de base `Entity` (`cubicweb.common.entity`). On définit une sous-classe pour
+ajouter des méthodes ou spécialiser les comportements d'un type d'entité donné.
+
+Des descripteurs sont ajoutés à l'enregistrement pour initialiser la classe en
+fonction du schéma :
+
+* on peut accéder aux attributs définis dans le schéma via les attributs de même
+  nom sur les instances (valeur typée)
+
+* on peut accéder aux relations définies dans le schéma via les attributs de même
+  nom sur les instances (liste d'instances d'entité)
+
+Les méthodes définies sur la classe `AnyEntity` ou `Entity` sont les suivantes :
+
+* `has_eid()`, retourne vrai si l'entité a un eid affecté (i.e. pas en cours de
+  création) 
+        
+* `check_perm(action)`, vérifie que l'utilisateur a le droit d'effectuer
+  l'action demandée sur l'entité
+
+:Formattage et génération de la sortie:
+
+  * `view(vid, **kwargs)`, applique la vue donnée à l'entité
+
+  * `absolute_url(**kwargs)`, retourne une URL absolue permettant d'accéder à la
+    vue primaire d'une entité
+
+  * `format(attr)`, retourne le format (type MIME) du champ passé en argument
+
+  * `printable_value(attr, value=_marker, attrtype=None, format='text/html')`, 
+    retourne une chaine permettant l'affichage dans un format donné de la valeur
+    d'un attribut (la valeur est automatiquement récupérée au besoin)
+
+  * `display_name(form='')`, retourne une chaîne pour afficher le type de
+    l'entité, en spécifiant éventuellement la forme désirée ('plural' pour la
+    forme plurielle) 
+
+:Gestion de données:
+
+  * `complete(skip_bytes=True)`, effectue une requête permettant de récupérer d'un
+    coup toutes les valeurs d'attributs manquant sur l'entité
+
+  * `get_value(name)`, récupère la valeur associée à l'attribut passé en argument
+
+  * `related(rtype, x='subject', limit=None, entities=False)`, retourne une liste
+    des entités liées à l'entité courante par la relation donnée en argument
+
+  * `unrelated(rtype, targettype, x='subject', limit=None)`, retourne un result set
+    des entités non liées à l'entité courante par la relation donnée en argument
+    et satisfaisant les contraintes de celle-ci
+
+  * `copy_relations(ceid)`, copie les relations de l'entité ayant l'eid passé en
+    argument sur l'entité courante
+
+  * `last_modified(view)`, retourne la date à laquelle on doit considérer
+    l'objet comme modifié (utilisée par la gestion de cache HTTP)
+
+:Meta-données standard (Dublin Core):
+
+  * `dc_title()`, retourne une chaine unicode correspondant à la méta-donnée
+    'Title' (utilise par défaut le premier attribut non 'meta' du schéma de
+    l'entité) 
+
+  * `dc_long_title()`, comme dc_title mais peut retourner un titre plus détaillé
+
+  * `dc_description(format='text/plain')`, retourne une chaine unicode
+     correspondant à la méta-donnée 'Description' (cherche un attribut
+     'description' par défaut)
+
+  * `dc_authors()`, retourne une chaine unicode correspondant à la méta-donnée
+    'Authors' (propriétaires par défaut)
+
+  * `dc_date(date_format=None)`, retourne une chaine unicode
+     correspondant à la méta-donnée 'Date' (date de modification par défaut)
+            
+:Contrôle du vocabulaire pour les relations:
+  * `vocabulary(rtype, x='subject', limit=None)`
+  * `subject_relation_vocabulary(rtype, limit=None)`
+  * `object_relation_vocabulary(rtype, limit=None)`
+  * `relation_vocabulary(rtype, targettype, x, limit=None)`
+
+
+Les *rtags*
+-----------
+Les *rtags* permettent de spécifier certains comportements propres aux relations
+d'un type d'entité donné (voir plus loin). Ils sont définis sur la classe 
+d'entité via l'attribut `rtags` qui est un dictionnaire dont les clés sont un 
+triplet ::
+
+  <type de relation>, <type d'entité cible>, <position du contexte ("subject" ou "object")>
+
+et les valeurs un `set` ou un tuple de marqueurs définissant des propriétés 
+s'appliquant à cette relation. 
+
+Il est possible de simplifier ce dictionnaire :
+
+* si l'on veut spécifier un seul marqueur, il n'est pas nécessaire d'utiliser
+  un tuple comme valeur, le marqueur seul (chaine de caractères) suffit
+* si l'on s'intéresse uniquement à un type de relation et non à la cible et à la
+  position du contexte (ou que celui-ci n'est pas ambigu), on peut simplement
+  utiliser le nom du type de relation comme clé
+* si l'on veut qu'un marqueur s'applique quel que soit le type d'entité cible, il
+  faut utiliser la chaine `*` comme type d'entité cible
+
+A noter également que ce dictionnaire est *traité à la création de la classe*. 
+Il est automatiquement fusionné avec celui de la ou des classe(s) parentes (pas
+besoin de copier celui de la classe parent pour le modifier). De même modifier
+celui-ci après création de la classe n'aura aucun effet...
+
+
+.. include:: sect_definition_entites.txt
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/doc/devmanual_fr/chap_migration.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,218 @@
+Migration
+=========
+
+Une des idées de base de CubicWeb est la création incrémentale d'application, et
+pour cela de nombreuses actions sont fournies afin de facilement faire évoluer
+une application et tout particulièrement le modèle de données manipulé sans
+perdre les données des instances existantes.
+
+La version courante d'un modèle d'application est donnée dans le fichier
+`__pkginfo__.py` sous forme d'un tuple de 3 entiers.
+
+
+Gestion des scripts de migrations
+---------------------------------
+Les scripts des migrations doivent être placés dans le répertoire `migration` de
+l'application, et nommés de la manière suivante :
+
+  <n° de version X.Y.Z>[_<description>]_<mode>.py
+
+dans lequel : 
+
+* X.Y.Z correspond au n° de version du modèle vers lequel le script permet de
+  migrer,
+
+* le *mode* (entre le dernier "_" et l'extension ".py") indique à quelle partie
+  de l'application (serveur RQL, serveur web) le script s'applique en cas
+  d'installation distribuée. Il peut valoir : 
+
+  * `common`, s'applique aussi bien sur le serveur RQL que sur le serveur web,
+    et met à jour des fichiers sur le disque (migration de fichier de
+    configuration par exemple).
+
+  * `web`, s'applique uniquement sur le serveur web, et met à jour des fichiers
+    sur le disque 
+
+  * `repository`, s'applique uniquement sur le serveur RQL, et met à jour des
+    fichiers sur le disque 
+
+  * `Any`, s'applique uniquement sur le serveur RQL, et met à jour des
+    données en base (migrations de schéma et de données par ex.)
+
+
+Toujours dans le répertoire `migration`, le fichier spécial `depends.map` permet
+d'indiquer que pour migrer vers une version spécifique du modèle, il faut tout
+d'abord avoir migré vers une version donnée de cubicweb. Ce fichier peut contenir
+des commentaires (lignes commençant par un "#"), et une dépendance est notée sur
+une ligne de la manière suivante : ::
+
+  <n° de version du modèle X.Y.Z> : <n° de version cubicweb X.Y.Z>
+
+Par exemple ::
+
+  0.12.0: 2.26.0
+  0.13.0: 2.27.0
+  # 0.14 works with 2.27 <= cubicweb <= 2.28 at least
+  0.15.0: 2.28.0
+
+
+Contexte de base
+----------------
+Les identifiants suivants sont prédéfinis dans les scripts de migration : 
+
+* `config`, configuration de l'instance
+
+* `interactive_mode`, booléen indiquant si le script est éxécuté en mode
+  interactif ou non
+
+* `appltemplversion`, version du modèle d'application de l'instance
+
+* `applcubicwebversion`, version cubicweb de l'instance
+
+* `templversion`, version du modèle d'application installée
+
+* `cubicwebversion`, version cubicweb installée
+
+* `confirm(question)`, fonction posant une question et retournant vrai si
+  l'utilisateur a répondu oui, faux sinon (retourne toujours vrai en mode non
+  interactif) 
+
+* `_`, fonction équivalente à `unicode` permettant de marquer des chaines à
+  internationaliser dans les scripts de migration
+
+Dans les scripts "repository", les identifiants suivant sont également définis :
+
+* `checkpoint`, demande confirmation et effectue un "commit" au point d'appel
+
+* `repo_schema`, schéma persistent de l'instance (i.e. schéma de l'instance en
+  cours de migration)
+
+* `newschema`, schéma installé sur le système de fichier (i.e. schéma de la
+  version à jour du modèle et de cubicweb)
+
+* `sqlcursor`, un curseur SQL pour les très rares cas où il est réellement
+  nécessaire ou avantageux de passer par du sql
+
+* `repo`, l'objet repository
+
+                        
+Migration de schéma
+-------------------
+Les fonctions de migration de schéma suivantes sont disponibles dans les scripts
+"repository" : 
+
+* `add_attribute(etype, attrname, attrtype=None, commit=True)`, ajoute un
+  nouvel attribut à un type d'entité existante. Si le type de celui-ci n'est pas
+  spécifié il est extrait du schéma à jour.
+        
+* `drop_attribute(etype, attrname, commit=True)`, supprime un
+  attribut à un type d'entité existante.
+
+* `rename_attribute(etype, oldname, newname, commit=True)`, renomme un attribut
+            
+* `add_entity_type(etype, auto=True, commit=True)`, ajoute un nouveau type
+  d'entité. Si `auto` est vrai, toutes les relations utilisant ce type d'entité
+  et ayant un type d'entité connu à l'autre extrémité vont également être
+  ajoutées.
+
+* `drop_entity_type(etype, commit=True)`, supprime un type d'entité et toutes
+  les relations l'utilisant.
+
+* `rename_entity_type(oldname, newname, commit=True)`, renomme un type d'entité
+            
+* `add_relation_type(rtype, addrdef=True, commit=True)`, ajoute un nouveau type
+  de relation. Si `addrdef` est vrai, toutes les définitions de relation de ce
+  type seront également ajoutées.
+
+* `drop_relation_type(rtype, commit=True)`, supprime un type de relation et
+  toutes les définitions de ce type.
+
+* `rename_relation(oldname, newname, commit=True)`, renomme une relation.
+
+* `add_relation_definition(subjtype, rtype, objtype, commit=True)`, ajoute une
+  définition de relation.
+
+* `drop_relation_definition(subjtype, rtype, objtype, commit=True)`, supprime
+  une définition de relation.
+
+* `synchronize_permissions(ertype, commit=True)`, synchronise les permissions
+  d'un type d'entité ou de relation
+        
+* `synchronize_rschema(rtype, commit=True)`, synchronise les propriétés et
+  permissions d'un type de relation.
+                
+* `synchronize_eschema(etype, commit=True)`, synchronise les propriétés et
+  permissions d'un type d'entité.
+    
+* `synchronize_schema(commit=True)`, synchronise le schéma persistent avec le
+  schéma à jour (mais sans ajouter ni supprimer de nouveaux types d'entités ou
+  de relations ni de définitions de relation).
+        
+* `change_relation_props(subjtype, rtype, objtype, commit=True, **kwargs)`, change
+  les propriétés d'une definition de relation en utilisant les arguments nommés
+  pour les propriétés à changer.
+
+* `set_widget(etype, rtype, widget, commit=True)`, change le widget à utiliser
+  pour la relation <rtype> du type d'entité <etype>
+
+* `set_size_constraint(etype, rtype, size, commit=True)`, change la contrainte
+  de taille pour la relation <rtype> du type d'entité <etype>
+
+
+Migration de données
+--------------------
+Les fonctions de migration de données suivantes sont disponibles dans les scripts
+"repository" : 
+
+* `rqlexec(rql, kwargs=None, cachekey=None, ask_confirm=True)`, éxécute une
+  requête rql arbitraire, d'interrogation ou de modification. Un objet result
+  set est retourné.
+
+* `rqlexecall(rqliter, cachekey=None, ask_confirm=True)`, éxécute une série
+  de requêtes rql arbitraires, d'interrogation ou de modification. rqliter est
+  un itérateur retournant des couples (rql, kwargs). Le result set de la
+  dernière requête éxécutée est retourné.
+
+* `add_entity(etype, *args, **kwargs)`, ajoute une nouvelle entité du type
+  données. La valeur des attributs et relations est spécifiée en utilisant les
+  arguments nommés et positionnels.
+
+  
+Création de workflow
+--------------------
+Les fonctions de création de workflow suivantes sont disponibles dans les scripts
+"repository" : 
+
+* `add_state(name, stateof, initial=False, commit=False, **kwargs)`, ajoute un
+  nouvel état de workflow
+    
+* `add_transition(name, transitionof, fromstates, tostate, requiredgroups=(), commit=False, **kwargs)`, 
+  ajoute une nouvelle transition de workflow
+
+Migration de configuration
+--------------------------
+Les fonctions de migration de configuration suivantes sont disponibles dans tout
+les scripts : 
+
+* `option_renamed(oldname, newname)`, indique qu'une option a été renommée
+
+* `option_group_change(option, oldgroup, newgroup)`, indique qu'une option a
+  changé de groupe
+
+* `option_added(oldname, newname)`, indique qu'une option a été ajoutée
+
+* `option_removed(oldname, newname)`, indique qu'une option a été supprimée
+
+
+Autres fonctions de migration
+-----------------------------
+Ces fonctions ne sont utilisés que pour des opérations de bas niveau
+irréalisables autrement ou pour réparer des bases cassées lors de session
+interactive. Elles sont disponibles dans les scripts "repository".
+
+* `sqlexec(sql, args=None, ask_confirm=True)`, éxécute une requête sql
+  arbitraire, à n'utiliser 
+
+* `add_entity_type_table(etype, commit=True)`
+* `add_relation_type_table(rtype, commit=True)`
+* `uninline_relation(rtype, commit=True)`
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/doc/devmanual_fr/chap_mise_en_place_environnement.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,87 @@
+Mise en place d'un environnement de développement CubicWeb
+==========================================================
+
+.. include:: sect_mercurial.txt
+.. include:: sect_installation.txt
+.. include:: sect_cubicweb-ctl.txt
+
+
+
+Création d'un composant
+-----------------------
+Commençons par créer un squelette qui nous servira de base au développement de
+notre composant ou application ::
+
+  cd ~/hg
+  cubicweb-ctl newtemplate moncomposant
+  # répondre aux questions
+  hg init moncomposant
+  cd moncomposant
+  hg add .
+  hg ci
+
+A partir de là si tout va bien, votre composant devrait être affiché par
+`cubicweb-ctl list` dans la section *Avaible components*, si ce n'est pas le cas
+revoir la section `Configuration de l'environnement`_.
+
+
+Création d'une instance de développement
+----------------------------------------
+
+Maintenant que nous avons notre squelette de modèle, on peut en créer une
+instance afin de voir ce que tout ça donne dans un simple navigateur web.
+Nous allons utiliser une configuration `all-in-one` afin de simplifier les
+choses ::
+
+  cubicweb-ctl create all-in-one moncomposant moninstance
+
+Une série de questions vont être posées, la réponse par défaut est généralement
+suffisante. Vous pourrez de toute façon modifier la configuration par la suite
+en éditant les fichiers générés. Lorsqu'un login/mot de passe d'accès au sgbd
+vous est demandé, il est recommandé d'utiliser l'utilisateur créé lors de la
+`Configuration Postgres`_.
+
+Il est important de distinguer ici l'utilisateur utilisé pour accéder au sgbd,
+et l'utilisateur utilisé pour s'authentifier dans l'application cubicweb. Lorsque
+l'application cubicweb démarre, elle utilise le login/mot de passe sgbd pour
+récupérer le schéma et gérer les transactions bas-niveau. En revanche, lorsque
+`cubicweb-ctl create` vous demande un login/mot de passe `manager` pour cubicweb, il
+s'agit d'un utilisateur qui sera créé dans l'application `cubicweb` pour pouvoir
+s'y connecter dans un premier temps et l'administrer. Il sera par la suite possible
+de créer des utilisateurs différents pour l'application.
+
+A l'issue de cette commande, la définition de votre instance se trouve dans
+*~/etc/cubicweb.d/moninstance/*. Pour la lancer, il suffit de taper ::
+
+  cubicweb-ctl start -D moninstance
+
+L'option `-D` indique le *debug mode* : l'instance ne passe pas en mode serveur
+et ne se déconnecte pas du terminal, ce qui simplifie le dépannage en cas de non
+démarrage de l'instance. Vous pouvez ensuite aller voir ce que ça donne en
+pointant votre navigateur sur l'url `http://localhost:8080` (le n° de port
+dépend de votre configuration). Pour vous authentifier vous pouvez utiliser le
+login/mot de passe administrateur que vous avez spécifié lors de la création de
+l'instance.
+
+Pour arrêter l'instance, un Ctrl-C dans la fenêtre où vous l'avez lancée
+suffit. Si l'option `-D` a été omise, il faut taper ::
+
+  cubicweb-ctl stop moninstance
+
+Voilà, tout est en place pour démarrer le développement du modèle...
+
+
+Utilisation de cubicweb-liveserver
+----------------------------------
+
+Afin de tester rapidement un nouveau composant, on peut également
+utiliser le script `cubicweb-liveserver` qui permet de créer une
+application en mémoire (utilisant une base de données SQLite par
+défaut) et la rendre accessible via un serveur web::
+
+  cubicweb-liveserver moncomposant
+
+ou bien, pour utiliser une base de données existante (SQLite ou postgres)::
+
+  cubicweb-liveserver -s monfichier_sources moncomposant
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/doc/devmanual_fr/chap_rql.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,196 @@
+Le langage RQL (Relation Query Language)
+========================================
+
+Présentation
+------------
+* langage mettant l'accent sur le parcours de relations.
+* Les attributs sont considérés comme des cas particuliers de relations.
+* RQL s'inspire de SQL mais se veut plus haut niveau.
+* Une connaissance du schéma CubicWeb définissant l'application est nécessaire.
+
+
+Les différents types de requêtes
+--------------------------------
+Recherche (`Any`)
+  interroger l'entrepôt afin d'extraire des entités et/ou des attributs
+  d'entités.
+
+Insertion (`INSERT`)
+  insérer de nouvelles entités dans la base.
+
+Mise à jour d'entités, création de relations (`SET`)
+  mettre à jours des entités existantes dans la base, ou de créer des
+  relations entres des entités existantes.
+
+Suppression d'entités ou de relation (`DELETE`)
+  supprimer des entités et relations existantes dans la base.
+
+
+Variables et typage
+-------------------
+Les entités et valeurs à parcourir et / ou sélectionner sont représentées dans
+la requête par des *variables* qui doivent être écrites en majuscule.
+
+Les types possibles pour chaque variable sont déduits à partir du schéma en
+fonction des contraintes présentes dans la requête.
+
+On peut contraindre les types possibles pour une variable à l'aide de la
+relation spéciale `is`.
+
+Types de bases
+--------------
+* `String` (littéral: entre doubles ou simples quotes).
+* `Int`, `Float` (le séparateur étant le '.').
+* `Date`, `Datetime`, `Time` (littéral: chaîne YYYY/MM/DD[ hh:mm] ou mots-clés
+  `TODAY` et `NOW`).
+* `Boolean` (mots-clés `TRUE` et `FALSE`).
+* mot-clé `NULL`.
+
+Opérateurs
+----------
+* Opérateurs logiques : `AND`, `OR`, `,`.
+* Opérateurs mathématiques: `+`, `-`, `*`, `/`.
+* Operateur de comparaisons: `=`, `<`, `<=`, `>=`, `>`, `~=`, `LIKE`, `IN`.
+
+  * L'opérateur `=` est l'opérateur par défaut.
+
+  * L'opérateur `LIKE` / `~=` permet d'utiliser le caractère `%` dans une chaine
+    de caractère pour indiquer que la chaîne doit commencer ou terminer par un
+    préfix/suffixe::
+    
+      Any X WHERE X nom ~= 'Th%'
+      Any X WHERE X nom LIKE '%lt'
+
+  * L'opérateur `IN` permet de donner une liste de valeurs possibles::
+
+      Any X WHERE X nom IN ('chauvat', 'fayolle', 'di mascio', 'thenault')
+
+Requête de recherche
+--------------------
+
+  [`DISTINCT`] <type d'entité> V1(, V2)\*
+  [`GROUPBY` V1(, V2)\*]  [`ORDERBY` <orderterms>]
+  [`WHERE` <restriction>] 
+  [`LIMIT` <value>] [`OFFSET` <value>]
+
+:type d'entité:
+  Type de la ou des variables sélectionnées.
+  Le type spécial `Any`, revient à ne pas spécifier de type.
+:restriction:
+  liste des relations à parcourir sous la forme 
+    `V1 relation V2|<valeur constante>`
+:orderterms:
+  Définition de l'ordre de selection : variable ou n° de colonne suivie de la
+  méthode de tri (`ASC`, `DESC`), ASC étant la valeur par défaut.
+:note pour les requêtes groupées:
+  Pour les requêtes groupées (i.e. avec une clause `GROUPBY`), toutes les
+  variables sélectionnées doivent être soit groupées soit agrégées.
+
+Exemples - recherche
+`````````````````````
+::
+
+      Any X WHERE X eid 53
+      Personne X
+      Personne X WHERE X travaille_pour S, S nom "logilab"
+      Any E,COUNT(X) GROUPBY E ORDERBY EN WHERE X is E, E name EN 
+      Any E,COUNT(X) GROUPBY E ORDERBY 2 WHERE X is E 
+
+
+Fonctionnalités avancées
+````````````````````````
+* Fonctions d'aggrégat : `COUNT`, `MIN`, `MAX`, `SUM`.
+* Fonctions sur les chaines :`UPPER`, `LOWER`.
+* Relations optionnelles :
+
+  * Elles permettent de sélectionner des entités liées ou non à une autre.
+
+  * Il faut utiliser le `?` derrière la variable pour spécifier que la relation
+    vers celle-ci est optionnelle :
+
+    - Anomalies d'un projet attachées ou non à une version ::
+
+        Any X,V WHERE X concerns P, P eid 42, X corrected_in V?
+
+    - Toutes les fiches et le projet qu'elles documentent le cas échéant ::
+
+        Any C,P WHERE C is Card, P? documented_by C
+
+Négation
+````````
+* Une requête du type `Document X WHERE NOT X owned_by U` revient à dire "les
+  documents n'ayant pas de relation `owned_by`". 
+* En revanche la requête `Document X WHERE NOT X owned_by U, U login "syt"`
+  revient à dire "les  documents n'ayant pas de relation `owned_by` avec
+  l'utilisateur syt". Ils peuvent avoir une relation "owned_by" avec un autre
+  utilisateur.
+
+
+Requête d'insertion
+-------------------
+   `INSERT` <type d'entité> V1(, <type d'entité> V2)\* `:` <assignements>
+   [`WHERE` <restriction>] 
+
+:assignements:
+  liste des relations à assigner sous la forme `V1 relation V2|<valeur constante>`
+
+La restriction permet de définir des variables utilisées dans les assignements.
+
+Attention, si une restriction est spécifiée, l'insertion est effectuée *pour
+chaque ligne de résultat renvoyée par la restriction*.
+
+Exemples - insertion
+`````````````````````
+* Insertion d'une nouvelle personne nommée 'bidule'::
+
+       INSERT Personne X: X nom 'bidule'
+
+* Insertion d'une nouvelle personne nommée 'bidule', d'une autre nommée
+  'chouette' et d'une relation 'ami' entre eux::
+
+       INSERT Personne X, Personne Y: X nom 'bidule', Y nom 'chouette', X ami Y
+
+* Insertion d'une nouvelle personne nommée 'bidule' et d'une relation 'ami' avec
+  une personne existante nommée 'chouette'::
+
+       INSERT Personne X: X nom 'bidule', X ami Y WHERE Y nom 'chouette'
+
+
+Requête de mise à jour
+----------------------
+   `SET` <assignements>
+   [`WHERE` <restriction>] 
+
+Attention, si une restriction est spécifiée, la mise à jour est effectuée *pour
+chaque ligne de résultat renvoyée par la restriction*.
+
+Exemples - mise à jour 
+````````````````````````
+* Renommage de la personne nommée 'bidule' en 'toto', avec modification du
+  prénom::
+
+       SET X nom 'toto', X prenom 'original' WHERE X is 'Person', X nom 'bidule'
+
+* Insertion d'une relation de type 'connait' entre les objets reliés par la
+  relation de type 'ami'::
+
+       SET X know Y WHERE X ami Y
+
+Requête de suppression
+----------------------
+   `DELETE` (<type d'entité> V) | (V1 relation v2),...
+   [`WHERE` <restriction>] 
+
+Attention, si une restriction est spécifiée, la suppression est effectuée *pour
+chaque ligne de résultat renvoyée par la restriction*.
+
+Exemples
+````````
+* Suppression de la personne nommé 'toto'::
+
+       DELETE Person X WHERE X nom 'toto'
+
+* Suppression de toutes les relations de type 'ami' partant de la personne
+  nommée 'toto'::
+
+       DELETE X ami Y WHERE X is 'Person', X nom 'toto'
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/doc/devmanual_fr/chap_serveur_crochets.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,4 @@
+Les crochets (*hooks*)
+======================
+
+XXX FILLME
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/doc/devmanual_fr/chap_serveur_notification.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,4 @@
+Gestion de notifications
+========================
+
+XXX FILLME
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/doc/devmanual_fr/chap_tests.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,36 @@
+Tests
+=====
+
+Écriture de tests unitaires
+---------------------------
+Le framework de test fournit principalement deux classes de tests dans le module
+`cubicweb.devtools.apptest`:
+
+* `EnvBasedTC`, pour simuler un environnement complet (web + repository)
+* `RepositoryBasedTC`, pour simuler un environnement de repository uniquement
+
+Ces deux classes ont quasiment la même interface et proposent un certain nombre de méthodes
+rendant l'écriture de test puissante et rapide.
+
+XXXFILLME describe API
+
+Dans la plupart des cas, vous allez vouloir hériter de `EnvBasedTC` pour écrire des tests
+unitaires ou fonctionnels pour vos entités, vues, crochets...
+
+
+Test des courriels de notifications
+```````````````````````````````````
+Lors de l'exécution des tests, les courriels potentiellement générés ne sont pas réellement
+envoyés mais se retrouvent dans la liste `MAILBOX` du module `cubicweb.devtools.apptest`. Cette
+liste est remise à zéro au *setUp* de chaque test (par le setUp des classes `EnvBasedTC`
+et `RepositoryBasedTC`).
+
+Vous pouvez donc tester vos notifications en analysant le contenu de cette liste, qui
+contient des objets ayant deux attributs :
+
+* `recipients`, la liste des destinataires
+* `msg`, l'objet email.Message
+
+
+Tests automatiques
+------------------
+XXXFILLME
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/doc/devmanual_fr/chap_ui_gestion_formulaire.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,131 @@
+Gestion de formulaires
+======================
+
+Contrôle de la génération automatique de formulaire pour les entités manipulées
+--------------------------------------------------------------------------------
+XXX FILLME
+
+* les formulaires 'edition' et 'creation'
+
+Le formulaire généré par défaut ne vous convient pas ? Vous n'êtes peut-être pas
+obligé de le refaire à la main ! :)
+
+* rtags primary, secondary, generated, generic,
+  `Entity.relation_category(rtype, x='subject')`
+* inline_view (now a rtag?)
+* spécification widget
+
+
+Fonctionnement du contrôleur d'édition par défaut (id: 'edit')
+--------------------------------------------------------------
+
+Contrôle de l'édition
+`````````````````````
+Prérequis: les paramètres liés aux entités à éditer sont spécifiés de la forme ::
+
+  <nom de champ>:<eid de l'entité>
+
+où l'eid de l'entité pourra être une lettre dans le cas d'une entité à créer. On
+dénommera ces paramètres comme *qualifié*.
+
+1. récupération des entités à éditer en cherchant les paramètres de formulaire
+   commençant par 'eid:' ayant également un paramètre '__type' associé
+   (également *qualifié* par l'eid évidemment)
+
+2. pour tous les attributs et relations de chaque entité à éditer
+
+   1. recherche d'un paramètre 'edits-<nom relation>' ou 'edito-<nom relation>'
+      qualifié dans le cas d'une relation dont l'entité est objet
+   2. si trouvé, la valeur récupérée est considérée comme la valeur originale
+      pour cette relation, et on cherche la (ou les) nouvelle(s) valeur(s) dans
+      le paramètre <nom relation> (qualifié)
+   3. si la valeur est différente de l'originale, une requête de modification en
+      base est effectuée
+
+3. pour chaque entité à éditer
+
+   1. si un paramètre `__linkto` qualifié est spécifié, sa valeur doit être une
+      chaine (ou une liste de chaine) de la forme : ::
+
+        <relation type>:<eids>:<target>
+
+      où <target> vaut 'subject' ou 'object' et chaque eid peut-être séparé d'un
+      autre par un '_'. Target spécifie *l'entité éditée* est sujet ou objet de la
+      relation et chaque relation ainsi spécifiée sera insérée.
+
+   2. si un paramètre `__cloned_eid` qualifié est spécifié pour une entité, les
+      relations de l'entité spécifiée en valeur de cette argument sont copiées sur
+      l'entité éditée
+
+
+   3. si un paramètre `__delete` qualifié est spécifié, sa valeur doit être une
+      chaine (ou une liste de chaine) de la forme : ::
+
+	<subject eids>:<relation type>:<object eids>
+
+      où chaque eid sujet ou objet peut-être séparé d'un autre par un '_'. Chaque
+      relation ainsi spécifiée sera supprimée.
+
+   4. si un paramètre `__insert` qualifié est spécifié, sa valeur doit être de
+      même format que pour `__delete`, mais chaque relation ainsi spécifiée sera 
+      insérée.
+
+4. si les paramètres `__insert` et/ou  `__delete` sont trouvés non qualifiés,
+   ils sont interprétés comme décrit ci-dessus (quelque soit le nombre d'entité
+   édité)
+
+5. si aucune entité n'est éditée mais que le formulaire contient les paramètres
+   `__linkto` et `eid`, celui-ci est interprété en prenant la valeur spécifié
+   par le paramètre `eid` pour désigner l'entité sur laquelle ajouter les
+   relations
+
+
+A noter que :
+
+* si le paramètre `__action_delete` est trouvé, toutes les entités comme
+  spécifiées à éditer seront supprimées
+
+* si le paramètre `__action_cancel` est trouvé, aucune action n'est effectuée
+
+* si le paramètre `__action_apply` est trouvé, l'édition est effectuée
+  normalement mais la redirection sera effectuée sur le formulaire (cf `Contrôle
+  de la redirection`_)
+
+* le paramètre `__method` est également supporté comme sur le template principal
+  (XXX not very consistent, maybe __method should be dealed in the view controller) 
+
+* si aucune entité à éditer n'est trouvée et qu'il n'y a pas de paramètre
+  `__action_delete`, `__action_cancel`, `__linkto`, `__delete` ou `__insert`,
+  une erreur est levée
+
+* placer dans le formulaire le paramètre `__message` permettra d'utiliser la
+  valeur de ce paramètre comme message d'information à l'utilisateur une fois
+  l'édition effectuée.
+
+
+Contrôle de la redirection
+``````````````````````````
+Une fois que l'édition s'est bien passé, reste un problème : c'est bien beau
+tout ça, mais où qu'on va maintenant ?? Si rien n'est spécifié, le controlleur
+se débrouille, mais comme il fait pas toujours ce qu'on voudrait, on peut
+controller ça en utilisant les paramètres suivant :
+
+* `__redirectpath`: chemin de l'url (relatif à la racine du site, sans paramètre
+  de formulaire
+  
+* `__redirectparams`: paramètres de formulaires à ajouter au chemin
+  
+* `__redirectrql`: requête RQL de redirection
+
+* `__redirectvid`: identifiant de vue de redirection
+
+* `__errorurl`: url du formulaire original, utilisé pour la redirection en cas
+  d'erreur de validation pendant l'édition. Si celui-ci n'est pas spécifié, une
+  page d'erreur sera présentée plutot qu'un retour sur le formulaire (qui est le
+  cas échéant responsable d'afficher les erreurs)
+
+* `__form_id`: identifiant de vue du formulaire original, utilisée si
+  `__action_apply` est trouvé
+
+En général on utilise soit `__redirectpath` et `__redirectparams` soit
+`__redirectrql` et `__redirectvid`.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/doc/devmanual_fr/chap_ui_js_json.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,14 @@
+AJAX
+====
+JSON bla  bla
+XXX FILLME
+
+
+Le contrôleur 'json'
+--------------------
+XXX FILLME
+
+
+API Javascript
+--------------
+XXX FILLME
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/doc/devmanual_fr/chap_visualisation_donnees.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,117 @@
+Définition de vues
+==================
+
+Les classes de base des vues
+----------------------------
+
+La classe `View` (`cubicweb.common.view`)
+``````````````````````````````````````````
+Un vue écrit dans son flux de sortie via son attribut `w` (`UStreamIO`).
+
+L'interface de base des vues est la suivante :
+
+* `dispatch(**context)`, appelle ("rend") la vue en appellent `call` ou
+  `cell_call` en fonction des arguments passé
+* `call(**kwargs)`, appelle la vue pour un result set complet ou nul
+* `cell_call(row, col, **kwargs)`, appelle la vue pour une cellule donnée d'un
+  result set
+* `url()`, retourne l'url permettant d'obtenir cette vue avec le result set en
+  cours 
+* `view(__vid, rset, __fallback_vid=None, **kwargs)`, appelle la vue
+  d'identifiant `__vid` sur le result set donné. Il est possible de donner un
+  identifiant de vue de "fallback" qui sera utilisé si la vue demandée n'est
+  pas applicable au result set
+  
+* `wview(__vid, rset, __fallback_vid=None, **kwargs)`, pareil que `view` mais
+  passe automatiquement le flux en argument
+  
+* `html_headers()`, retourne une liste d'en-tête HTML à placer par le template
+  principal 
+
+* `page_title()`, retourne le titre à utiliser dans l'en tête HTML `title`
+
+* `creator(eid)`, retourne l'eid et le login du créateur de l'entité ayant
+  l'eid passé en argument
+
+Autres classes de base :
+
+* `EntityView`, vue s'appliquant à aux lignes ou cellule contenant une entité
+  (eg un eid)
+* `StartupView`, vue de départ n'ayant pas besoin de result set
+* `AnyRsetView`, vue s'appliquant à n'importe quelle result set
+
+
+Les templates ou patron
+-----------------------
+
+Les patrons (ou *template*) sont des cas particulier de vue ne dépendant a
+priori pas d'un result set. La classe de base `Template` (`cubicweb.common.view`)
+est une classe dérivée de la classe `View`.
+
+Pour construire une page HTML, un *template principal* est utilisé. Généralement
+celui possédant l'identifiant 'main' est utilisé (ce n'est pas le cas lors
+d'erreur dans celui-ci ou pour le formulaire de login par exemple). Ce patron
+utilise d'autres patrons en plus des vues dépendants du contenu pour générer la
+page à renvoyer.
+
+C'est ce template qui est chargé :
+
+1. d'éxécuter la requête RQL des données à afficher le cas échéant
+2. éventuellement de déterminer la vue à utiliser pour l'afficher si non
+   spécifiée
+3. de composer la page à retourner
+
+
+Le patron principal par défaut (`cubicweb.web.views.basetemplates.TheMainTemplate`)
+------------------------------------------------------------------------------------
+
+Le template principal par défaut construit la page selon la décomposition
+suivante :
+
+.. image:: main_template_layout.png
+
+Le rectangle contenant le `view.dispatch()` représente l'emplacement où est
+inséré la vue de contenu à afficher. Les autres représentent des sous-templates
+appelé pour construire la page. Les implémentations par défaut de tout ces
+templates sont dans le module `cubicweb.web.views.basetemplates`. Vous pouvez
+évidemment surcharger l'un des sous-templates pour modifier l'aspect visuel
+d'une partie désirée de la page.
+
+On peut également contrôler certains comportements du template principal à
+l'aide des paramètres de formulaire suivante :
+
+* `__notemplate`, si présente (quelque soit la valeur associée), seule la vue de
+  contenu est renvoyée
+* `__force_display`, si présente et contient une valeur non nulle, pas de
+  navigation quelque soit le nombre d'entités à afficher
+* `__method`, si le result set à afficher ne contient qu'une entité et que ce
+  paramètre est spécifié, celui-ci désigne une méthode à appeler sur l'entité
+  en lui donnant en argument le dictionnaire des paramètres de formulaire, avant
+  de reprendre le comportement classique (s'insère entre les étapes 1. et
+  2. décrites ci-dessus)
+
+
+.. include:: sect_stdlib_vues.txt
+
+
+Vues xml, binaires...
+---------------------
+Pour les vues générant autre chose que du html (une image générée dynamiquement par
+exemple), et qui ne peuvent donc généralement pas être incluse dans la page
+HTML générée par le template principal (voir ci-dessus), il faut :
+
+* placer l'attribut `templatable` de la classe à `False`
+* indiquer via l'attribut `content_type` de la classe le type MIME généré par la
+  vue 'application/octet-stream'
+
+Pour les vues générant un contenu binaire (une image générée dynamiquement par
+exemple), il faut également placer l'attribut `binary` de la classe à `True` (ce
+qui implique `templatable == False`) afin que l'attribut `w` de la vue soit
+remplacé par un flux binaire plutôt que unicode.
+
+
+Quelques trucs (X)HTML à respecter
+----------------------------------
+Certains navigateurs (dont firefox) n'aime pas les `<div>` vides (par vide
+j'entend sans contenu dans la balise, il peut y avoir des attributs), faut
+toujours mettre `<div></div>` même s'il n'y a rien dedans, et non `<div/>`. 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/doc/devmanual_fr/index.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,31 @@
+==========================================
+Développement d'applications avec CubicWeb
+==========================================
+
+
+:Author: Logilab
+:Organization: Logilab
+
+.. contents::
+
+.. include:: chap_fondements_cubicweb.txt
+.. include:: chap_mise_en_place_environnement.txt
+.. include:: chap_rql.txt
+.. include:: chap_definition_schema.txt
+.. include:: chap_definition_workflows.txt
+.. include:: chap_bases_framework_cubicweb.txt
+.. include:: chap_visualisation_donnees.txt
+.. include:: chap_manipulation_donnees.txt
+.. include:: chap_ui_gestion_formulaire.txt
+.. include:: chap_ui_js_json.txt
+.. include:: chap_autres_composants_ui.txt
+.. include:: chap_serveur_crochets.txt
+.. include:: chap_serveur_notification.txt
+
+.. include:: chap_tests.txt
+.. include:: chap_i18n.txt
+.. include:: chap_migration.txt
+
+.. include:: chap_configuration_instance.txt
+
+XXX: XXX FILLME, CSS, API sécurité
Binary file goa/doc/devmanual_fr/main_template_layout.dia has changed
Binary file goa/doc/devmanual_fr/main_template_layout.png has changed
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/doc/devmanual_fr/makefile	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,21 @@
+MKHTMLOPTS=--doctype book --param toc.section.depth=1  --target html --stylesheet single-file
+SRC=.
+
+MKPDFOPTS=--doctype book --param toc.section.depth=2  --target pdf --stylesheet standard
+
+TXTFILES:= $(wildcard *.txt)
+TARGET := $(TXTFILES:.txt=.html)
+
+all: index.html
+
+index.html: *.txt
+	mkdoc ${MKHTMLOPTS} index.txt
+
+index.pdf: *.txt
+	mkdoc ${MKPDFOPTS} index.txt
+
+%.html: %.txt
+	mkdoc ${MKHTMLOPTS} $<
+
+clean:
+	rm -f *.html
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/doc/devmanual_fr/sect_definition_entites.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,166 @@
+Paramétrages et extensions spécifiques
+--------------------------------------
+
+Valeurs par défaut dynamiques
+`````````````````````````````
+Il est possible de définir dans le schéma des valeurs par défaut *statiques*.
+Il est également possible de définir des valeurs par défaut *dynamiques* en 
+définissant sur la classe d'entité une méthode `default_<nom attribut>` pour
+un attribut donnée.
+
+
+Contrôle des attributs chargés et du tri par défaut
+```````````````````````````````````````````````````
+* l'attribut de classe `fetch_attrs` permet de définir sur une classe d'entité
+  la liste des noms des attributs ou relations devant être chargés 
+  automatiquement lors de la récupération d'entité(s) de ce type. Dans le cas 
+  des relations, on est limité aux relations *sujets de cardinalité `?` ou `1`*.
+
+* la méthode de classe `fetch_order(attr, var)` prend en argument un nom 
+  d'attribut (ou de relation) et un nom de variable et doit retourner une chaine
+  à utiliser dans la clause "ORDERBY" d'une requête RQL pour trier 
+  automatiquement les listes d'entités de ce type selon cet attribut, ou `None`
+  si l'on ne veut pas de tri sur l'attribut passé en argument. Par défaut les 
+  entités sont triées selon leur date de création
+
+* la méthode de classe `fetch_unrelated_order(attr, var)` est similaire à la 
+  méthode `fetch_order` mais est utilisée essentiellement pour contrôler le tri
+  des listes déroulantes permettant de créer des relations dans la vue d'édition
+  d'une entité
+
+La fonction `fetch_config(fetchattrs, mainattr=None)` permet de simplifier la 
+définition des attributs à précharger et du tri en retournant une liste des 
+attributs à précharger (en considérant ceux de la classe  `AnyEntity`
+automatiquement) et une fonction de tri sur l'attribut "principal" (le 2eme 
+argument si spécifié ou sinon le premier attribut de la liste `fetchattrs`).
+Cette fonction est définie dans le package `cubicweb.entities`.
+
+Par exemple : ::
+
+  class Transition(AnyEntity):
+    """..."""
+    id = 'Transition'
+    fetch_attrs, fetch_order = fetch_config(['name'])
+
+Indique que pour le type d'entité "Transition" il faut précharger l'attribut
+"name" et trier par défaut selon cet attribut.
+
+
+Contrôle des formulaires d'édition
+``````````````````````````````````
+Il est possible de contrôler les attributs/relations dans la vue d'édition
+simple ou multiple à l'aide des *rtags* suivants :
+
+* `primary`, indique qu'un attribut ou une relation doit être incorporé dans
+  les formulaires d'édition simple et multiple. Dans le cas d'une relation,
+  le formulaire d'édition de l'entité liée sera inclus dans le formulaire
+
+* `secondary`, indique qu'un attribut ou une relation doit être incorporé dans
+  le formulaire d'édition simple uniquement. Dans le cas d'une relation,
+  le formulaire d'édition de l'entité liée sera inclus dans le formulaire
+
+* `generic`, indique qu'une relation doit être incorporé dans le formulaire 
+  d'édition simple dans la boite générique d'ajout de relation
+
+* `generated`, indique qu'un attribut est calculé dynamiquement ou autre, et 
+  qu'il ne doit donc pas être présent dans les formulaires d'édition
+
+Au besoin il est possible de surcharger la méthode 
+`relation_category(rtype, x='subject')` pour calculer dynamiquement la catégorie
+d'édition d'une relation.
+
+
+Contrôle de la boîte "add_related"
+``````````````````````````````````
+La boite `add related` est une boite automatique proposant de créer une entité
+qui sera automatiquement liée à l'entité de départ (le contexte dans lequel 
+s'affiche la boite). Par défaut, les liens présents dans cette boite sont 
+calculés en fonction des propriétés du schéma de l'entité visualisée, mais il
+est possible de les spécifier explicitement à l'aide des *rtags* suivants :
+
+* `link`, indique qu'une relation est généralement créée vers une entité
+  existante et qu'il ne faut donc pas faire apparaitre de lien pour cette 
+  relation
+
+* `create`, indique qu'une relation est généralement créée vers de nouvelles
+  entités et qu'il faut donc faire apparaitre un lien pour créer une nouvelle
+  entité et la lier automatiquement
+
+Au besoin il est possible de surcharger la méthode  
+`relation_mode(rtype, targettype, x='subject')` pour calculer dynamiquement la
+catégorie de création d'une relation.
+
+A noter également que si au moins une action dans la catégorie "addrelated" est
+trouvée pour le contexte courant, le fonctionnement automatique est désactivé
+en faveur du fonctionnement explicite (i.e. affichage des actions de la
+catégorie "addrelated" uniquement).
+
+Contrôle des formulaires de filtrage de table
+`````````````````````````````````````````````
+La vue "table" par défaut gère dynamiquement un formulaire de filtrage du
+contenu de celle-ci. L'algorithme est le suivant : 
+
+1. on considère que la première colonne contient les entités à restreindre
+2. on recupère la première entité de la table (ligne 0) pour "représenter"
+   toutes les autres
+3. pour toutes les autres variables définies dans la requête originale :
+
+   1. si la variable est liée à la variable principale par au moins une
+      n'importe quelle relation
+   2. on appelle la méthode `filterform_vocabulary(rtype, x)` sur l'entité
+      et si rien est retourné (ou plus exactement un tuple de valeur `None`,
+      voir ci-dessous) on passe à la variable suivante, sinon un élément de
+      formulaire de filtrage sera créé avec les valeurs de vocabulaire
+      retournées
+
+4. il n'y a pas d'autres limitations sur le rql, il peut comporter des clauses
+   de tris, de groupes... Des fonctions javascripts sont utilisées pour
+   regénérer une requête à partir de la requête de départ et des valeurs
+   séléctionnées dans les filtres de formulaire.
+
+   
+La méthode `filterform_vocabulary(rtype, x, var, rqlst, args, cachekey)` prend
+en argument le nom d'une relation et la "cible", qui indique si l'entité sur
+laquelle la méthode est appellée est sujet ou objet de la relation. Elle doit
+retourner :
+
+* un 2-uple de None si elle ne sait pas gérer cette relation
+
+* un type et une liste contenant le vocabulaire
+
+  * la liste doit contenir des couples (valeur, label)
+  * le type indique si la valeur désigne un nombre entier (`type == 'int'`), une
+    chaîne de  caractères (`type == 'string'`) ou une entité non finale (`type
+    == 'eid'`)
+
+Par exemple dans notre application de gestion de tickets, on veut pouvoir
+filtrés ceux-ci par : 
+
+* type
+* priorité
+* état (in_state)
+* étiquette (tags)
+* version (done_in)
+
+On définit donc la méthode suivante : ::
+
+
+    class Ticket(AnyEntity):
+
+	...
+
+	def filterform_vocabulary(self, rtype, x, var, rqlst, args, cachekey):
+	    _ = self.req._
+	    if rtype == 'type':
+		return 'string', [(x, _(x)) for x in ('bug', 'story')]
+	    if rtype == 'priority':
+		return 'string', [(x, _(x)) for x in ('minor', 'normal', 'important')]
+	    if rtype == 'done_in':
+		rql = insert_attr_select_relation(rqlst, var, rtype, 'num')
+		return 'eid', self.req.execute(rql, args, cachekey)
+	    return super(Ticket, self).filterform_vocabulary(rtype, x, var, rqlst,
+							     args, cachekey)
+
+							     
+NOTE: Le support du filtrage sur les étiquettes et l'état est installé
+automatiquement, pas besoin de le gérer ici.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/doc/devmanual_fr/sect_definition_schema.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,339 @@
+
+Définition d'un type d'entité
+-----------------------------
+
+Un type d'entité est définit par une classe python héritant de `EntityType`. Le
+nom de la classe correspond au nom du type. Ensuite le corps de la classe
+contient la description des attributs et des relations pour ce type d'entité,
+par exemple ::
+
+  class Personne(EntityType):
+    """une personne avec les propriétés et relations nécessaires à mon
+    application"""
+
+    nom = String(required=True, fulltextindexed=True)
+    prenom = String(required=True, fulltextindexed=True)
+    civilite = String(vocabulary=('M', 'Mme', 'Mlle'))
+    date_naiss = Date()
+    travaille_pour = SubjectRelation('Company', cardinality='?*')
+
+* le nom de l'attribut python correspond au nom de l'attribut ou de la relation
+  dans cubicweb.
+
+* tout les types de bases sont disponibles nativement : `String`, `Int`, `Float`,
+  `Boolean`, `Date`, `Datetime`, `Time`, `Byte`.
+
+* Chaque type d'entité a au moins les méta-relations suivantes :
+  - `eid` (`Int`)
+  - `creation_date` (`Datetime`)
+  - `modification_date` (`Datetime`)
+  - `owned_by` (`EUser`)
+  - `is` (`EEType`)
+
+* il est également possible de définir des relations dont le type d'entité est
+  l'objet en utilisant `ObjectRelation` plutôt que `SubjectRelation`
+
+* le premier argument de `SubjectRelation` et `ObjectRelation` donne
+  respectivement le type d'entité objet /sujet de la relation. Cela
+  peut être : 
+
+  * une chaine de caractères correspondant à un type d'entité
+
+  * un tuple de chaines de caractères correspondant à plusieurs types d'entité
+
+  * les chaînes de caractères spéciales suivantes :
+
+    - "**" : tout les types d'entité
+    - "*" : tout les types d'entité non méta
+    - "@" : tout les types d'entité méta mais non "système" (i.e. servant à la
+      description du schema en base)
+
+* il est possible d'utiliser l'attribut possible `meta` pour marquer un type
+  d'entité comme étant "méta" (i.e. servant à décrire / classifier d'autre
+  entités) 
+
+* propriétés optionnelles des attributs et relations : 
+
+  - `description` : chaine de caractères décrivant un attribut ou une
+    relation. Par défaut cette chaine sera utilisée dans le formulaire de saisie
+    de l'entité, elle est donc destinée à aider l'utilisateur final et doit être
+    marquée par la fonction `_` pour être correctement internationalisée.
+
+  - `constraints` : liste de contraintes devant être respecté par la relation
+    (c.f. `Contraintes`_)
+
+  - `cardinality` : chaine de 2 caractères spécifiant la cardinalité de la
+    relation. Le premier caractère donne la cardinalité de la relation sur le
+    sujet, le 2eme sur l'objet. Quand une relation possède plusieurs sujets ou
+    objets possibles, la cardinalité s'applique sur l'ensemble et non un à un (et
+    doit donc à priori être cohérente...). Les valeurs possibles sont inspirées
+    des expressions régulières :
+
+    * `1`: 1..1
+    * `?`: 0..1
+    * `+`: 1..n
+    * `*`: 0..n
+
+  - `meta` : booléen indiquant que la relation est une méta relation (faux par
+    défaut)
+
+* propriétés optionnelles des attributs : 
+
+  - `required` : booléen indiquant si l'attribut est obligatoire (faux par
+    défaut)
+
+  - `unique` : booléen indiquant si la valeur de l'attribut doit être unique
+    parmi toutes les entités de ce type (faux par défaut)
+
+  - `indexed` : booléen indiquant si un index doit être créé dans la base de
+    données sur cette attribut (faux par défaut). C'est utile uniquement si vous
+    savez que vous allez faire de nombreuses recherche sur la valeur de cet
+    attribut. 
+
+  - `default` : valeur par défaut de l'attribut. A noter que dans le cas des
+    types date, les chaines de caractères correspondant aux mots-clés RQL
+    `TODAY` et `NOW` sont utilisables.
+
+  - `vocabulary` : spécifie statiquement les valeurs possibles d'un attribut
+
+* propriétés optionnelles des attributs de type `String` : 
+
+  - `fulltextindexed` : booléen indiquant si l'attribut participe à l'index plein
+    texte (faux par défaut) (*valable également sur le type `Byte`*)
+
+  - `internationalizable` : booléen indiquant si la valeur de cet attribut est
+    internationalisable (faux par défaut) 
+
+  - `maxsize` : entier donnant la taille maximum de la chaine (pas de limite par
+    défaut)  
+
+* propriétés optionnelles des relations : 
+
+  - `composite` : chaîne indiquant que le sujet (composite == 'subject') est
+    composé de ou des objets de la relation. Pour le cas opposé (l'objet est
+    composé de ou des sujets de la relation, il suffit de mettre 'object' comme
+    valeur. La composition implique que quand la relation est supprimé (et donc
+    aussi quand le composite est supprimé), le ou les composés le sont
+    également. 
+
+
+Contraintes
+```````````
+Par défaut les types de contraintes suivant sont disponibles :
+
+* `SizeConstraint` : permet de spécifier une taille minimale et/ou maximale sur
+  les chaines de caractères (cas générique de `maxsize`)
+
+* `BoundConstraint` : permet de spécifier une valeur minimale et/ou maximale sur
+  les types numériques
+
+* `UniqueConstraint` : identique à "unique=True"
+
+* `StaticVocabularyConstraint` : identique à "vocabulary=(...)"
+
+* `RQLConstraint` : permet de spécifier une requête RQL devant être satisfaite
+  par le sujet et/ou l'objet de la relation. Dans cette requête les variables `S`
+  et `O` sont préféfinies respectivement comme l'entité sujet et objet de la
+  relation
+
+* `RQLVocabularyConstraint` : similaire à la précédente, mais exprimant une
+  contrainte "faible", i.e. servant uniquement à limiter les valeurs apparaissant
+  dans la liste déroulantes du formulaire d'édition, mais n'empêchant pas une
+  autre entité d'être séléctionnée
+
+
+Définition d'un type de relation
+--------------------------------
+
+Un type de relation est définit par une classe python héritant de `RelationType`. Le
+nom de la classe correspond au nom du type. Ensuite le corps de la classe
+contient la description des propriétés de ce type de relation, ainsi
+qu'éventuellement une chaine pour le sujet et une autre pour l'objet permettant
+de créer des définitions de relations associées (auquel cas il est possibles de
+donner sur la classe les propriétés de définition de relation explicitées
+ci-dessus), par exemple ::
+
+  class verrouille_par(RelationType):
+    """relation sur toutes les entités applicatives indiquant que celles-ci sont verrouillées"""
+    inlined = True
+    cardinality = '?*'
+    subject = '*'
+    object = 'EUser'
+
+En plus des permissions, les propriétés propres aux types de relation (et donc
+partagés par toutes les définitions de relation de ce type) sont :
+
+* `inlined` : booléen contrôlant l'optimisation physique consistant à stocker la
+  relation dans la table de l'entité sujet au lieu de créer une table spécifique
+  à la relation. Cela se limite donc aux relations dont la cardinalité
+  sujet->relation->objet vaut 0..1 ('?') ou 1..1 ('1')
+
+* `symetric` : booléen indiquant que la relation est symétrique, i.e. "X relation
+   Y" implique "Y relation X"
+
+Dans le cas de définitions de relations simultanée, `sujet` et `object` peuvent
+tout deux valoir la même chose que décrite pour le 1er argument de
+`SubjectRelation` et `ObjectRelation`.
+
+A partir du moment où une relation n'est ni mise en ligne, ni symétrique, et
+ne nécessite pas de permissions particulières, sa définition (en utilisant
+`SubjectRelation` ou `ObjectRelation`) est suffisante.
+
+
+Définition des permissions
+--------------------------
+
+La définition des permissions se fait à l'aide de l'attribut `permissions` des
+types d'entité ou de relation. Celui-ci est un dictionnaire dont les clés sont
+les types d'accès (action), et les valeurs les groupes ou expressions autorisées. 
+
+Pour un type d'entité, les actions possibles sont `read`, `add`, `update` et
+`delete`.
+
+Pour un type de relation, les actions possibles sont `read`, `add`, et `delete`.
+
+Pour chaque type d'accès, un tuple indique le nom des groupes autorisés et/ou
+une ou plusieurs expressions RQL devant être vérifiées pour obtenir
+l'accès. L'accès est donné à partir du moment où l'utilisateur fait parti d'un
+des groupes requis ou dès qu'une expression RQL est vérifiée.
+
+Les groupes standards sont :
+
+* `guests`
+
+* `users`
+
+* `managers`
+
+* `owners` : groupe virtuel correspondant au propriétaire d'une entité. Celui-ci
+  ne peut être utilisé que pour les actions `update` et `delete` d'un type
+  d'entité. 
+
+Il est également possible d'utiliser des groupes spécifiques devant être pour
+cela créés dans le precreate de l'application (`migration/precreate.py`).
+
+Utilisation d'expression RQL sur les droits en écriture
+```````````````````````````````````````````````````````
+Il est possible de définir des expressions RQL donnant des droits de
+modification (`add`, `delete`, `update`) sur les types d'entité et de relation.
+
+Expression RQL pour les permissions sur un type d'entité :
+
+* il faut utiliser la classe `ERQLExpression`
+
+* l'expression utilisée correspond à la clause WHERE d'une requête RQL
+
+* dans cette expression, les variables X et U sont des références prédéfinies
+  respectivement sur l'entité courante (sur laquelle l'action est vérifiée) et
+  sur l'utilisateur ayant effectué la requête
+
+* il est possible d'utiliser dans cette expression les relations spéciales
+  "has_<ACTION>_permission" dont le sujet est l'utilisateur et l'objet une
+  variable quelquonque, signifiant ainsi que l'utilisateur doit avoir la
+  permission d'effectuer l'action <ACTION> sur la ou les entités liées cette
+  variable
+
+Pour les expressions RQL sur un type de relation, les principes sont les mêmes
+avec les différences suivantes :
+
+* il faut utiliser la classe `RRQLExpression` dans le cas d'une relation non
+  finale
+
+* dans cette expression, les variables S, O et U sont des références
+  prédéfinies respectivement sur le sujet et l'objet de la relation
+  courante (sur laquelle l'action est vérifiée) et sur l'utilisateur
+  ayant effectué la requête
+
+* On peut aussi définir des droits sur les attributs d'une entité (relation non
+  finale), sachant les points suivants :
+
+  - pour définir des expressions rql, il faut utiliser la classe `ERQLExpression`
+    dans laquelle X représentera l'entité auquel appartient l'attribut
+
+  - les permissions 'add' et 'delete' sont équivalentes. En pratique seul
+    'add'/'read' son pris en considération
+
+
+En plus de cela, le type d'entité `EPermission` de la librairie standard permet
+de construire des modèles de sécurités très complexes et dynamiques. Le schéma
+de ce type d'entité est le suivant : ::
+
+
+    class EPermission(MetaEntityType):
+	"""entity type that may be used to construct some advanced security configuration
+	"""
+	name = String(required=True, indexed=True, internationalizable=True, maxsize=100)
+	require_group = SubjectRelation('EGroup', cardinality='+*',
+					description=_('groups to which the permission is granted'))
+	require_state = SubjectRelation('State',
+				    description=_("entity'state in which the permission is applyable"))
+	# can be used on any entity
+	require_permission = ObjectRelation('**', cardinality='*1', composite='subject',
+					    description=_("link a permission to the entity. This "
+							  "permission should be used in the security "
+							  "definition of the entity's type to be useful."))
+
+
+Exemple de configuration extrait de *jpl* ::
+
+    ...
+
+    class Version(EntityType):
+	"""a version is defining the content of a particular project's release"""
+
+	permissions = {'read':   ('managers', 'users', 'guests',),
+		       'update': ('managers', 'logilab', 'owners',),
+		       'delete': ('managers', ),
+		       'add':    ('managers', 'logilab',
+				  ERQLExpression('X version_of PROJ, U in_group G,'
+						 'PROJ require_permission P, P name "add_version",'
+						 'P require_group G'),)}
+
+    ...
+
+    class version_of(RelationType):
+	"""link a version to its project. A version is necessarily linked to one and only one project.
+	"""
+	permissions = {'read':   ('managers', 'users', 'guests',),
+		       'delete': ('managers', ),
+		       'add':    ('managers', 'logilab',
+				  RRQLExpression('O require_permission P, P name "add_version",'
+						 'U in_group G, P require_group G'),)
+		       }
+	inlined = True
+
+Cette configuration indique qu'une entité `EPermission` de nom
+"add_version" peut-être associée à un projet et donner le droit de créer des
+versions sur ce projet à des groupes spécifiques. Il est important de noter les
+points suivants :
+
+* dans ce cas il faut protéger à la fois le type d'entité "Version" et la
+  relation liant une version à un projet ("version_of")
+
+* du fait de la généricité du type d'entité `EPermission`, il faut effectuer
+  l'unification avec les groupes et / ou les états le cas échéant dans
+  l'expression ("U in_group G, P require_group G" dans l'exemple ci-dessus)
+
+
+Utilisation d'expression RQL sur les droits en lecture
+``````````````````````````````````````````````````````
+Les principes sont les mêmes mais avec les restrictions suivantes :
+
+* on ne peut pas utiliser de `RRQLExpression` sur les types de relation en lecture
+
+* les relations spéciales "has_<ACTION>_permission" ne sont pas utilisables
+
+
+Note sur l'utilisation d'expression RQL sur la permission 'add'
+```````````````````````````````````````````````````````````````
+L'utilisation d'expression RQL sur l'ajout d'entité ou de relation pose
+potentiellement un problème pour l'interface utilisateur car si l'expression
+utilise l'entité ou la relation à créer, on est pas capable de vérifier les
+droits avant d'avoir effectué l'ajout (noter que cela n'est pas un problème coté
+serveur rql car la vérification des droits est effectuée après l'ajout
+effectif). Dans ce cas les méthodes de vérification des droits (check_perm,
+has_perm) peuvent indiquer qu'un utilisateur n'a pas le droit d'ajout alors
+qu'il pourrait effectivement l'obtenir. Pour palier à ce soucis il est en général
+nécessaire dans tel cas d'utiliser une action reflétant les droits du schéma
+mais permettant de faire la vérification correctement afin qu'elle apparaisse
+bien le cas échéant.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/doc/devmanual_fr/sect_erudi-ctl.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,70 @@
+L'outil `cubicweb-ctl`
+----------------------
+`cubicweb-ctl` est le couteau suisse pour la gestion d'instances CubicWeb.
+La syntaxe générale est ::
+
+  cubicweb-ctl <commande> [options commande] <arguments commandes>
+
+Pour voir les commandes disponibles ::
+
+  cubicweb-ctl
+  cubicweb-ctl --help
+
+A noter que les commandes disponibles varient en fonction des parties de CubicWeb
+qui sont installées.
+
+Pour voir l'aide pour une commande spécifiques ::
+
+  cubicweb-ctl <commande> --help
+
+Commandes pour la création d'un composant
+````````````````````````````````````````
+* ``newtemplate``, crée un nouveau composant sur le système de fichiers
+  à partir du nom passé en paramètre. Cette commande crée le composant à partir
+  d'un squelette d'application, incluant également les fichiers pour le
+  packaging debian.
+  
+Commandes pour la création d'une instance
+`````````````````````````````````````````
+* ``create``, crée les fichiers de configuration d'une instance
+* ``db-create``, crée la base de données système d'une instance (tables et
+  extensions uniquement)
+* ``db-init``, initialise la base de données système d'une instance (schéma,
+  groupes, utilisateurs, workflows...)
+
+Par défaut ces trois commandes sont enchainées.
+
+Commandes pour le lancement des instances
+`````````````````````````````````````````
+* ``start``, démarre une, plusieurs, ou toutes les instances
+* ``stop``, arrête une, plusieurs, ou toutes les instances
+* ``restart``, redémarre une, plusieurs, ou toutes les instances
+* ``status``, donne l'état des instances
+
+Commandes pour la maintenance des instances
+```````````````````````````````````````````
+* ``upgrade``, lance la migration d'instance(s) existante(s) lorsqu'une nouvelle
+  version de CubicWeb ou du composant est installée
+* ``shell``, ouvre un shell de migration pour maintenance manuelle d'une instance
+* ``db-dump``, crée un dump de la base de données système
+* ``db-restore``, restaure un dump de la base de données système
+* ``db-check``, vérifie l'intégrité des données d'une instance. Si la correction
+  automatique est activée, il est conseillé de faire un dump avant cette
+  opération
+* ``schema-sync``, synchronise le schéma persistant d'une instance avec le schéma
+  de l'application. Il est conseillé de faire un dump avant cette opération
+
+Commandes pour la maintenance des catalogues i18n
+`````````````````````````````````````````````````
+* ``i18nlibupdate``, regénère les catalogues de messages de la librairie CubicWeb
+* ``i18nupdate``, regénère les catalogues de messages d'un composant
+* ``i18ncompile``, recompile les catalogues de messages d'une instance. Cela est
+  effectué automatiquement lors d'une upgrade
+
+Cf Internationalisation_.
+
+Autres commandes
+````````````````
+* ``list``, donne la liste des configurations, des composants et des instances
+  disponibles
+* ``delete``, supprime une instance (fichiers de configuration et base de données)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/doc/devmanual_fr/sect_installation.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,76 @@
+Installation de cubicweb et de ses dépendances
+----------------------------------------------
+Tout le système CubicWeb est préparé pour l'installation sur une machine
+debian. L'installation manuelle est un peu pénible du fait des nombreuses
+dépendances à installer (twisted, postgres, autres paquets python...). Nous
+supposerons donc ici que l'installation se fait sur une machine debian ayant
+dans ses sources apt un entrepôt contenant les paquets pour CubicWeb.
+
+Pour tout installer sur le système ::
+
+  apt-get install cubicweb
+
+On peut également n'installer que les paquets cubicweb-server ou cubicweb-twisted pour
+n'avoir que la partie serveur ou client web sur une machine.
+
+Pour installer la documentation et les librairies/outils de développement ::
+
+  apt-get install cubicweb-documentation cubicweb-dev
+
+On pourra ensuite installer les paquets suivants :
+
+* `pyro` si vous voulez que l'entrepôt soit accessible via Pyro ou si le client
+  et le serveur ne sont pas sur la même machine (auquel cas il faut installer ce
+  paquet sur les machines clientes et serveur)
+
+* `python-ldap` si vous voulez utiliser une source ldap sur le serveur
+
+* `postgresql-8.1`, `postgresql-contrib-8.1` et `postgresql-plpython-8.1` la
+  machine devant héberger la base de données système
+
+Configuration de l'environnement
+--------------------------------
+Ajouter les lignes suivantes à son `.bashrc` ou `.bash_profile` pour configurer
+votre environnement de développement ::
+
+  export CW_REGISTRY=~/etc/cubicweb.d/
+  export CW_CUBES=~/hg/
+  export CW_RUNTIME=/tmp/
+
+Cela suppose que le composant cubicweb que vous développez est dans un
+sous-répertoire de *~/hg/* et que vous avez créé le répertoire *~/etc/cubicweb.d/*
+pour que `cubicweb-ctl` y place vos instances de test.
+
+
+Configuration Postgres
+----------------------
+* création d'un super utilisateur pour la création d'instance (**root**) ::
+
+    createuser --superuser --createdb -P pgadmin
+
+  Un mot de passe de connection pour cet utilisateur vous sera demandé. Il
+  faudra utiliser ce login / mot de passe à la création d'instance via
+  `cubicweb-ctl`
+
+* installation des extensions pour l'index plein texte ::
+
+    cat /usr/share/postgresql/8.1/contrib/tsearch2.sql | psql -U pgadmin template1
+
+* installation du langage plpythonu par défaut ::
+
+    createlang -U pgadmin plpythonu template1
+
+
+Configuration Pyro
+------------------
+Si vous utilisez Pyro, il est nécessaire d'avoir un serveur de noms Pyro
+tournant sur votre réseau (par défaut celui-ci est repéré par une requête
+broadcast). Pour cela il faut soit :
+
+* le lancer à la main avant le démarrage de cubicweb avec la commande `pyro-ns`
+
+* le lancer à la main avant le démarrage de cubicweb sous forme d'un serveur avec
+  la commande `pyro-nsd start`
+
+* éditer le fichier */etc/default/pyro-nsd* pour que le serveur de nom pyro soit
+  lancé automatiquement au démarrage de la machine
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/doc/devmanual_fr/sect_mercurial.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,110 @@
+Présentation de Mercurial
+-------------------------
+
+Introduction
+````````````
+Mercurial_ gère un ensemble distribué d'entrepôts contenant des arbres de
+révisions (chaque révision indique les changements à effectuer pour obtenir la
+version suivante, et ainsi de suite). Localement, on dispose d'un entrepôt
+contenant un arbre de révisions, et d'un répertoire de travail. Il est possible
+de mettre dans son répertoire de travail, une des versions issue de son entrepôt
+local, de la modifier puis de la verser dans son entrepôt. Il est également
+possible de récupérer dans son entrepôt local des révisions venant d'un autre
+entrepôt, ou d'exporter ses propres révisions depuis son entrepôt local vers un
+autre entrepôt.
+
+A noter que contrairement à CVS/Subversion, on crée généralement un entrepôt par
+projet à gérer.
+
+Lors d'un développement collaboratif, on crée généralement un entrepôt central
+accessible à tous les développeurs du projet. Ces entrepôts centraux servent de
+référence. Selon ses besoins, chacun peut ensuite disposer d'un entrepôt local,
+qu'il faudra penser à synchroniser avec l'entrepôt central de temps à autre. 
+
+
+Principales commandes
+`````````````````````
+* Créer un entrepôt local ::
+
+    hg clone ssh://orion//home/src/prive/rep
+
+* Voir le contenu de l'entrepôt local (outil graphique en Tk) ::
+
+    hg view
+
+* Ajouter un sous-répertoire ou un fichier dans le répertoire courant ::
+
+    hg add rep
+
+* Placer dans son répertoire de travail une révision spécifique (ou la dernière
+  revision) issue de l'entrepôt local ::
+
+    hg update [identifiant-revision]
+    hg up [identifiant-revision]
+
+* Récupérer dans son entrepôt local, l'arbre de révisions contenu dans un
+  entrepôt distant (cette opération ne modifie pas le répertoire local) ::
+
+    hg pull ssh://orion//home/src/prive/rep
+    hg pull -u ssh://orion//home/src/prive/rep # équivalent à pull + update
+
+* Voir quelles sont les têtes de branches de l'entrepôt local si un `pull` a
+  tiré une nouvelle branche ::
+
+    hg heads
+
+* Verser le répertoire de travail dans l'entrepôt local (et créer une nouvelle
+  révision) ::
+
+    hg commit
+    hg ci
+
+* Fusionner, avec la révision mère du répertoire local, une autre révision issue
+  de l'entrepôt local (la nouvelle révision qui en résultera aura alors deux
+  révisions mères) ::
+
+    hg merge identifiant-revision
+
+* Exporter dans un entrepôt distant, l'arbre de révisions contenu dans son
+  entrepôt local (cette opération ne modifie pas le répertoire local) ::
+
+    hg push ssh://orion//home/src/prive/rep
+
+* Voir quelle sont les révisions locales non présentes dans un autre entrepôt ::
+
+    hg outgoing ssh://orion//home/src/prive/rep
+
+* Voir quelle sont les révisions d'un autre entrepôt non présentes localement ::
+
+    hg incoming ssh://orion//home/src/prive/rep
+
+* Voir quelle est la révision issue de l'entrepôt local qui a été sortie dans le
+  répertoire de travail et modifiée ::
+
+    hg parent
+
+* Voir les différences entre le répertoire de travail et la révision mère de
+  l'entrepôt local, éventuellement permettant de les verser dans l'entrepôt
+  local ::
+
+    hg diff
+    hg commit-tool
+    hg ct
+
+
+Bonnes pratiques
+````````````````
+* penser à faire un `hg pull -u` régulièrement et particulièrement avant de
+  faire un `hg commit`
+
+* penser à faire un `hg push` lorsque votre entrepôt contient une version
+  relativement stable de vos modifications
+
+* si un `hg pull -u` a créé une nouvelle tête de branche :
+
+  1. identifier l'identifiant de celle-ci avec `hg head`
+  2. fusionner avec `hg merge`
+  3. `hg ci`
+  4. `hg push`
+
+.. _Mercurial: http://www.selenic.com/mercurial/
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/doc/devmanual_fr/sect_stdlib_schemas.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,68 @@
+Schémas prédéfinis dans la librairie
+-------------------------------------
+
+La librairie définit un certain nombre de schémas d'entités nécessaires
+au système ou bien couramment utilisés dans les applications `cubicweb`.
+Vous pouvez bien entendu étendre ces schémas au besoin.
+
+
+Schémas "systèmes"
+``````````````````
+
+* `EUser`, utilisateurs du système
+* `EGroup`, groupes d'utilisateurs
+* `EEType`, types d'entité
+* `ERType`, types de relation
+
+* `State`, état d'un workflow
+* `Transition`, transition d'un workflow
+* `TrInfo`, enregistrement d'un passage de transition pour une entité
+
+* `EmailAddress`, adresse électronique, utilisé par le système de notification
+  pour les utilisateurs et par d'autres schéma optionnels
+
+* `EProperty`, utilisé pour configurer l'application
+* `EPermission`, utilisé pour configurer la sécurité de l'application
+
+* `Card`, fiche documentaire générique
+* `Bookmark`, un type d'entité utilisé pour permettre à un utilisateur de
+  personnaliser ses liens de navigation dans l'application.
+
+
+Composants de la librairie
+``````````````````````````
+Une application est construite sur la base de plusieurs composants de base.
+Parmi les composants de base disponibles, on trouve par exemple :
+
+* `ecomment`, fournit le type d'entité `Comment` permettant de commenter les
+  entités du site
+  
+* `emailinglist`, fournit le type d'entité `Mailinglist` regroupant des
+  informations sur une liste de discussion
+
+* `efile`, fournit les types d'entités `File` et `Image` utilisés pour
+  représenter des fichiers (texte ou binaire) avec quelques données
+  supplémentaires comme le type MIME ou l'encodage le cas échéant ().
+  
+* `elink`, fournit le type d'entité lien internet (`Link`)
+
+* `eblog`, fournit le type d'entité weblog (`Blog`)
+
+* `eperson`, fournit le type d'entité personne physique (`Person`)
+
+* `eaddressbook`, fournit les types d'entités utilisés pour représenter des n°
+  de téléphone (`PhoneNumber`) et des adresses postales (`PostalAddress`)
+  
+* `eclasstags`, système de classification à base d'étiquettes (`Tag`)
+
+* `eclassfolders`, système de classification à base de dossiers hiérarchiques
+  destinés à créer des rubriques de navigation (`Folder`)
+
+* `eemail`, gestion d'archives de courriers électroniques (`Email`, `Emailpart`,
+  `Emailthread`)
+
+* `ebasket`, gestion de paniers (`Basket`) permettant de regrouper des entités
+
+Pour déclarer l'utilisation d'un composant, une fois celui-ci installé, ajoutez
+le nom du composant à la variable `__use__` du fichier `__pkginfo__.py` de
+votre propre composant.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/doc/devmanual_fr/sect_stdlib_vues.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,64 @@
+
+Vues prédéfinies dans la librairie
+----------------------------------
+Un certain nombre de vues sont utilisées pour construire l'interface web, qui
+s'appliquent à une ou plusieurs entités. On les distingue par leur identifiant,
+et les principales sont : 
+
+:primary:
+    vue principale pour une entité, elle est appelée par défaut lorsqu'il n'y a
+    qu'un seul élément correspondant à la recherche. Cette vue est censée
+    afficher le maximum d'informations à propos de l'objet.
+:secondary:
+    vue secondaire d'une entité. Par défaut, elle affiche les deux premiers
+    attributs de l'entité sous la forme d'un lien cliquable amenant sur la vue
+    primaire.
+:oneline:
+    similaire à la vue `secondary`, mais appelée dans des cas où l'on désire que
+    la vue tienne sur une ligne, ou de manière générale juste avoir une vue plus
+    abrégée. Par défaut, cette vue utilise le paramètre de configuration
+    `MAX_LINE_CHAR` pour contrôler la taille du résultat.
+:text:
+    similaire à la vue `oneline`, mais ne devant pas contenir de html.
+:incontext, outofcontext:
+    similaire à la vue `secondary`, mais appelé si l'entité est considérée comme
+    en dehors ou dans son contexte. Par défaut renvoie respectivement le
+    résultat de `textincontext` et `textoutofcontext` entouré par un lien
+    permettant d'accéder à la vue primaire de l'entité
+:textincontext, textoutofcontext:
+    similaire à la vue `text`, mais appelé si l'entité est considérée comme
+    en dehors ou dans son contexte. Par défaut renvoie respectivement le
+    résultat des méthodes `.dc_title` et `.dc_long_title` de l'entité
+:list:
+    crée une liste html (<ul>) et appelle la vue `listitem` pour chaque entité
+:listitem:
+    redirige par défaut vers la vue `outofcontext`
+:rss:
+    crée une vue RSS/XML et appelle la vue `rssitem` pour chaque entité
+:rssitem:
+    crée une vue RSS/XML pour une entité à partir des résultats renvoyés par les
+    méthodes dublin core de l'objet (`dc_*`)
+
+Vues de départ :
+
+:index:
+    page d'accueil
+:schema:
+    affiche le schéma de l'application
+
+Vues particulières :
+
+:noresult:
+    appelé si le result set est vide
+:finall:
+    affiche la valeur de la cellule sans transformation (dans le cas d'une
+    entité non finale, on voit son eid). Appelable sur n'importe quel result
+    set.
+:table:
+    crée une table html (<table>) et appelle la vue `cell` pour chaque cellule
+    du résultat. Appelable sur n'importe quel result set.
+:cell:
+    par défaut redirige sur la vue `final` si c'est une entité finale
+    ou sur la vue `outofcontext` sinon
+:null:
+    vue toujours appelable et ne retournant rien
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/doc/quickstart.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,126 @@
+Introduction
+=============
+
+
+Concepts et vocabulaire
+-----------------------
+
+*schéma*
+  le schéma définit le modèle de données d'une application sous forme
+  d'entités et de relations. C'est l'élément central d'une
+  application.
+
+*result set*
+  objet qui encapsule les résultats d'une requête adressée à l'entrepôt 
+  de données et des informations sur cette requête.
+
+*vue*
+  une vue est une manière de représenter les données d'un `result set`
+  sous forme HTML, CSV, JSON, etc.
+
+
+
+Définition d'une application de Blog
+====================================
+
+La première chose à faire est de copier le squelette depuis le répertoire 
+``lax/skel`` vers un nouveau répertoire qui sera votre application 
+``Google AppEngine``::
+
+  $ cp -r lax/skel myapp
+
+Définition du schéma
+--------------------
+
+Ouvrir le fichier ``myapp/schema.py`` afin de définir le schéma des
+données manipulées. La syntaxe de la définition est la même que celle
+proposée par `Google AppEngine`_ mais il faut remplacer la ligne
+d'import::
+  
+  from google.appengine.ext import db
+
+par celle-ci::
+
+  from cubicweb.goa import db
+
+
+Un exemple de schéma de données pour un ``Blog`` pourrait être::
+
+  from cubicweb.goa import db
+  
+  class Blog(db.Model):
+      # un titre à donner à l'entrée
+      title = db.StringProperty(required=True)
+      # la date à laquelle le blog est créé
+      diem = db.DateProperty(required=True, auto_now_add=True)
+      # le contenu de l'entrée
+      content = db.TextProperty()
+      # une entrée peut en citer une autre
+      cites = db.SelfReferenceProperty() 
+      
+
+Personnalisation des vues
+-------------------------
+
+``LAX`` permet de générer directement, à partir de la définition
+du schéma, des vues de consultation, d'ajout et de modification 
+pour tous les types de données manipulés. Il est toutefois 
+généralement souhaitable de personnaliser les vues de consultations.
+
+Dans ``LAX``, les vues sont représentées par des classes Python.
+
+Une vue se caractérise par :
+
+- un identifiant (tous les objets dans ``LAX`` sont enregistrés
+  dans un registre et cet identifiant sert de clé pour y retrouver
+  la vue)
+  
+- une description des types de données auxquels elle s'applique
+
+Il existe dans ``LAX`` des vues prédéfinies et utilisées par le moteur
+d'affichage. Pour avoir une liste exhaustive de ces vues prédéfinies,
+vous pouvez consulter cette page. (XXX mettre le lien vers la liste).
+Par exemple, la vue ``primary`` est la vue utilisée pour générer la
+page principale de consultation d'un objet.
+
+Par exemple, si on souhaite modifier la page principale d'une entrée de
+blog, il faut surcharger la vue ``primary`` des objets ``Blog`` dans
+le fichier ``myapp/views.py``::
+  
+  from cubicweb.web.views import baseviews
+  
+  class BlogPrimaryView(baseviews.PrimaryView):
+      accepts = ('Blog',)
+      
+      def cell_call(self, row, col):
+          entity = self.entity(row, col)
+          self.w(u'<h1>%s</h1>' % entity.title)
+          self.w(u'<div>%s</div>' % entity.content)
+    
+
+Génération du graphique de schéma
+---------------------------------
+
+Il existe une vue ``schema`` qui permet d'afficher un graphique
+représentant les différents types d'entités définis dans le schéma
+ainsi que les relations entre ces types. Ce graphique doit être généré
+statiquement. Le script à utiliser pour générer ce schéma est 
+dans ``myapp/tools``. Ce script nécessite d'avoir accès aux
+bibliothèques fournies par le SDK de ``Google AppEngine``. Il faut
+donc modifier son PYTHONPATH::
+
+  $ export PYTHONPATH=GAE_ROOT/google:GAE_ROOT/lib/yaml
+  $ python tools/generate_schema_img.py 
+
+
+Génération des fichiers de traduction
+-------------------------------------
+
+Des catalogues de traduction se trouvent dans `myapp/i18n`. Il faut
+pour l'instant les mettre à jour à la main (et/ou avec les outils
+``GNU`` comme ``xgettext``) et ensuite les compiler grâce au script
+``myapp/tools/i18ncompile.py``::
+
+  $ python tools/i18ncompile.py
+
+.. _`Google AppEngine`: http://code.google.com/appengine/docs/datastore/overview.html
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/doc/tutorial-wine.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,158 @@
+.. -*- coding: utf-8 -*-
+
+=============
+LAX Tutorial
+=============
+
+Introduction
+------------
+
+LAX is a web framework on top of the Google AppEngine datastore.
+
+
+features: schema/data-model at core of app, selection/view mechanism,
+reuseable components, very fast development
+
+
+Since we are french, let us develop an example application that deals
+with wine and will allow any wine enthusiast to track the content of
+its cellar and share his tasting experiences.
+
+Schema
+------
+
+With LAX, the core of the application is the schema/datamodel.
+
+laxctl newapp ? XXX
+
+We will start by something simple and define three entities: WineMaker,
+Wine and Bottle.
+
+::
+
+  class WineMaker(EntityType):
+      name = String(maxsize=50, required=True)
+
+  class Wine(EntityType):
+      name = String(required=True, maxsize=100, fulltextindexed=True)
+      vintage = Int(required=True, constraints=[IntervalBoundConstraint(1850,2100)])
+      grown_by = SubjectRelation('WineMaker', cardinality='?*',
+                                 description=_('Winemaker who grew the wine'))
+
+  class Bottle(EntityType):
+      buy_date = Date(description=_('Date when the bottle was bought.'),
+                      default='TODAY')
+      bottle_of = SubjectRelation('Wine', cardinality='?*')
+
+A WineMaker only has a name which is a string that is required and
+must be less than 50 characters.
+
+A Wine has a name, which is a string that is required, must be less
+than 100 characters and will be indexed in the full-text index XXX
+fulltextindex marche pas encore. A Wine
+also has a vintage year which is an integer that is required and must
+be between 1850 and 2100. A Wine also has a relationship ``grown_by``
+that link it to a WineMaker. Cardinality ``?*`` means that a Wine can
+have zero or one WineMaker (``?`` means `zero or one`) and that a
+WineMaker can have any number of Wine entities (``*`` means `any number
+including zero`).
+
+A Bottle has a buy_date attribute, which is a date with a default
+value of TODAY, meaning that when a new bottle is created, it will
+have its creation date as buy_date unless the user changes it to some
+other date. A Bottle also has a relationship ``bottle_of`` that link
+it to a Wine. The cardinality of that relationship implies that a
+Bottle can be linked to zero or one Wine and that a Wine can be linked
+to any number of Bottle entities.
+
+
+Defining this simple schema is enough to get us started, launch the
+application with the command::
+
+   laxctl start Winopedia
+
+and point your browser at localhost:8080
+
+You will see the home page of your application. It lists the entity
+types: WineMaker, Wine, Bottle.
+
+Let us create a few of these. Click on the [+] at the right of the
+link WineMaker. Call this new WineMaker ``Domaine du château`` and
+validate the form by clicking on ``button_ok``. 
+
+Click on the logo at top left to get back to the home page, then
+follow the WineMaker link. You should be seeing a list with a single
+item ``Domaine du château``. Clicking on this item will get you to 
+its detailed description except that in this case, there is not much
+to display besides the name.
+
+Now get back to the home page by clicking on the top-left logo, then
+create a new WineMaker called ``Vallon de la Dame`` and get back to the
+home page again to follow the WineMaker link for the second time. The
+list now has two items.
+
+Get back to the home page and click on [+] at the right of the link
+Wine. Call this new wine ``Cuvée du Roi`` and enter 2008 as vintage,
+then click on ``button_ok``. You added a new wine without saying who
+made it. There is a box on the left entitled "actions", click on the
+menu item `modify`. You are back to the form to edit the wine entity
+you just created, except that the form now has another section with a
+combobox titled "add a relationship". Chose "grown_by" in this
+menu and a second combobox appears where you pick ``Domaine du
+château``. Validate the changes by clicking  ``button_ok``. The entity
+Wine that is displayed now includes a link to the entity WineMaker
+named ``Domaine du château``.
+
+Exercise
+~~~~~~~~
+
+Create new entities Wine and Bottle.
+
+What we learned
+~~~~~~~~~~~~~~~
+
+Creating a simple schema was enough to set up a new application that
+can store WineMaker, Wine, Bottle. 
+
+What is next ?
+--------------
+
+Although the application is fully functional, its look is very
+basic. We will now improve how information is displayed by writing
+views.
+
+
+Views
+======
+
+...
+
+Defining views with selection/views
+
+implementing interfaces, calendar for bottles bought and for tasting.
+calendar with export icalput attribute drink_date on bottle 
+
+add attribute wine color
+
+create view "bottle table" with color, buy_date, drink_date.
+
+in view wine, select Wine.bottles and apply view "bottle table"
+
+demo ajax with filter on bottle table
+
+Components
+===========
+
+...
+
+
+
+customize MainTemplate
+
+rss channel of new bottles or wines
+
+use URLRewriting for nice urls
+
+talk about security access rights
+
+talk about rql
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/doc/tutorial.en.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,227 @@
+.. -*- coding: utf-8 -*-
+
+=============
+LAX Tutorial
+=============
+
+Introduction
+============
+
+LAX stands for Logilab App engine eXtension. It is a web framework
+running on top of the Google AppEngine datastore.
+
+Distinctive features include a data-model driven engine, a query
+language, a selection/view mechanism for HTML/XML/text generation,
+reuseable components, etc. It all sums up to very fast and efficient
+development.
+
+This tutorial will guide you to build a blog application step by step
+to discover the unique features of LAX. It assumes that you followed
+the installation guidelines and that both the AppEngine SDK and the
+LAX framework are setup on your computer.
+
+Creating a very simple application
+==================================
+
+Creating a new application
+--------------------------
+
+When you installed lax, you saw a directory named skel. Make a copy of
+this directory and call it BlogDemo.
+
+Defining a schema
+-----------------
+
+With LAX, the schema/datamodel is the core of the application.
+
+Let us start with something simple and improve on it later. First, we
+make sure that in appconfig.py we have a line ::
+
+  schema_type = 'yams'
+
+Then, in schema.py, we define two entities : ``Blog`` and ``BlogEntry``.
+
+::
+				   
+  class Blog(EntityType):
+      title = String(maxsize=50, required=True)
+      description = String()
+
+  class BlogEntry(EntityType):
+      title = String(maxsize=100, required=True)
+      publish_date = Date(default='TODAY')
+      text = String(fulltextindexed=True)
+      category = String(vocabulary=('important','business'))
+      entry_of = SubjectRelation('Blog', cardinality='?*')
+
+A Blog has a title and a description. The title is a string that is
+required and must be less than 50 characters. The description is a
+string that is not constrained.
+
+A BlogEntry has a title, a publish_date and a text. The title is a
+string that is required and must be less than 100 characters. The
+publish_date is a Date with a default value of TODAY, meaning that
+when a BlogEntry is created, its publish_date will be the current day
+unless it is modified. The text is a string that will be indexed in
+the full-text index and has no constraint.
+
+A BlogEntry also has a relationship ``entry_of`` that link it to a
+Blog. The cardinality ``?*`` means that a BlogEntry can be part of
+zero or one Blog (``?`` means `zero or one`) and that a Blog can
+have any number of BlogEntry (``*`` means `any number including
+zero`). For completeness, remember that ``+`` means `one or more`.
+
+:note: in lax-0.3.0, cardinality checking is not fully ported to
+AppEngine, so cardinality limits are not enforced. This should be
+fixed in lax-0.4.0 available at the beginning of June.
+
+Using the application
+---------------------
+
+Defining this simple schema is enough to get us started. Launch the
+application with the command::
+
+   python dev_appserver.py BlogDemo
+
+and point your browser at localhost:8080
+
+You will see the home page of your application. It lists the entity
+types: Blog and BlogEntry.
+
+Let us create a few of these. Click on the [+] at the right of the
+link Blog. Call this new Blog ``Tech-blog`` and type in
+``everything about technology`` as the description, then validate the
+form by clicking on ``button_ok``.
+
+Click on the logo at top left to get back to the home page, then
+follow the Blog link. If this link reads ``blog_plural`` it is because
+i18n is not working for you yet. Let us ignore this for a while. After
+following the link, you should be seeing a list with a single item
+``Tech-blog``. Clicking on this item will get you to its detailed
+description except that in this case, there is not much to display
+besides the name and the phrase ``everything about technology``.
+
+Now get back to the home page by clicking on the top-left logo, then
+create a new Blog called ``MyLife`` and get back to the home page
+again to follow the Blog link for the second time. The list now
+has two items.
+
+Get back to the home page and click on [+] at the right of the link
+BlogEntry. Call this new entry ``Hello World`` and type in some text
+before clicking on ``button_ok``. You added a new blog entry without
+saying to what blog it belongs. There is a box on the left entitled
+``actions``, click on the menu item ``modify``. You are back to the form
+to edit the blog entry you just created, except that the form now has
+another section with a combobox titled ``add relation``. Chose
+``entry_of`` in this menu and a second combobox appears where you pick
+``MyLife``. Validate the changes by clicking
+``button_ok``. The entity BlogEntry that is displayed now includes a link
+to the entity Blog named ``MyLife``.
+
+Conclusion
+----------
+
+Exercise
+~~~~~~~~
+
+Create new blog entries in ``Tech-blog``.
+
+What we learned
+~~~~~~~~~~~~~~~
+
+Creating a simple schema was enough to set up a new application that
+can store blogs and blog entries. 
+
+What is next ?
+--------------
+
+Although the application is fully functional, its look is very
+basic. We will now improve how information is displayed by writing
+views.
+
+
+Developing the user interface with Views
+========================================
+
+[WRITE ME]
+
+* Defining views with selection/views
+
+* implementing interfaces, calendar for blog entries.
+
+* show that a calendar view can export data to ical. 
+
+* create view "blogentry table" with title, publish_date, category.
+
+* in view blog, select blogentries and apply view "blogentry table"
+
+* demo ajax by filtering blogentry table on category
+
+Components
+===========
+
+[WRITE ME]
+
+* explain the component architecture
+
+* add comments to the blog by importing the comments component
+
+Boxes
+======
+
+[WRITE ME]
+
+* explain how to build a box
+
+* add an blogentry archives box
+
+Preferences
+============
+
+[WRITE ME]
+
+* talk about the user preferences
+
+* add an example on how to hide / display / move a component or a box
+
+MainTemplate
+============
+
+[WRITE ME]
+
+* customize MainTemplate and show that everything in the user
+  interface can be changed
+
+
+RSS Channel
+===========
+
+[WRITE ME]
+
+* show that the RSS view can be used to display an ordered selection
+  of blog entries, thus providing a RSS channel
+
+* show that a different selection (by category) means a different channel
+
+RQL
+====
+
+[WRITE ME]
+
+* talk about the Relation Query Language
+
+URL Rewriting
+=============
+
+[WRITE ME]
+
+* show how urls are mapped to selections and views and explain URLRewriting 
+
+Security
+=========
+
+[WRITE ME]
+
+* talk about security access rights and show that security is defined
+  using RQL
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/gaesource.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,320 @@
+"""Adapter for google appengine source.
+
+:organization: Logilab
+:copyright: 2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from logilab.common.decorators import cached, clear_cache
+
+from cubicweb import AuthenticationError, UnknownEid, server
+from cubicweb.server.sources import AbstractSource, ConnectionWrapper
+from cubicweb.server.pool import SingleOperation
+from cubicweb.server.utils import crypt_password
+from cubicweb.goa.dbinit import set_user_groups
+from cubicweb.goa.rqlinterpreter import RQLInterpreter
+
+from google.appengine.api.datastore import Key, Entity, Get, Put, Delete
+from google.appengine.api.datastore import Query
+from google.appengine.api import datastore_errors, users
+    
+def _init_groups(guser, euser):
+    # set default groups
+    if guser is None:
+        groups = ['guests']
+    else:
+        groups = ['users']
+        if users.is_current_user_admin():
+            groups.append('managers')
+    set_user_groups(euser, groups)
+
+def _clear_related_cache(session, gaesubject, rtype, gaeobject):
+    subject, object = str(gaesubject.key()), str(gaeobject.key())
+    for eid, role in ((subject, 'subject'), (object, 'object')):
+        # clear related cache if necessary
+        try:
+            entity = session.entity_cache(eid)
+        except KeyError:
+            pass
+        else:
+            entity.clear_related_cache(rtype, role)
+    if gaesubject.kind() == 'EUser':
+        for asession in session.repo._sessions.itervalues():
+            if asession.user.eid == subject:
+                asession.user.clear_related_cache(rtype, 'subject')
+    if gaeobject.kind() == 'EUser':
+        for asession in session.repo._sessions.itervalues():
+            if asession.user.eid == object:
+                asession.user.clear_related_cache(rtype, 'object')
+
+def _mark_modified(session, gaeentity):
+    modified = session.query_data('modifiedentities', {}, setdefault=True)
+    modified[str(gaeentity.key())] = gaeentity
+    DatastorePutOp(session)
+
+def _rinfo(session, subject, rtype, object):
+    gaesubj = session.datastore_get(subject)
+    gaeobj = session.datastore_get(object)
+    rschema = session.vreg.schema.rschema(rtype)
+    cards = rschema.rproperty(gaesubj.kind(), gaeobj.kind(), 'cardinality')
+    return gaesubj, gaeobj, cards
+
+def _radd(session, gaeentity, targetkey, relation, card):
+    if card in '?1':
+        gaeentity[relation] = targetkey
+    else:
+        try:
+            related = gaeentity[relation]
+        except KeyError:
+            related = []
+        else:
+            if related is None:
+                related = []
+        related.append(targetkey)
+        gaeentity[relation] = related
+    _mark_modified(session, gaeentity)
+
+def _rdel(session, gaeentity, targetkey, relation, card):
+    if card in '?1':
+        gaeentity[relation] = None
+    else:
+        related = gaeentity[relation]
+        if related is not None:
+            related = [key for key in related if not key == targetkey]
+            gaeentity[relation] = related or None
+    _mark_modified(session, gaeentity)
+
+    
+class DatastorePutOp(SingleOperation):
+    """delayed put of entities to have fewer datastore write api calls
+
+    * save all modified entities at precommit (should be the first operation
+      processed, hence the 0 returned by insert_index())
+      
+    * in case other precommit operations modify some entities, resave modified
+      entities at commit. This supposes that no db changes will occur during
+      the commit event, which should be the case.
+    """
+    def insert_index(self):
+        return 0
+
+    def _put_entities(self):
+        pending = self.session.query_data('pendingeids', ())
+        modified = self.session.query_data('modifiedentities', {})
+        for eid, gaeentity in modified.iteritems():
+            assert not eid in pending
+            Put(gaeentity)
+        modified.clear()
+        
+    def commit_event(self):
+        self._put_entities()
+        
+    def precommit_event(self):
+        self._put_entities()
+
+
+class GAESource(AbstractSource):
+    """adapter for a system source on top of google appengine datastore"""
+
+    passwd_rql = "Any P WHERE X is EUser, X login %(login)s, X upassword P"
+    auth_rql = "Any X WHERE X is EUser, X login %(login)s, X upassword %(pwd)s"
+    _sols = ({'X': 'EUser', 'P': 'Password'},)
+    
+    options = ()
+    
+    def __init__(self, repo, appschema, source_config, *args, **kwargs):
+        AbstractSource.__init__(self, repo, appschema, source_config,
+                                *args, **kwargs)
+        if repo.config['use-google-auth']:
+            self.info('using google authentication service')
+            self.authenticate = self.authenticate_gauth
+        else:
+            self.authenticate = self.authenticate_local
+            
+    def reset_caches(self):
+        """method called during test to reset potential source caches"""
+        pass
+    
+    def init_creating(self):
+        pass
+
+    def init(self):
+        # XXX unregister unsupported hooks
+        from cubicweb.server.hooks import sync_owner_after_add_composite_relation
+        self.repo.hm.unregister_hook(sync_owner_after_add_composite_relation,
+                                     'after_add_relation', '')
+
+    def get_connection(self):
+        return ConnectionWrapper()
+    
+    # ISource interface #######################################################
+
+    def compile_rql(self, rql):
+        rqlst = self.repo.querier._rqlhelper.parse(rql)
+        rqlst.restricted_vars = ()
+        rqlst.children[0].solutions = self._sols
+        return rqlst
+    
+    def set_schema(self, schema):
+        """set the application's schema"""
+        self.interpreter = RQLInterpreter(schema)
+        self.schema = schema
+        if 'EUser' in schema and not self.repo.config['use-google-auth']:
+            # rql syntax trees used to authenticate users
+            self._passwd_rqlst = self.compile_rql(self.passwd_rql)
+            self._auth_rqlst = self.compile_rql(self.auth_rql)
+                
+    def support_entity(self, etype, write=False):
+        """return true if the given entity's type is handled by this adapter
+        if write is true, return true only if it's a RW support
+        """
+        return True
+    
+    def support_relation(self, rtype, write=False):
+        """return true if the given relation's type is handled by this adapter
+        if write is true, return true only if it's a RW support
+        """
+        return True
+
+    def authenticate_gauth(self, session, login, password):
+        guser = users.get_current_user()
+        # whether to allow anonymous connections should be set in the app.yaml
+        # file; assume it's authorized if we get here
+        if guser is None:
+            login = u'anonymous'
+        else:
+            login = unicode(guser.nickname())
+        # XXX http://code.google.com/appengine/docs/users/userobjects.html
+        # use a reference property to automatically work with email address
+        # changes after the propagation feature is implemented
+        key = Key.from_path('EUser', 'key_' + login, parent=None)
+        try:
+            euser = session.datastore_get(key)
+            # XXX fix user. Required until we find a better way to fix broken records
+            if not euser.get('s_in_group'):
+                _init_groups(guser, euser)
+                Put(euser)
+            return str(key)
+        except datastore_errors.EntityNotFoundError:
+            # create a record for this user
+            euser = Entity('EUser', name='key_' + login)
+            euser['s_login'] = login
+            _init_groups(guser, euser)
+            Put(euser)
+            return str(euser.key())
+        
+    def authenticate_local(self, session, login, password):
+        """return EUser eid for the given login/password if this account is
+        defined in this source, else raise `AuthenticationError`
+
+        two queries are needed since passwords are stored crypted, so we have
+        to fetch the salt first
+        """
+        args = {'login': login, 'pwd' : password}
+        if password is not None:
+            rset = self.syntax_tree_search(session, self._passwd_rqlst, args)
+            try:
+                pwd = rset[0][0]
+            except IndexError:
+                raise AuthenticationError('bad login')
+            # passwords are stored using the bytea type, so we get a StringIO
+            if pwd is not None:
+                args['pwd'] = crypt_password(password, pwd[:2])
+        # get eid from login and (crypted) password
+        rset = self.syntax_tree_search(session, self._auth_rqlst, args)
+        try:
+            return rset[0][0]
+        except IndexError:
+            raise AuthenticationError('bad password')
+    
+    def syntax_tree_search(self, session, union, args=None, cachekey=None, 
+                           varmap=None):
+        """return result from this source for a rql query (actually from a rql
+        syntax tree and a solution dictionary mapping each used variable to a
+        possible type). If cachekey is given, the query necessary to fetch the
+        results (but not the results themselves) may be cached using this key.
+        """
+        results, description = self.interpreter.interpret(union, args,
+                                                          session.datastore_get)
+        return results # XXX description
+                
+    def flying_insert(self, table, session, union, args=None, varmap=None):
+        raise NotImplementedError
+    
+    def add_entity(self, session, entity):
+        """add a new entity to the source"""
+        # unlike other modifications, do not delay add_entity: the newly
+        # created entity needs an eid
+        entity.put()
+        
+    def update_entity(self, session, entity):
+        """replace an entity in the source"""
+        gaeentity = entity.to_gae_model()
+        _mark_modified(session, entity.to_gae_model())
+        if gaeentity.kind() == 'EUser':
+            for asession in self.repo._sessions.itervalues():
+                if asession.user.eid == entity.eid:
+                    asession.user.update(dict(gaeentity))
+                
+    def delete_entity(self, session, etype, eid):
+        """delete an entity from the source"""
+        # unlike other modifications, do not delay delete_entity, to ensure
+        # consistency
+        key = Key(eid)
+        Delete(key)
+        session.clear_datastore_cache(key)
+        session.drop_entity_cache(eid)
+        session.query_data('modifiedentities', {}).pop(eid, None)
+
+    def add_relation(self, session, subject, rtype, object):
+        """add a relation to the source"""
+        gaesubj, gaeobj, cards = _rinfo(session, subject, rtype, object)
+        _radd(session, gaesubj, gaeobj.key(), 's_' + rtype, cards[0])
+        _radd(session, gaeobj, gaesubj.key(), 'o_' + rtype, cards[1])
+        _clear_related_cache(session, gaesubj, rtype, gaeobj)
+            
+    def delete_relation(self, session, subject, rtype, object):
+        """delete a relation from the source"""
+        gaesubj, gaeobj, cards = _rinfo(session, subject, rtype, object)
+        pending = session.query_data('pendingeids', set(), setdefault=True)
+        if not subject in pending:
+            _rdel(session, gaesubj, gaeobj.key(), 's_' + rtype, cards[0])
+        if not object in pending:
+            _rdel(session, gaeobj, gaesubj.key(), 'o_' + rtype, cards[1])
+        _clear_related_cache(session, gaesubj, rtype, gaeobj)
+        
+    # system source interface #################################################
+
+    def eid_type_source(self, session, eid):
+        """return a tuple (type, source, extid) for the entity with id <eid>"""
+        try:
+            key = Key(eid)
+        except datastore_errors.BadKeyError:
+            raise UnknownEid(eid)
+        return key.kind(), 'system', None
+    
+    def create_eid(self, session):
+        return None # let the datastore generate the key
+
+    def add_info(self, session, entity, source, extid=None):
+        """add type and source info for an eid into the system table"""
+        pass
+
+    def delete_info(self, session, eid, etype, uri, extid):
+        """delete system information on deletion of an entity by transferring
+        record from the entities table to the deleted_entities table
+        """
+        pass
+        
+    def fti_unindex_entity(self, session, eid):
+        """remove text content for entity with the given eid from the full text
+        index
+        """
+        pass
+        
+    def fti_index_entity(self, session, entity):
+        """add text content of a created/modified entity to the full text index
+        """
+        pass
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/goaconfig.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,164 @@
+"""google appengine configuration
+
+:organization: Logilab
+:copyright: 2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+import os
+from os.path import join
+
+from cubicweb import CW_SOFTWARE_ROOT
+from cubicweb.cwconfig import CubicWebConfiguration
+from cubicweb.web.webconfig import WebConfiguration, merge_options, Method
+from cubicweb.server.serverconfig import ServerConfiguration
+from cubicweb.goa.dbmyams import load_schema
+
+UNSUPPORTED_OPTIONS = set(('connections-pool-size',
+                           'pyro-port', 'pyro-id', 'pyro-application-id',
+                           'pyro-ns-host', 'pyro-ns-port', 'pyro-ns-group',
+                           'https-url', 'host', 'pid-file', 'uid', 'base-url', 'log-file',
+                           'smtp-host', 'smtp-port',
+                           'embed-allowed',
+                           ))
+
+# XXX fix:
+# * default sender-name / sender-addr value
+# * what about *session-time
+# * check auth-mode=http + fix doc (eg require use-google-auth = False)
+
+class GAEConfiguration(ServerConfiguration, WebConfiguration):
+    """repository and web application in the same twisted process"""
+    name = 'app'
+    repo_method = 'inmemory'
+    options = merge_options(( 
+        ('included-cubes',
+         {'type' : 'csv',
+          'default': [],
+          'help': 'list of db model based cubes used by the application.',
+          'group': 'main', 'inputlevel': 1,
+          }),
+        ('included-yams-cubes',
+         {'type' : 'csv',
+          'default': [],
+          'help': 'list of yams based cubes used by the application.',
+          'group': 'main', 'inputlevel': 1,
+          }),
+        ('use-google-auth',
+         {'type' : 'yn',
+          'default': True,
+          'help': 'does this application rely on google authentication service or not.',
+          'group': 'main', 'inputlevel': 1,
+          }),
+        ('schema-type',
+         {'type' : 'choice', 'choices': ('yams', 'dbmodel'),
+          'default': 'yams',
+          'help': 'does this application is defining its schema using yams or db model.',
+          'group': 'main', 'inputlevel': 1,
+          }),
+        # overridden options
+        ('query-log-file',
+         {'type' : 'string',
+          'default': None,
+          'help': 'web application query log file: DON\'T SET A VALUE HERE WHEN '
+          'UPLOADING YOUR APPLICATION. This should only be used to analyse '
+          'queries issued by your application in the development environment.',
+          'group': 'main', 'inputlevel': 2,
+          }),
+        ('anonymous-user',
+         {'type' : 'string',
+          'default': None,
+          'help': 'login of the CubicWeb user account to use for anonymous user '
+          '(if you want to allow anonymous). This option will be ignored if '
+          'use-google-auth option is set (in which case you should control '
+          'anonymous access using the app.yaml file)',
+          'group': 'main', 'inputlevel': 1,
+          }),
+        
+        ) + WebConfiguration.options + ServerConfiguration.options)
+    options = [(optname, optdict) for optname, optdict in options
+               if not optname in UNSUPPORTED_OPTIONS]
+
+    cubicweb_vobject_path = WebConfiguration.cubicweb_vobject_path | ServerConfiguration.cubicweb_vobject_path
+    cubicweb_vobject_path = list(cubicweb_vobject_path) + ['goa/appobjects']
+    cube_vobject_path = WebConfiguration.cube_vobject_path | ServerConfiguration.cube_vobject_path
+
+    # use file system schema
+    bootstrap_schema = read_application_schema = False
+    # schema is not persistent, don't load schema hooks (unavailable)
+    schema_hooks = False
+    # no user workflow for now
+    consider_user_state = False
+
+    # deactivate some hooks during [pre|post]create scripts execution
+    # (unique values check, owned_by/created_by relations setup)
+    free_wheel = True
+    
+    if not os.environ.get('APYCOT_ROOT'):
+        CUBES_DIR = join(CW_SOFTWARE_ROOT, '../cubes')
+    
+    def __init__(self, appid, apphome=None):
+        if apphome is None:
+            apphome = 'data'
+        self._apphome = apphome
+        self._base_url = None
+        CubicWebConfiguration.__init__(self, appid)
+
+    def __getitem__(self, key):
+        if key == 'connections-pool-size':
+            return 4 # > 1 to allow multiple user sessions in tests
+        if key == 'base-url':
+            return self._base_url
+        return super(GAEConfiguration, self).__getitem__(key)
+    
+    # overridden from cubicweb base configuration
+
+    @property
+    def apphome(self):
+        return self._apphome
+
+    def cubes(self):
+        """return the list of top level cubes used by this instance (eg
+        without dependencies)
+        """
+        if self._cubes is None:
+            cubes = self['included-cubes'] + self['included-yams-cubes']
+            cubes = self.expand_cubes(cubes)
+            return self.reorder_cubes(cubes)
+        return self._cubes
+
+    def vc_config(self):
+        """return CubicWeb's engine and application's cube versions number"""
+        return {}
+
+    # overridden from cubicweb web configuration
+
+    def instance_md5_version(self):
+        return ''
+    
+    def _init_base_url(self):
+        pass
+    
+    # overridden from cubicweb server configuration
+    
+    def sources(self):
+        return {'system': {'adapter': 'gae'}}
+    
+    def load_schema(self, schemaclasses=None, extrahook=None):
+        try:
+            return self._schema
+        except AttributeError:
+            self._schema = load_schema(self, schemaclasses, extrahook)
+            return self._schema
+
+    # goa specific
+    def repo_session(self, sessionid):
+        return self.repository()._sessions[sessionid]
+    
+    def is_anonymous_user(self, login):
+        if self['use-google-auth']:
+            from google.appengine.api import users
+            return users.get_current_user() is None
+        else:
+            return login == self.anonymous_user()[0]
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/goactl.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,229 @@
+"""cubicweb on appengine plugins for cubicweb-ctl
+
+:organization: Logilab
+:copyright: 2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from os.path import exists, join, split, dirname, basename, normpath, abspath
+
+from cubicweb import BadCommandUsage
+from cubicweb import CW_SOFTWARE_ROOT
+from cubicweb.toolsutils import (Command, register_commands, copy_skeleton,
+                              create_dir, create_symlink, create_copy)
+from cubicweb.cwconfig import CubicWebConfiguration
+
+from logilab import common as lgc
+from logilab.common.textutils import get_csv
+from logilab import constraint as lgcstr
+from logilab import mtconverter as lgmtc
+import rql, yams, yapps, simplejson, dateutil, vobject, docutils, roman
+
+SLINK_DIRECTORIES = (
+    (lgc.__path__[0], 'logilab/common'),
+    (lgmtc.__path__[0], 'logilab/mtconverter'),
+    (lgcstr.__path__[0], 'logilab/constraint'),
+    (rql.__path__[0], 'rql'),
+    (simplejson.__path__[0], 'simplejson'),
+    (yams.__path__[0], 'yams'),
+    (yapps.__path__[0], 'yapps'),
+    (dateutil.__path__[0], 'dateutil'),
+    (vobject.__path__[0], 'vobject'),
+    (docutils.__path__[0], 'docutils'),
+    (roman.__file__.replace('.pyc', '.py'), 'roman.py'),
+
+    (join(CW_SOFTWARE_ROOT, 'embedded', 'mx'), 'mx'),
+    ('/usr/share/fckeditor/', 'fckeditor'),
+
+    (join(CW_SOFTWARE_ROOT, 'web', 'data'), join('cubes', 'shared', 'data')), 
+    (join(CW_SOFTWARE_ROOT, 'web', 'wdoc'), join('cubes', 'shared', 'wdoc')),
+    (join(CW_SOFTWARE_ROOT, 'i18n'), join('cubes', 'shared', 'i18n')),
+    (join(CW_SOFTWARE_ROOT, 'goa', 'tools'), 'tools'),
+    (join(CW_SOFTWARE_ROOT, 'goa', 'bin'), 'bin'),
+    )
+
+COPY_CW_FILES = (
+    '__init__.py',
+    '__pkginfo__.py',
+    '_exceptions.py',
+    'dbapi.py',
+    'cwvreg.py',
+    'cwconfig.py',
+    'interfaces.py',
+    'rset.py',
+    'schema.py',
+    'schemaviewer.py',
+    'vregistry.py',
+    
+    'common/appobject.py',
+    'common/entity.py',
+    'common/html4zope.py',
+    'common/mail.py',
+    'common/migration.py',
+    'common/mixins.py',
+    'common/mttransforms.py',
+    'common/registerers.py',
+    'common/rest.py',
+    'common/selectors.py',
+    'common/view.py',
+    'common/uilib.py',
+    'common/utils.py',
+
+    'server/hookhelper.py',
+    'server/hooksmanager.py',
+    'server/hooks.py',
+    'server/migractions.py',
+    'server/pool.py',
+    'server/querier.py',
+    'server/repository.py',
+    'server/rqlrewrite.py',
+    'server/securityhooks.py',
+    'server/session.py',
+    'server/serverconfig.py',
+    'server/ssplanner.py',
+    'server/utils.py',
+    'server/sources/__init__.py',
+
+    'entities/__init__.py',
+    'entities/authobjs.py',
+    'entities/lib.py',
+    'entities/schemaobjs.py',
+
+    'sobjects/__init__.py',
+    'sobjects/notification.py',
+    
+# XXX would be necessary for goa.testlib but require more stuff to be added
+#     such as server.serverconfig and so on (check devtools.__init__)
+#    'devtools/__init__.py',
+#    'devtools/fake.py',
+    
+    'web/__init__.py',
+    'web/_exceptions.py',
+    'web/action.py',
+    'web/application.py',
+    'web/box.py',
+    'web/component.py',
+    'web/controller.py',
+    'web/form.py',
+    'web/htmlwidgets.py',
+    'web/httpcache.py',
+    'web/request.py',
+    'web/webconfig.py',
+    'web/widgets.py',
+
+    'web/views/__init__.py',
+    'web/views/actions.py',
+    'web/views/basecomponents.py',
+    'web/views/basecontrollers.py',
+    'web/views/baseforms.py',
+    'web/views/basetemplates.py',
+    'web/views/baseviews.py',
+    'web/views/boxes.py',
+    'web/views/calendar.py',
+    'web/views/error.py',
+    'web/views/editcontroller.py',
+    'web/views/ibreadcrumbs.py',
+    'web/views/idownloadable.py',
+    'web/views/magicsearch.py',
+    'web/views/management.py',
+    'web/views/navigation.py',
+    'web/views/startup.py',
+    'web/views/wdoc.py',
+    'web/views/urlpublishing.py',
+    'web/views/urlrewrite.py',
+    'web/views/xbel.py',
+
+    'wsgi/__init__.py',
+    'wsgi/handler.py',
+    'wsgi/request.py',
+    
+    'goa/__init__.py',
+    'goa/db.py',
+    'goa/dbinit.py',
+    'goa/dbmyams.py',
+    'goa/goaconfig.py',
+    'goa/goavreg.py',
+    'goa/gaesource.py',
+    'goa/rqlinterpreter.py',
+    'goa/appobjects/__init__.py',
+    'goa/appobjects/components.py', 
+    'goa/appobjects/dbmgmt.py', 
+    'goa/appobjects/gauthservice.py', 
+    'goa/appobjects/sessions.py',
+
+    'schemas/bootstrap.py',
+    'schemas/base.py',
+    )
+
+OVERRIDEN_FILES = (
+    ('toolsutils.py', 'toolsutils.py'),
+    ('mttransforms.py', 'common/mttransforms.py'),
+    ('server__init__.py', 'server/__init__.py'),
+    ('rqlannotation.py', 'server/rqlannotation.py'),
+    )
+
+
+def create_init_file(pkgdir, pkgname):
+    open(join(pkgdir, '__init__.py'), 'w').write('"""%s pkg"""' % pkgname)
+
+
+class NewGoogleAppCommand(Command):
+    """Create a new google appengine application.
+
+    <application directory>
+      the path to the appengine application directory
+    """
+    name = 'newgapp'
+    arguments = '<application directory>'
+    
+    def run(self, args):
+        if len(args) != 1:
+            raise BadCommandUsage("exactly one argument is expected")
+        appldir, = args
+        appldir = normpath(abspath(appldir))
+        appid = basename(appldir)
+        context = {'appname': appid}
+        # goa application's skeleton
+        copy_skeleton(join(CW_SOFTWARE_ROOT, 'goa', 'skel'),
+                      appldir, context, askconfirm=True)
+        # cubicweb core dependencies
+        for directory, subdirectory in SLINK_DIRECTORIES:
+            subdirectory = join(appldir, subdirectory)
+            if not exists(split(subdirectory)[0]):
+                create_dir(split(subdirectory)[0])
+            create_symlink(directory, join(appldir, subdirectory))
+        create_init_file(join(appldir, 'logilab'), 'logilab')
+        # copy supported parts of cubicweb
+        create_dir(join(appldir, 'cubicweb'))
+        for fpath in COPY_CW_FILES:
+            target = join(appldir, 'cubicweb', fpath)
+            if not exists(split(target)[0]):
+                create_dir(split(target)[0])
+            create_symlink(join(CW_SOFTWARE_ROOT, fpath), target)
+        # overridden files
+        create_init_file(join(appldir, 'cubicweb/common'), 'cubicweb.common')
+        for fpath, subfpath in OVERRIDEN_FILES:
+            create_symlink(join(CW_SOFTWARE_ROOT, 'goa', 'overrides', fpath),
+                           join(appldir, 'cubicweb', subfpath))
+        # link every supported components
+        cubesdir = join(appldir, 'cubes')
+        cubesdir = CubicWebConfiguration.cubes_dir()
+        for include in ('eaddressbook','ebasket', 'eblog','eclassfolders',
+                        'eclasstags', 'ecomment', 'efile', 'elink',
+                        'emailinglist', 'eperson', 'etask', 'ezone',
+                        ):
+            create_symlink(join(cubesdir, include), join(cubesdir, include))
+        # generate sample config
+        from cubicweb.goa.goaconfig import GAEConfiguration
+        from cubicweb.common.migration import MigrationHelper
+        config = GAEConfiguration(appid, appldir)
+        if exists(config.main_config_file()):
+            mih = MigrationHelper(config)
+            mih.rewrite_configuration()
+        else:
+            config.save()
+
+
+register_commands((NewGoogleAppCommand,
+                   ))
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/goavreg.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,75 @@
+"""goa specific registry
+
+:organization: Logilab
+:copyright: 2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from os import listdir
+from os.path import join, isdir
+
+from cubicweb import CW_SOFTWARE_ROOT
+from cubicweb.cwvreg import CubicWebRegistry
+
+
+def _pkg_name(cube, module):
+    if cube is None:
+        return module
+    return '%s.%s' % (cube, module)
+
+class GAERegistry(CubicWebRegistry):
+    
+    def set_schema(self, schema):
+        """disable reload hooks of cubicweb registry set_schema method"""
+        self.schema = schema
+
+    def load(self, applroot):
+        from cubicweb.goa import db
+        self.load_module(db) # AnyEntity class
+        # explicit loading, we don't want to load __init__.py
+        self.load_directory(join(CW_SOFTWARE_ROOT, 'entities'),
+                            'cubicweb.entities', skip=('__init__.py',))
+        self.load_directory(join(CW_SOFTWARE_ROOT, 'web', 'views'),
+                            'cubicweb.web.views')
+        self.load_directory(join(CW_SOFTWARE_ROOT, 'goa', 'appobjects'),
+                            'cubicweb.goa.appobjects')
+        for cube in reversed(self.config.cubes()):
+            self.load_cube(cube)
+        self.load_application(applroot)
+        
+    def load_directory(self, directory, cube, skip=()):
+        for filename in listdir(directory):
+            if filename[-3:] == '.py' and not filename in skip:
+                self._import('%s.%s' % (cube, filename[:-3]))
+
+    def load_cube(self, cube):
+        self._auto_load(self.config.cube_dir(cube),
+                        cube in self.config['included-cubes'],
+                        cube)
+
+    def load_application(self, applroot):
+        self._auto_load(applroot, self.config['schema-type'] == 'dbmodel')
+
+    def _import(self, modname):
+        obj = __import__(modname)
+        for attr in modname.split('.')[1:]:
+            obj = getattr(obj, attr)
+        self.load_module(obj)
+
+    def _auto_load(self, path, loadschema, cube=None):
+        vobjpath = self.config.cube_vobject_path
+        for filename in listdir(path):
+            if filename[-3:] == '.py' and filename[:-3] in vobjpath:
+                self._import(_pkg_name(cube, filename[:-3]))
+            else:
+                abspath = join(path, filename)
+                if isdir(abspath) and filename in vobjpath:
+                    self.load_directory(abspath, _pkg_name(cube, filename))
+        if loadschema:
+            # when using db.Model defined schema, the defined class is used as
+            # entity class as well and so have to be registered
+            self._import(_pkg_name(cube, 'schema'))
+
+
+    
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/overrides/__init__.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+# server.__init__
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/overrides/mttransforms.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,47 @@
+"""mime type transformation engine for cubicweb, based on mtconverter
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from logilab import mtconverter
+
+from logilab.mtconverter.engine import TransformEngine
+from logilab.mtconverter.transform import Transform
+from cubicweb.common.uilib import rest_publish, html_publish, remove_html_tags
+
+HTML_MIMETYPES = ('text/html', 'text/xhtml', 'application/xhtml+xml')
+# CubicWeb specific transformations
+
+class rest_to_html(Transform):
+    inputs = ('text/rest', 'text/x-rst')
+    output = 'text/html'
+    def _convert(self, trdata):
+        return rest_publish(trdata.appobject, trdata.decode())
+
+class html_to_html(Transform):
+    inputs = HTML_MIMETYPES
+    output = 'text/html'
+    def _convert(self, trdata):
+        return html_publish(trdata.appobject, trdata.data)
+
+
+# Instantiate and configure the transformation engine
+
+mtconverter.UNICODE_POLICY = 'replace'
+
+ENGINE = TransformEngine()
+ENGINE.add_transform(rest_to_html())
+ENGINE.add_transform(html_to_html())
+
+HAS_PIL_TRANSFORMS = False
+HAS_PYGMENTS_TRANSFORMS = False
+    
+class html_to_text(Transform):
+    inputs = HTML_MIMETYPES
+    output = 'text/plain'
+    def _convert(self, trdata):
+        return remove_html_tags(trdata.data)
+ENGINE.add_transform(html_to_text())
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/overrides/rqlannotation.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,6 @@
+def sqlgen_annotate(schema, rqlhelper, rqlst):
+    rqlst.has_text_query = False
+    rqlst.need_distinct = False
+    
+def set_qdata(union, noinvariant):
+    pass
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/overrides/server__init__.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,16 @@
+# server debugging flag
+DEBUG = False
+
+# sqlite stored procedures have to be registered at connection opening time
+SQL_CONNECT_HOOKS = {}
+
+# add to this set relations which should have their add security checking done
+# *BEFORE* adding the actual relation (done after by default)
+BEFORE_ADD_RELATIONS = set(('owned_by',))
+
+# add to this set relations which should have their add security checking done
+# *at COMMIT TIME* (done after by default)
+ON_COMMIT_ADD_RELATIONS = set(())
+
+# available sources registry
+SOURCE_TYPES = {}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/overrides/server_utils.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,12 @@
+
+class RepoThread(object):
+    def __init__(self, *args):
+        pass # XXX raise
+    def start(self):
+        pass
+    def join(self):
+        pass
+    
+class LoopTask(RepoThread):
+    def cancel(self):
+        pass
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/overrides/toolsutils.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,50 @@
+import sys
+from cubicweb import warning
+
+def lines(path, comments=None):
+    result = []
+    for line in open(path, 'U'):
+        line = line.strip()
+        if line and (comments is None or not line.startswith(comments)):
+            result.append(line)
+    return result
+
+def read_config(config_file):
+    """read the application configuration from a file and return it as a
+    dictionary
+
+    :type config_file: str
+    :param config_file: path to the configuration file
+
+    :rtype: dict
+    :return: a dictionary with specified values associated with option names
+    """
+    config = current = {}
+    try:
+        for line in lines(config_file, comments='#'):
+            try:
+                option, value = line.split('=', 1)
+            except ValueError:
+                option = line.strip().lower()
+                if option[0] == '[':
+                    # start a section
+                    section = option[1:-1]
+                    assert not config.has_key(section), \
+                           'Section %s is defined more than once' % section
+                    config[section] = current = {}
+                    continue
+                print >> sys.stderr, 'ignoring malformed line\n%r' % line
+                continue
+            option = option.strip().replace(' ', '_')
+            value = value.strip()
+            current[option] = value or None
+    except IOError, ex:
+        warning('missing or non readable configuration file %s (%s)',
+                config_file, ex)
+    return config
+
+def env_path(env_var, default, name):
+    return default
+
+def create_dir(*args):
+    raise RuntimeError()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/rqlinterpreter.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,682 @@
+"""provide a minimal RQL support for google appengine dbmodel
+
+:organization: Logilab
+:copyright: 2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from mx.DateTime import DateTimeType, DateTimeDeltaType
+from datetime import datetime
+
+from rql import RQLHelper, nodes
+from logilab.common.compat import any
+
+from cubicweb import Binary
+from cubicweb.rset import ResultSet
+from cubicweb.goa import mx2datetime, datetime2mx
+from cubicweb.server import SQL_CONNECT_HOOKS
+
+from google.appengine.api.datastore import Key, Get, Query, Entity
+from google.appengine.api.datastore_types import Text, Blob
+from google.appengine.api.datastore_errors import EntityNotFoundError, BadKeyError
+
+
+def etype_from_key(key):
+    return Key(key).kind()
+
+def poss_var_types(myvar, ovar, kind, solutions):
+    return frozenset(etypes[myvar] for etypes in solutions 
+                     if etypes[ovar] == kind)
+
+def expand_result(results, result, myvar, values, dsget=None):
+    values = map(dsget, values)
+    if values:
+        result[myvar] = values.pop(0)
+        for value in values:
+            newresult = result.copy()
+            newresult[myvar] = value
+            results.append(newresult)
+    else:
+        results.remove(result)
+
+def _resolve(restrictions, solutions, fixed):
+    varname = restrictions[0].searched_var
+    objs = []
+    for etype in frozenset(etypes[varname] for etypes in solutions):
+        gqlargs = {}
+        query = Query(etype)
+        for restriction in restrictions:
+            restriction.fill_query(fixed, query)
+        pobjs = query.Run()
+        if varname in fixed:
+            value = fixed[varname]
+            objs += (x for x in pobjs if x == value)
+        else:
+            objs += pobjs
+    if varname in fixed and not objs:
+        raise EidMismatch(varname, value)
+    return objs
+
+def _resolve_not(restrictions, solutions, fixed):
+    restr = restrictions[0]
+    constrvarname = restr.constraint_var
+    if len(restrictions) > 1 or not constrvarname in fixed:
+        raise NotImplementedError()
+    varname = restr.searched_var
+    objs = []
+    for etype in frozenset(etypes[varname] for etypes in solutions):
+        gqlargs = {}
+        for operator in ('<', '>'):
+            query = Query(etype)
+            restr.fill_query(fixed, query, operator)
+            objs += query.Run()
+    return objs
+
+def _print_results(rlist):
+    return '[%s]' % ', '.join(_print_result(r) for r in rlist)
+
+def _print_result(rdict):
+    string = []
+    for k, v in rdict.iteritems():
+        if isinstance(v, Entity):
+            string.append('%s: %s' % (k, v.key()))#_print_result(v)))
+        elif isinstance(v, list):
+            string.append('%s: [%s]' % (k, ', '.join(str(i) for i in v)))
+        else:
+            string.append('%s: %s' % (k, v))
+    return '{%s}' % ', '.join(string)
+
+                         
+class EidMismatch(Exception):
+    def __init__(self, varname, value):
+        self.varname = varname
+        self.value = value
+
+
+class Restriction(object):
+    supported_operators = ('=',)
+    def __init__(self, rel):
+        operator = rel.children[1].operator
+        if not operator in self.supported_operators:
+            raise NotImplementedError('unsupported operator')
+        self.rel = rel
+        self.operator = operator
+        self.rtype = rel.r_type
+        self.var = rel.children[0]
+        
+    def __repr__(self):
+        return '<%s for %s>' % (self.__class__.__name__, self.rel)
+    
+    @property
+    def rhs(self):
+        return self.rel.children[1].children[0]
+
+        
+class MultipleRestriction(object):
+    def __init__(self, restrictions):
+        self.restrictions = restrictions
+        
+    def resolve(self, solutions, fixed):
+        return _resolve(self.restrictions, solutions, fixed)
+
+    
+class VariableSelection(Restriction):
+    def __init__(self, rel, dsget, prefix='s'):
+        Restriction.__init__(self, rel)
+        self._dsget = dsget
+        self._not = self.rel.neged(strict=True)
+        self._prefix = prefix + '_'
+        
+    def __repr__(self):
+        return '<%s%s for %s>' % (self._prefix[0], self.__class__.__name__, self.rel)
+        
+    @property
+    def searched_var(self):
+        if self._prefix == 's_':
+            return self.var.name
+        return self.rhs.name
+        
+    @property
+    def constraint_var(self):
+        if self._prefix == 's_':
+            return self.rhs.name
+        return self.var.name
+        
+    def _possible_values(self, myvar, ovar, entity, solutions, dsprefix):
+        if self.rtype == 'identity':
+            return (entity.key(),)
+        value = entity.get(dsprefix + self.rtype)
+        if value is None:
+            return []
+        if not isinstance(value, list):
+            value = [value]
+        vartypes = poss_var_types(myvar, ovar, entity.kind(), solutions)
+        return (v for v in value if v.kind() in vartypes)
+        
+    def complete_and_filter(self, solutions, results):
+        myvar = self.rhs.name
+        ovar = self.var.name
+        rtype = self.rtype
+        if self.schema.rschema(rtype).is_final():
+            # should be detected by rql.stcheck: "Any C WHERE NOT X attr C" doesn't make sense
+            #if self._not:
+            #    raise NotImplementedError()
+            for result in results:
+                result[myvar] = result[ovar].get('s_'+rtype)
+        elif self.var.name in results[0]:
+            if self.rhs.name in results[0]:
+                self.filter(solutions, results)
+            else:
+                if self._not:
+                    raise NotImplementedError()
+                for result in results[:]:
+                    values = self._possible_values(myvar, ovar, result[ovar],
+                                                   solutions, 's_')
+                    expand_result(results, result, myvar, values, self._dsget)
+        else:
+            assert self.rhs.name in results[0]
+            self.object_complete_and_filter(solutions, results)           
+            
+    def filter(self, solutions, results):
+        myvar = self.rhs.name
+        ovar = self.var.name
+        newsols = {}
+        for result in results[:]:
+            entity = result[ovar]
+            key = entity.key()
+            if not key in newsols:
+                values = self._possible_values(myvar, ovar, entity, solutions, 's_')
+                newsols[key] = frozenset(v for v in values)
+            if self._not:
+                if result[myvar].key() in newsols[key]:
+                    results.remove(result)                
+            elif not result[myvar].key() in newsols[key]:
+                results.remove(result)
+    
+    def object_complete_and_filter(self, solutions, results):
+        if self._not:
+            raise NotImplementedError()
+        myvar = self.var.name
+        ovar = self.rhs.name
+        for result in results[:]:
+            values = self._possible_values(myvar, ovar, result[ovar],
+                                           solutions, 'o_')
+            expand_result(results, result, myvar, values, self._dsget)
+
+    
+class EidRestriction(Restriction):
+    def __init__(self, rel, dsget):
+        Restriction.__init__(self, rel)
+        self._dsget = dsget
+
+    def resolve(self, kwargs):
+        value = self.rel.children[1].children[0].eval(kwargs)
+        return self._dsget(value)
+
+
+class RelationRestriction(VariableSelection):
+
+    def _get_value(self, fixed):
+        return fixed[self.constraint_var].key()
+    
+    def fill_query(self, fixed, query, operator=None):
+        restr = '%s%s %s' % (self._prefix, self.rtype, operator or self.operator)
+        query[restr] = self._get_value(fixed)
+
+    def resolve(self, solutions, fixed):
+        if self.rtype == 'identity':
+            if self._not:
+                raise NotImplementedError()
+            return [fixed[self.constraint_var]]
+        if self._not:
+            return _resolve_not([self], solutions, fixed)
+        return _resolve([self], solutions, fixed)
+
+
+class NotRelationRestriction(RelationRestriction):
+
+    def _get_value(self, fixed):
+        return None
+    
+    def resolve(self, solutions, fixed):
+        if self.rtype == 'identity':
+            raise NotImplementedError()
+        return _resolve([self], solutions, fixed)
+
+
+class AttributeRestriction(RelationRestriction):
+    supported_operators = ('=', '>', '>=', '<', '<=', 'ILIKE')
+    def __init__(self, rel, kwargs):
+        RelationRestriction.__init__(self, rel, None)
+        value = self.rhs.eval(kwargs)
+        if isinstance(value, (DateTimeType, DateTimeDeltaType)):
+            #yamstype = self.schema.rschema(self.rtype).objects()[0]
+            value = mx2datetime(value, 'Datetime')
+        self.value = value
+        if self.operator == 'ILIKE':
+            if value.startswith('%'):
+                raise NotImplementedError('LIKE is only supported for prefix search')
+            if not value.endswith('%'):
+                raise NotImplementedError('LIKE is only supported for prefix search')
+            self.operator = '>'
+            self.value = value[:-1]
+            
+    def complete_and_filter(self, solutions, results):
+        # check lhs var first in case this is a restriction
+        assert self._not
+        myvar, rtype, value = self.var.name, self.rtype, self.value
+        for result in results[:]:
+            if result[myvar].get('s_'+rtype) == value:
+                results.remove(result)
+            
+    def _get_value(self, fixed):
+        return self.value
+
+
+class DateAttributeRestriction(AttributeRestriction):
+    """just a thin layer on top of `AttributeRestriction` that
+    tries to convert date strings such as in:
+    Any X WHERE X creation_date >= '2008-03-04'
+    """
+    def __init__(self, rel, kwargs):
+        super(DateAttributeRestriction, self).__init__(rel, kwargs)
+        if isinstance(self.value, basestring):
+#             try:
+            self.value = datetime.strptime(self.value, '%Y-%m-%d')
+#             except Exception, exc:
+#                 from logging import error
+#                 error('unable to parse date %s with format %%Y-%%m-%%d (exc=%s)', value, exc)
+
+
+class AttributeInRestriction(AttributeRestriction):
+    def __init__(self, rel, kwargs):
+        RelationRestriction.__init__(self, rel, None)
+        values = []
+        for c in self.rel.children[1].iget_nodes(nodes.Constant):
+            value = c.eval(kwargs)
+            if isinstance(value, (DateTimeType, DateTimeDeltaType)):
+                #yamstype = self.schema.rschema(self.rtype).objects()[0]
+                value = mx2datetime(value, 'Datetime')
+            values.append(value)
+        self.value = values
+
+    @property
+    def operator(self):
+        return 'in'
+            
+
+class TypeRestriction(AttributeRestriction):
+    def __init__(self, var):
+        self.var = var
+
+    def __repr__(self):
+        return '<%s for %s>' % (self.__class__.__name__, self.var)
+    
+    def resolve(self, solutions, fixed):
+        objs = []
+        for etype in frozenset(etypes[self.var.name] for etypes in solutions):
+            objs += Query(etype).Run()
+        return objs
+
+
+def append_result(res, descr, i, j, value, etype):
+    if value is not None:
+        if etype in ('Date', 'Datetime', 'Time'):
+            value = datetime2mx(value, etype)
+        elif isinstance(value, Text):
+            value = unicode(value)
+        elif isinstance(value, Blob):
+            value = Binary(str(value))
+    if j == 0:
+        res.append([value])
+        descr.append([etype])
+    else:
+        res[i].append(value)
+        descr[i].append(etype)
+
+
+class ValueResolver(object):
+    def __init__(self, functions, args, term):
+        self.functions = functions
+        self.args = args
+        self.term = term
+        self._solution = self.term.stmt.solutions[0]
+        
+    def compute(self, result):
+        """return (entity type, value) to which self.term is evaluated according
+        to the given result dictionary and to query arguments (self.args)
+        """
+        return self.term.accept(self, result)
+
+    def visit_function(self, node, result):
+        args = tuple(n.accept(self, result)[1] for n in node.children)
+        value = self.functions[node.name](*args)
+        return node.get_type(self._solution, self.args), value
+    
+    def visit_variableref(self, node, result):
+        value = result[node.name]
+        try:
+            etype = value.kind()
+            value = str(value.key())
+        except AttributeError:
+            etype = self._solution[node.name]
+        return etype, value
+    
+    def visit_constant(self, node, result):
+        return node.get_type(kwargs=self.args), node.eval(self.args)
+    
+        
+class RQLInterpreter(object):
+    """algorithm:
+    1. visit the restriction clauses and collect restriction for each subject
+       of a relation. Different restriction types are:
+       * EidRestriction
+       * AttributeRestriction
+       * RelationRestriction
+       * VariableSelection (not really a restriction)
+       -> dictionary {<variable>: [restriction...], ...}
+    2. resolve eid restrictions
+    3. for each select in union:
+           for each solution in select's solutions:
+               1. resolve variables which have attribute restriction
+               2. resolve relation restriction
+               3. resolve selection and add to global results 
+    """
+    def __init__(self, schema):
+        self.schema = schema
+        Restriction.schema = schema # yalta!
+        self.rqlhelper = RQLHelper(schema, {'eid': etype_from_key})
+        self._stored_proc = {'LOWER': lambda x: x.lower(),
+                             'UPPER': lambda x: x.upper()}
+        for cb in SQL_CONNECT_HOOKS.get('sqlite', []):
+            cb(self)
+            
+    # emulate sqlite connection interface so we can reuse stored procedures
+    def create_function(self, name, nbargs, func):
+        self._stored_proc[name] = func
+        
+    def create_aggregate(self, name, nbargs, func):
+        self._stored_proc[name] = func
+
+        
+    def execute(self, operation, parameters=None, eid_key=None, build_descr=True):
+        rqlst = self.rqlhelper.parse(operation, annotate=True)
+        try:
+            self.rqlhelper.compute_solutions(rqlst, kwargs=parameters)
+        except BadKeyError:
+            results, description = [], []
+        else:
+            results, description = self.interpret(rqlst, parameters)
+        return ResultSet(results, operation, parameters, description, rqlst=rqlst)
+        
+    def interpret(self, node, kwargs, dsget=None):
+        if dsget is None:
+            self._dsget = Get
+        else:
+            self._dsget = dsget
+        try:
+            return node.accept(self, kwargs)
+        except NotImplementedError:
+            self.critical('support for query not implemented: %s', node)
+            raise
+
+    def visit_union(self, node, kwargs):
+        results, description = [], []
+        extra = {'kwargs': kwargs}
+        for child in node.children:
+            pres, pdescr = self.visit_select(child, extra)
+            results += pres
+            description += pdescr
+        return results, description
+    
+    def visit_select(self, node, extra):
+        constraints = {}
+        if node.where is not None:
+            node.where.accept(self, constraints, extra)
+        fixed, toresolve, postresolve, postfilters = {}, {}, {}, []
+        # extract NOT filters
+        for vname, restrictions in constraints.items():
+            for restr in restrictions[:]:
+                if isinstance(restr, AttributeRestriction) and restr._not:
+                    postfilters.append(restr)
+                    restrictions.remove(restr)
+                    if not restrictions:
+                        del constraints[vname]
+        # add TypeRestriction for variable which have no restrictions at all
+        for varname, var in node.defined_vars.iteritems():
+            if not varname in constraints:
+                constraints[varname] = [TypeRestriction(var)]
+        #print node, constraints
+        # compute eid restrictions
+        kwargs = extra['kwargs']
+        for varname, restrictions in constraints.iteritems():
+            for restr in restrictions[:]:
+                if isinstance(restr, EidRestriction):
+                    assert not varname in fixed    
+                    try:
+                        value = restr.resolve(kwargs)
+                        fixed[varname] = value
+                    except EntityNotFoundError:
+                        return [], []
+                    restrictions.remove(restr)
+        #print 'fixed', fixed.keys()
+        # combine remaining restrictions
+        for varname, restrictions in constraints.iteritems():
+            for restr in restrictions:
+                if isinstance(restr, AttributeRestriction):
+                    toresolve.setdefault(varname, []).append(restr)
+                elif isinstance(restr, NotRelationRestriction) or (
+                    isinstance(restr, RelationRestriction) and 
+                    not restr.searched_var in fixed and restr.constraint_var in fixed):
+                    toresolve.setdefault(varname, []).append(restr)
+                else:
+                    postresolve.setdefault(varname, []).append(restr)
+            try:
+                if len(toresolve[varname]) > 1:
+                    toresolve[varname] = MultipleRestriction(toresolve[varname])
+                else:
+                    toresolve[varname] = toresolve[varname][0]
+            except KeyError:
+                pass
+        #print 'toresolve %s' % toresolve
+        #print 'postresolve %s' % postresolve
+        # resolve additional restrictions
+        if fixed:
+            partres = [fixed.copy()]
+        else:
+            partres = []
+        for varname, restr in toresolve.iteritems():
+            varpartres = partres[:]
+            try:
+                values = tuple(restr.resolve(node.solutions, fixed))
+            except EidMismatch, ex:
+                varname = ex.varname
+                value = ex.value
+                partres = [res for res in partres if res[varname] != value]
+                if partres:
+                    continue
+                # some join failed, no possible results
+                return [], []
+            if not values:
+                # some join failed, no possible results
+                return [], []
+            if not varpartres:
+                # init results
+                for value in values:
+                    partres.append({varname: value})
+            elif not varname in partres[0]:
+                # cartesian product
+                for res in partres:                    
+                    res[varname] = values[0]
+                for res in partres[:]:
+                    for value in values[1:]:
+                        res = res.copy()
+                        res[varname] = value
+                        partres.append(res)
+            else:
+                # union 
+                for res in varpartres:
+                    for value in values:
+                        res = res.copy()
+                        res[varname] = value
+                        partres.append(res)
+        #print 'partres', len(partres)
+        #print partres                        
+        # Note: don't check for empty partres since constant selection may still
+        # produce result at this point
+        # sort to get RelationRestriction before AttributeSelection
+        restrictions = sorted((restr for restrictions in postresolve.itervalues()
+                               for restr in restrictions),
+                              key=lambda x: not isinstance(x, RelationRestriction))
+        # compute stuff not doable in the previous step using datastore queries
+        for restr in restrictions + postfilters:
+            restr.complete_and_filter(node.solutions, partres)
+            if not partres:
+                # some join failed, no possible results
+                return [], []
+        if extra.pop('has_exists', False):
+            # remove potential duplicates introduced by exists
+            toremovevars = [v.name for v in node.defined_vars.itervalues()
+                            if not v.scope is node]
+            if toremovevars:
+                newpartres = []
+                for result in partres:
+                    for var in toremovevars:
+                        del result[var]
+                    if not result in newpartres:
+                        newpartres.append(result)
+                if not newpartres:
+                    # some join failed, no possible results
+                    return [], []
+                partres = newpartres
+        if node.orderby:
+            for sortterm in reversed(node.orderby):
+                resolver = ValueResolver(self._stored_proc, kwargs, sortterm.term)
+                partres.sort(reverse=not sortterm.asc,
+                             key=lambda x: resolver.compute(x)[1])
+        if partres:
+            if node.offset:
+                partres = partres[node.offset:]
+            if node.limit:
+                partres = partres[:node.limit]
+            if not partres:
+                return [], []
+        #print 'completed partres', _print_results(partres)
+        # compute results
+        res, descr = [], []
+        for j, term in enumerate(node.selection):
+            resolver = ValueResolver(self._stored_proc, kwargs, term)
+            if not partres:
+                etype, value = resolver.compute({})
+                # only constant selected
+                if not res:
+                    res.append([])
+                    descr.append([])
+                    res[0].append(value)
+                    descr[0].append(etype)
+            else:
+                for i, sol in enumerate(partres):
+                    etype, value = resolver.compute(sol)
+                    append_result(res, descr, i, j, value, etype)
+        #print '--------->', res
+        return res, descr
+    
+    def visit_and(self, node, constraints, extra): 
+        for child in node.children:
+            child.accept(self, constraints, extra)
+    def visit_exists(self, node, constraints, extra):
+        extra['has_exists'] = True
+        self.visit_and(node, constraints, extra)
+    
+    def visit_not(self, node, constraints, extra):
+        for child in node.children:
+            child.accept(self, constraints, extra)
+        try:
+            extra.pop(node)
+        except KeyError:
+            raise NotImplementedError()
+        
+    def visit_relation(self, node, constraints, extra):
+        if node.is_types_restriction():
+            return
+        rschema = self.schema.rschema(node.r_type)
+        neged = node.neged(strict=True)
+        if neged:
+            # ok, we *may* process this Not node (not implemented error will be
+            # raised later if we can't)
+            extra[node.parent] = True
+        if rschema.is_final():
+            self._visit_final_relation(rschema, node, constraints, extra)
+        elif neged:
+            self._visit_non_final_neged_relation(rschema, node, constraints)
+        else:
+            self._visit_non_final_relation(rschema, node, constraints)
+                
+    def _visit_non_final_relation(self, rschema, node, constraints, not_=False):
+        lhs, rhs = node.get_variable_parts()
+        for v1, v2, prefix in ((lhs, rhs, 's'), (rhs, lhs, 'o')):
+            #if not_:
+            nbrels = len(v2.variable.stinfo['relations'])
+            #else:
+            #    nbrels = len(v2.variable.stinfo['relations']) - len(v2.variable.stinfo['uidrels'])
+            if nbrels > 1:
+                constraints.setdefault(v1.name, []).append(
+                    RelationRestriction(node, self._dsget, prefix))
+                # just init an empty list for v2 variable to avoid a 
+                # TypeRestriction being added for it
+                constraints.setdefault(v2.name, [])
+                break
+        else:
+            constraints.setdefault(rhs.name, []).append(
+                VariableSelection(node, self._dsget, 's'))
+                
+    def _visit_non_final_neged_relation(self, rschema, node, constraints):
+        lhs, rhs = node.get_variable_parts()
+        for v1, v2, prefix in ((lhs, rhs, 's'), (rhs, lhs, 'o')):
+            stinfo = v2.variable.stinfo
+            if not stinfo['selected'] and len(stinfo['relations']) == 1:
+                constraints.setdefault(v1.name, []).append(
+                    NotRelationRestriction(node, self._dsget, prefix))
+                constraints.setdefault(v2.name, [])
+                break
+        else:
+            self._visit_non_final_relation(rschema, node, constraints, True)
+
+    def _visit_final_relation(self, rschema, node, constraints, extra):
+        varname = node.children[0].name
+        if rschema.type == 'eid':
+            constraints.setdefault(varname, []).append(
+                EidRestriction(node, self._dsget))
+        else:
+            rhs = node.children[1].children[0]
+            if isinstance(rhs, nodes.VariableRef):
+                constraints.setdefault(rhs.name, []).append(
+                    VariableSelection(node, self._dsget))
+            elif isinstance(rhs, nodes.Constant):
+                if rschema.objects()[0] in ('Datetime', 'Date'): # XXX
+                    constraints.setdefault(varname, []).append(
+                        DateAttributeRestriction(node, extra['kwargs']))
+                else:
+                    constraints.setdefault(varname, []).append(
+                        AttributeRestriction(node, extra['kwargs']))
+            elif isinstance(rhs, nodes.Function) and rhs.name == 'IN':
+                constraints.setdefault(varname, []).append(
+                    AttributeInRestriction(node, extra['kwargs']))
+            else:
+                raise NotImplementedError()
+        
+    def _not_implemented(self, *args, **kwargs):
+        raise NotImplementedError()
+    
+    visit_or = _not_implemented
+    # shouldn't occur
+    visit_set = _not_implemented
+    visit_insert = _not_implemented
+    visit_delete = _not_implemented
+        
+
+from logging import getLogger
+from cubicweb import set_log_methods
+set_log_methods(RQLInterpreter, getLogger('cubicweb.goa.rqlinterpreter'))
+set_log_methods(Restriction, getLogger('cubicweb.goa.rqlinterpreter'))
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/skel/app.yaml.tmpl	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,72 @@
+application: %(appname)s
+version: 0-1
+runtime: python
+api_version: 1
+
+handlers:
+- url: /admin/.*
+  script: $PYTHON_LIB/google/appengine/ext/admin
+  login: admin
+- url: /data
+  static_dir: data
+- url: /fckeditor
+  static_dir: fckeditor
+- url: /_load
+  script: loader.py
+  login: admin
+- url: .*
+  script: main.py
+  # comment the line below to allow anonymous access or if you don't want to use
+  # google authentication service
+  login: required
+
+skip_files: |
+ ^(.*/)?(
+ (app\.yaml)|
+ (app\.yml)|
+ (index\.yaml)|
+ (index\.yml)|
+ (#.*#)|
+ (.*~)|
+ (.*\.py[co])|
+ (.*\.xcf)|
+ (.*\.asp)|
+ (.*\.aspx)|
+ (.*\.cfm)|
+ (.*\.po)|
+ (.*/RCS/.*)|
+ (\..*)|
+ (.*ChangeLog)|
+ (.*README)|
+ (.*TODO)|
+ (.*DEPENDS)|
+ (.*MANIFEST)|
+ (.*MANIFEST.in)|
+ (.*setup\.py)|
+ (.*,cover)|
+ (.*\.orig)|
+ (.*/test/.*)|
+ (.*/tests/.*)|
+ (.*/bin/.*)|
+ (.*/build/.*)|
+ (.*/debian/.*)|
+ (.*/doc/.*)|
+ (.*/skins/office2003/.*)|
+ (.*/editor/skins/silver/.*)|
+ (.*/editor/filemanager/.*)|
+ (.*/editor/plugins/.*)|
+ (.*/editor/images/smiley/.*)|
+ (.*/editor/.*spellerpages.*)|
+ (.*/docutils/writers/s5_html/.*)|
+ (.*/docutils/writers/latex2e/.*)|
+ (.*/docutils/writers/newlatex2e/.*)|
+ (.*/docutils/writers/pep_html/.*)|
+ (bin/.*)|
+ (tools/.*)|
+ (cubicweb.*/data/.*\.js)|
+ (cubicweb.*/data/.*\.css)|
+ (cubicweb.*/data/.*\.png)|
+ (cubicweb.*/data/.*\.gif)|
+ (cubicweb.*/data/.*\.gif)|
+ )$
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/skel/custom.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,10 @@
+def postinit(vreg):
+    """this callback is called at the end of initialization process
+    and can be used to load explicit modules (views or entities).
+
+    For instance :
+    import someviews
+    vreg.load_module(someviews)
+    """
+    # from migration import migrate
+    # migrate(vreg)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/skel/cw-cubes/README.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,7 @@
+This directory is where you should put your lax components.
+
+For your application to actually use a component, you also 
+have to modify the ``INCLUDED_COMPONENTS`` variable in 
+the ``custom.py`` module.
+
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/skel/i18n/en.po	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,14 @@
+# LAX application po file
+
+msgid ""
+msgstr ""
+"Project-Id-Version: erudi 2.48.2\n"
+"PO-Revision-Date: 2008-03-28 18:14+0100\n"
+"Last-Translator: Logilab Team <contact@logilab.fr>\n"
+"Language-Team: fr <contact@logilab.fr>\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: ginco-devtools\n"
+"Plural-Forms: nplurals=2; plural=(n > 1);\n"
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/skel/i18n/fr.po	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,15 @@
+# LAX application po file
+
+msgid ""
+msgstr ""
+"Project-Id-Version: erudi 2.48.2\n"
+"PO-Revision-Date: 2008-03-28 18:14+0100\n"
+"Last-Translator: Logilab Team <contact@logilab.fr>\n"
+"Language-Team: fr <contact@logilab.fr>\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: ginco-devtools\n"
+"Plural-Forms: nplurals=2; plural=(n > 1);\n"
+
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/skel/loader.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,24 @@
+if __name__ == '__main__':
+
+    from os.path import dirname, abspath
+    from cubicweb import goa
+    from cubicweb.goa.goaconfig import GAEConfiguration
+    from cubicweb.goa.dbinit import create_user, create_groups
+    
+    # compute application's root directory
+    APPLROOT = dirname(abspath(__file__))
+    # apply monkey patches first
+    goa.do_monkey_patch()
+    # get application's configuration (will be loaded from app.conf file)
+    GAEConfiguration.ext_resources['JAVASCRIPTS'].append('DATADIR/goa.js')
+    config = GAEConfiguration('toto', APPLROOT)
+    # create default groups
+    create_groups()
+    if not config['use-google-auth']:
+        # create default admin
+        create_user('admin', 'admin', ('managers', 'users'))
+        # create anonymous user if specified
+        anonlogin = config['anonymous-user']
+        if anonlogin:
+            create_user(anonlogin, config['anonymous-password'], ('guests',))
+    print 'content initialized'
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/skel/main.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,49 @@
+"""module defining the root handler for a lax application. You should not have
+to change anything here.
+
+:organization: Logilab
+:copyright: 2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+# compute application's root directory
+from os.path import dirname, abspath
+APPLROOT = dirname(abspath(__file__))
+
+# apply monkey patches first
+from cubicweb import goa
+goa.do_monkey_patch()
+
+# get application's configuration (will be loaded from app.conf file)
+from cubicweb.goa.goaconfig import GAEConfiguration
+GAEConfiguration.ext_resources['JAVASCRIPTS'].append('DATADIR/goa.js')
+config = GAEConfiguration('toto', APPLROOT)
+
+# dynamic objects registry
+from cubicweb.goa.goavreg import GAERegistry
+vreg = GAERegistry(config, debug=goa.MODE == 'dev')
+
+# trigger automatic classes registration (metaclass magic), should be done
+# before schema loading
+import custom
+
+# load application's schema
+vreg.schema = config.load_schema()
+
+# load dynamic objects
+vreg.load(APPLROOT)
+
+# call the postinit so custom get a chance to do application specific stuff
+custom.postinit(vreg)
+
+from cubicweb.wsgi.handler import CubicWebWSGIApplication
+application = CubicWebWSGIApplication(config, vreg=vreg)
+
+# main function so this handler module is cached 
+def main():
+    from wsgiref.handlers import CGIHandler
+    CGIHandler().run(application)
+
+if __name__ == "__main__":
+    main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/skel/schema.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,14 @@
+from cubicweb.schema import format_constraint
+
+class Blog(EntityType):
+    title = String(maxsize=50, required=True)
+    description = String()
+
+class BlogEntry(EntityType):
+    title = String(maxsize=100, required=True)
+    publish_date = Date(default='TODAY')
+    text_format = String(meta=True, internationalizable=True, maxsize=50,
+                         default='text/rest', constraints=[format_constraint])
+    text = String(fulltextindexed=True)
+    category = String(vocabulary=('important','business'))
+    entry_of = SubjectRelation('Blog', cardinality='?*')
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/skel/views.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,51 @@
+# custom application views
+
+from mx.DateTime import DateTime
+
+from cubicweb.web.views import baseviews
+from cubicweb.web.views.boxes import BoxTemplate
+from cubicweb.web.views.calendar import MONTHNAMES
+from cubicweb.web.htmlwidgets import BoxLink, BoxWidget
+
+_ = unicode
+
+
+class BlogEntryPrimaryView(baseviews.PrimaryView):
+    accepts = ('BlogEntry',)
+    
+    def cell_call(self, row, col):
+        entity = self.entity(row, col)
+        self.w(u'<h1>%s</h1>' % entity.dc_title())
+        entity.view('metadata', w=self.w)
+        self.w(entity.printable_value('text'))
+        
+
+class BlogArchiveBox(BoxTemplate):
+    """side box usually displaying some related entities in a primary view"""
+    id = 'blog_archives_box'
+    title = _('blog archives')
+
+    def call(self, **kwargs):
+        """display a list of entities by calling their <item_vid> view
+        """
+        _ = self.req._
+        rset = self.req.execute('Any CD ORDERBY CD DESC WHERE B is Blog, B creation_date CD')
+        blogmonths = []
+        for (blogdate,) in rset:
+            year, month = blogdate.year, blogdate.month
+            if (year, month) not in blogmonths:
+                blogmonths.append( (year, month) )
+        box = BoxWidget(_('Blog archives'), id=self.id)
+        for year, month in blogmonths:
+            firstday = DateTime(year, month, 1)
+            lastday = DateTime(year, month, firstday.days_in_month)
+            rql = ('Any B WHERE B is BlogEntry, B creation_date >= "%s", B creation_date <= "%s"'
+                   % (firstday.strftime('%Y-%m-%d'), lastday.strftime('%Y-%m-%d')))
+            url = self.build_url(rql=rql)
+            label = u'%s %s' % (_(MONTHNAMES[month-1]), year)
+            box.append( BoxLink(url, label) )
+        box.render(self.w)
+
+
+
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/test/data/__init__.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+"""zou"""
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/test/data/schema.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,11 @@
+
+
+class YamsEntity(EntityType):
+    if 'Blog' in defined_types and 'Article' in defined_types:
+        ambiguous_relation = SubjectRelation(('Blog', 'Article'))
+    if 'Blog' in defined_types:
+        inlined_relation = SubjectRelation('Blog', cardinality='?*')
+
+class inlined_relation(RelationType):
+    inlined = True
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/test/data/settings.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+TEMPLATE_DEBUG = False
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/test/data/views.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,27 @@
+# -*- coding: utf-8 -*-
+import os
+os.environ["DJANGO_SETTINGS_MODULE"] = 'data.settings'
+
+from django import template
+
+
+def encode_output(self, output):
+    # Check type so that we don't run str() on a Unicode object
+    if not isinstance(output, basestring):
+        return unicode(output)
+    return output
+
+template.VariableNode.encode_output = encode_output
+
+from cubicweb.common.view import StartupView
+
+INDEX_TEMPLATE = template.Template(u'''
+ <h1>hellô {{ user.login }}</h1>
+''')
+
+class MyIndex(StartupView):
+    id = 'index'
+    
+    def call(self):
+        ctx = template.Context({'user': self.req.user})
+        return INDEX_TEMPLATE.render(ctx)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/test/pytestconf.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,18 @@
+"""this pytestconf automatically adds the embedded mx python package to the PYTHONPATH
+"""
+import sys
+import os.path as osp
+
+import cubicweb
+# remove 'mx' modules imported by cubicweb
+for modname in sys.modules.keys(): 
+    if modname.startswith('mx'):
+        sys.modules.pop(modname)
+
+# this is where mx should get imported from
+mxpath = osp.abspath(osp.join(osp.dirname(cubicweb.__file__), 'embedded'))
+sys.path.insert(1, mxpath)
+
+# make sure the correct mx is imported
+import mx
+assert osp.dirname(mx.__file__) == osp.join(mxpath, 'mx'), '%s != %s' % (osp.dirname(mx.__file__), mxpath)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/test/unittest_db.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,65 @@
+# -*- coding: utf-8 -*-
+from cubicweb.goa.testlib import *
+
+from cubicweb import Binary
+from cubicweb.goa.goaconfig import GAEConfiguration
+from cubicweb.server.utils import crypt_password
+
+from google.appengine.api.datastore_types import Text, Blob
+
+
+class Blog(db.Model):
+    data = db.BlobProperty()
+    
+class DBTest(GAEBasedTC):
+    config = GAEConfiguration('toto')
+    config.global_set_option('use-google-auth', False)
+    
+    MODEL_CLASSES = (Blog,)
+
+    def test_set_none_relation(self):
+        eprop = self.add_entity('EProperty', pkey=u'ui.language', value=u'en')
+        self.failUnless('s_for_user' in eprop._dbmodel)
+        self.assertEquals(eprop._dbmodel['s_for_user'], None)
+
+    def test_euser_key(self):
+        euser = self.add_entity('EUser', login=u'toto', upassword='toto')
+        self.assertEquals(euser.key().name(), 'key_toto')
+        
+    def test_egroup_key(self):
+        egroup = self.add_entity('EGroup', name=u'toto')
+        self.assertEquals(egroup.key().name(), 'key_toto')
+
+    def test_password_encryption(self):
+        euser = self.add_entity('EUser', login=u'toto', upassword='toto')
+        self.failUnless(euser.upassword != 'toto', euser.upassword)
+        self.assertEquals(crypt_password('toto', euser.upassword[:2]), euser.upassword)
+
+    def test_long_text(self):
+        # datastore string type is limited to 500 bytes
+        text = u'e'*501
+        entity = self.add_entity('State', name=u'test', description=text)
+        self.assertIsInstance(entity.description, unicode)
+        self.failIf(isinstance(entity.description, Text)) 
+        self.assertEquals(entity.description, text)
+
+    def test_long_accentued_text(self):
+        # datastore string type is limited to 500 bytes
+        text = u'é'*500
+        entity = self.add_entity('State', name=u'test', description=text)
+        self.assertIsInstance(entity.description, unicode)
+        self.failIf(isinstance(entity.description, Text)) 
+        self.assertEquals(entity.description, text)
+
+    def test_blob(self):
+        data = 'e'*501
+        entity = self.add_entity('Blog', data=data)
+        self.assertIsInstance(entity.data, Binary)
+        value = entity.data.getvalue()
+        self.failIf(isinstance(value, Blob)) 
+        self.assertEquals(value, data)
+        
+        
+if __name__ == '__main__':
+    from logilab.common.testlib import unittest_main
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/test/unittest_editcontroller.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,411 @@
+from cubicweb.goa.testlib import *
+
+from urllib import unquote
+
+from cubicweb.common import ValidationError
+from cubicweb.common.uilib import rql_for_eid
+
+from cubicweb.web import INTERNAL_FIELD_VALUE, Redirect
+
+from cubicweb.goa.goaconfig import GAEConfiguration
+from cubicweb.entities.authobjs import EUser
+
+
+class EditControllerTC(GAEBasedTC):
+    
+    config = GAEConfiguration('toto')
+    config.global_set_option('use-google-auth', False)
+    config.global_set_option('schema-type', 'yams')
+    config.global_set_option('included-cubes', ())
+    config.global_set_option('included-yams-cubes', ('eblog',))
+    
+    MODEL_CLASSES = ()
+    from cubicweb.web.views import editcontroller
+    from cubicweb.entities import lib
+    LOAD_APP_MODULES = (editcontroller, lib)
+    
+    def setUp(self):
+        GAEBasedTC.setUp(self)
+        self.req = self.request()
+        self.ctrl = self.get_ctrl(self.req)
+        
+    def get_ctrl(self, req):
+        return self.vreg.select(self.vreg.registry_objects('controllers', 'edit'),
+                                req=req, appli=self)
+
+    def publish(self, req):
+        assert req is self.ctrl.req
+        try:
+            result = self.ctrl.publish()
+            req.cnx.commit()
+        except Redirect:
+            req.cnx.commit()
+            raise
+        except:
+            req.cnx.rollback()
+            raise
+        return result
+
+    def expect_redirect_publish(self, req=None):
+        if req is not None:
+            self.ctrl = self.get_ctrl(req)
+        else:
+            req = self.req
+        try:
+            res = self.publish(req)
+        except Redirect, ex:
+            try:
+                path, params = ex.location.split('?', 1)
+            except:
+                path, params = ex.location, ""
+            req._url = path
+            cleanup = lambda p: (p[0], unquote(p[1]))
+            params = dict(cleanup(p.split('=', 1)) for p in params.split('&') if p)
+            return req.relative_path(False), params # path.rsplit('/', 1)[-1], params
+        else:
+            self.fail('expected a Redirect exception')
+
+    def test_noparam_edit(self):
+        """check behaviour of this controller without any form parameter"""
+        self.req.form = {}
+        self.assertRaises(ValidationError, self.publish, self.req)
+        
+    def test_validation_unique(self):
+        """test creation of two linked entities"""        
+        user = self.user
+        self.req.form = {'eid': 'X', '__type:X': 'EUser',
+                         'login:X': self.user.login, 'edits-login:X': u'', 
+                         'upassword:X': u'toto', 'upassword-confirm:X': u'toto', 'edits-upassword:X': u'', 
+                         }
+        self.assertRaises(ValidationError, self.publish, self.req)
+
+
+    def test_user_editing_itself(self):
+        """checking that a manager user can edit itself"""
+        self.skip('missing actual gae support, retry latter')
+        user = self.user
+        basegroups = [str(eid) for eid, in self.req.execute('EGroup G WHERE X in_group G, X eid %(x)s', {'x': user.eid})]
+        groupeids = [eid for eid, in self.req.execute('EGroup G WHERE G name in ("managers", "users")')]
+        groups = [str(eid) for eid in groupeids]
+        stateeid = [eid for eid, in self.req.execute('State S WHERE S name "activated"')][0]
+        self.req.form = {
+            'eid':       user.eid,
+            '__type:'+user.eid:    'EUser',
+            'login:'+user.eid:     unicode(user.login),
+            'firstname:'+user.eid: u'Th\xe9nault',
+            'surname:'+user.eid:   u'Sylvain',
+            'in_group:'+user.eid:  groups,
+            'in_state:'+user.eid:  stateeid,
+            #
+            'edits-login:'+user.eid:     unicode(user.login),
+            'edits-firstname:'+user.eid: u'',
+            'edits-surname:'+user.eid:   u'',
+            'edits-in_group:'+user.eid:  basegroups,
+            'edits-in_state:'+user.eid:  stateeid,
+            }
+        path, params = self.expect_redirect_publish()
+        e = self.req.execute('Any X WHERE X eid %(x)s', {'x': user.eid}, 'x').get_entity(0, 0)
+        self.assertEquals(e.firstname, u'Th\xe9nault')
+        self.assertEquals(e.surname, u'Sylvain')
+        self.assertEquals(e.login, user.login)
+        self.assertEquals([g.eid for g in e.in_group], groupeids)
+        self.assertEquals(e.in_state[0].eid, stateeid)
+
+    def test_user_can_change_its_password(self):
+        user = self.create_user('user')
+        cnx = self.login('user')
+        req = self.request()
+        #self.assertEquals(self.ctrl.schema['EUser']._groups['read'],
+        #                  ('managers', 'users'))
+        req.form = {
+            'eid': user.eid, '__type:'+user.eid: 'EUser',
+            '__maineid' : str(user.eid),
+            'upassword:'+user.eid: 'tournicoton',
+            'upassword-confirm:'+user.eid: 'tournicoton',
+            'edits-upassword:'+user.eid:  '',
+            }
+        path, params = self.expect_redirect_publish(req)
+        cnx.commit() # commit to check we don't get late validation error for instance
+        self.assertEquals(path, 'euser/user')
+        self.failIf('vid' in params)
+
+    def test_user_editing_itself_no_relation(self):
+        """checking we can edit an entity without specifying some required
+        relations (meaning no changes)
+        """
+        user = self.user
+        groupeids = [eid for eid, in self.req.execute('EGroup G WHERE X in_group G, X eid %(x)s', {'x': user.eid})]
+        self.req.form = {
+            'eid':       user.eid,
+            '__type:'+user.eid:    'EUser',
+            'login:'+user.eid:     unicode(user.login),
+            'firstname:'+user.eid: u'Th\xe9nault',
+            'surname:'+user.eid:   u'Sylvain',
+            #
+            'edits-login:'+user.eid:     unicode(user.login),
+            'edits-firstname:'+user.eid: u'',
+            'edits-surname:'+user.eid:   u'',
+            }
+        path, params = self.expect_redirect_publish()
+        self.req.drop_entity_cache(user.eid)
+        e = self.req.execute('Any X WHERE X eid %(x)s', {'x': user.eid}, 'x').get_entity(0, 0)
+        self.assertEquals(e.login, user.login)
+        self.assertEquals(e.firstname, u'Th\xe9nault')
+        self.assertEquals(e.surname, u'Sylvain')
+        self.assertUnorderedIterableEquals([g.eid for g in e.in_group], groupeids)
+        #stateeids = [eid for eid, in self.req.execute('State S WHERE S name "activated"')]
+        #self.assertEquals([s.eid for s in e.in_state], stateeids)
+        
+        
+    def test_create_multiple_linked(self):
+        gueid = self.req.execute('EGroup G WHERE G name "users"')[0][0]
+        self.req.form = {'eid': ['X', 'Y'],
+                         
+                         '__type:X': 'EUser',
+                         '__maineid' : 'X',
+                         'login:X': u'adim', 'edits-login:X': u'', 
+                         'upassword:X': u'toto', 'upassword-confirm:X': u'toto', 'edits-upassword:X': u'', 
+                         'surname:X': u'Di Mascio', 'edits-surname:X': '',
+
+                         'in_group:X': gueid, 'edits-in_group:X': INTERNAL_FIELD_VALUE, 
+                         
+                         '__type:Y': 'EmailAddress',
+                         'address:Y': u'dima@logilab.fr', 'edits-address:Y': '',
+                         'use_email:X': 'Y', 'edits-use_email:X': INTERNAL_FIELD_VALUE, 
+                         }
+        path, params = self.expect_redirect_publish()
+        # should be redirected on the created person
+        self.assertEquals(path, 'euser/adim')
+        e = self.req.execute('Any P WHERE P surname "Di Mascio"').get_entity(0, 0)
+        self.assertEquals(e.surname, 'Di Mascio')
+        email = e.use_email[0]
+        self.assertEquals(email.address, 'dima@logilab.fr')
+        
+    def test_edit_multiple_linked(self):
+        peid = self.create_user('adim').eid
+        self.req.form = {'eid': [peid, 'Y'],
+                         '__type:%s'%peid: 'EUser',
+                         'surname:%s'%peid: u'Di Masci', 'edits-surname:%s'%peid: '',
+                         
+                         '__type:Y': 'EmailAddress',
+                         'address:Y': u'dima@logilab.fr', 'edits-address:Y': '',
+                         'use_email:%s'%peid: 'Y', 'edits-use_email:%s'%peid: INTERNAL_FIELD_VALUE,
+                         
+                         '__redirectrql': 'Any X WHERE X eid %s'%peid,
+                         }
+        path, params = self.expect_redirect_publish()
+        # should be redirected on the created person
+        eid = params['rql'].split()[-1]
+        e = self.req.execute('Any X WHERE X eid %(x)s', {'x': eid}, 'x').get_entity(0, 0)
+        self.assertEquals(e.surname, 'Di Masci')
+        email = e.use_email[0]
+        self.assertEquals(email.address, 'dima@logilab.fr')
+        
+        emaileid = email.eid
+        self.req.form = {'eid': [peid, emaileid],
+                         '__type:%s'%peid: 'EUser',
+                         'surname:%s'%peid: u'Di Masci', 'edits-surname:%s'%peid: 'Di Masci',
+                         '__type:%s'%emaileid: 'EmailAddress',
+                         'address:%s'%emaileid: u'adim@logilab.fr', 'edits-address:%s'%emaileid: 'dima@logilab.fr',
+                         'use_email:%s'%peid: emaileid, 'edits-use_email:%s'%peid: emaileid, 
+                         '__redirectrql': 'Any X WHERE X eid %s'%peid,
+                         }
+        path, params = self.expect_redirect_publish()
+        # should be redirected on the created person
+        eid = params['rql'].split()[-1]
+        # XXX this should not be necessary, it isn't with regular cubicweb
+        self.req._eid_cache = {}
+        e = self.req.execute('Any X WHERE X eid %(x)s', {'x': eid}, 'x').get_entity(0, 0)
+        self.assertEquals(e.surname, 'Di Masci')
+        email = e.use_email[0]
+        self.assertEquals(email.address, 'adim@logilab.fr')
+
+        
+    def test_password_confirm(self):
+        """test creation of two linked entities
+        """        
+        user = self.user
+        self.req.form = {'__cloned_eid:X': user.eid,
+                         'eid': 'X', '__type:X': 'EUser',
+                         'login:X': u'toto', 'edits-login:X': u'', 
+                         'upassword:X': u'toto', 'edits-upassword:X': u'', 
+                         }
+        self.assertRaises(ValidationError, self.publish, self.req)
+        self.req.form = {'__cloned_eid:X': user.eid,
+                         'eid': 'X', '__type:X': 'EUser',
+                         'login:X': u'toto', 'edits-login:X': u'', 
+                         'upassword:X': u'toto', 'upassword-confirm:X': u'tutu', 'edits-upassword:X': u'', 
+                         }
+        self.assertRaises(ValidationError, self.publish, self.req)
+
+
+    def test_req_pending_insert(self):
+        """make sure req's pending insertions are taken into account"""
+        tmpgroup = self.add_entity('EGroup', name=u"test")
+        user = self.user
+        self.req.set_session_data('pending_insert', set([(user.eid, 'in_group', tmpgroup.eid)]))
+        path, params = self.expect_redirect_publish()
+        usergroups = [gname for gname, in
+                      self.req.execute('Any N WHERE G name N, U in_group G, U eid %(u)s', {'u': user.eid})]
+        self.assertUnorderedIterableEquals(usergroups, ['managers', 'users', 'test'])
+        self.assertEquals(self.req.get_pending_inserts(), [])
+
+
+    def test_req_pending_delete(self):
+        """make sure req's pending deletions are taken into account"""
+        user = self.user
+        groupeid = self.req.execute('INSERT EGroup G: G name "test", U in_group G WHERE U eid %(x)s',
+                                    {'x': user.eid})[0][0]
+        usergroups = [gname for gname, in
+                      self.req.execute('Any N WHERE G name N, U in_group G, U eid %(u)s', {'u': user.eid})]
+        # just make sure everything was set correctly
+        self.assertUnorderedIterableEquals(usergroups, ['managers', 'users', 'test'])
+        # now try to delete the relation
+        self.req.set_session_data('pending_delete', set([(user.eid, 'in_group', groupeid)]))
+        path, params = self.expect_redirect_publish()
+        usergroups = [gname for gname, in
+                      self.req.execute('Any N WHERE G name N, U in_group G, U eid %(u)s', {'u': user.eid})]
+        self.assertUnorderedIterableEquals(usergroups, ['managers', 'users'])
+        #self.assertUnorderedIterableEquals(usergroups, ['managers'])
+        self.assertEquals(self.req.get_pending_deletes(), [])
+
+    def test_custom_attribute_handler(self):
+        def custom_login_edit(self, formparams, value, relations):
+            formparams['login'] = value.upper()
+            relations.append('X login %(login)s')
+        EUser.custom_login_edit = custom_login_edit
+        try:
+            user = self.user
+            eid = repr(user.eid)
+            self.req.form = {
+                'eid': eid,
+                '__type:'+eid:  'EUser',
+                'login:'+eid: u'foo',
+                'edits-login:'+eid:  unicode(user.login),
+                }
+            path, params = self.expect_redirect_publish()
+            rset = self.req.execute('Any L WHERE X eid %(x)s, X login L', {'x': user.eid}, 'x')
+            self.assertEquals(rset[0][0], 'FOO')
+        finally:
+            del EUser.custom_login_edit
+        
+    def test_redirect_apply_button(self):
+        redirectrql = rql_for_eid(4012) # whatever
+        self.req.form = {
+                         'eid': 'A', '__type:A': 'BlogEntry',
+                         '__maineid' : 'A',
+                         'content:A': u'"13:03:43"', 'edits-content:A': '',
+                         'title:A': u'huuu', 'edits-title:A': '',
+                         '__redirectrql': redirectrql,
+                         '__redirectvid': 'primary',
+                         '__redirectparams': 'toto=tutu&tata=titi',
+                         '__form_id': 'edition',
+                         '__action_apply': '',
+                         }
+        path, params = self.expect_redirect_publish()
+        self.failUnless(path.startswith('blogentry/'))
+        eid = path.split('/')[1]
+        self.assertEquals(params['vid'], 'edition')
+        self.assertNotEquals(eid, '4012')
+        self.assertEquals(params['__redirectrql'], redirectrql)
+        self.assertEquals(params['__redirectvid'], 'primary')
+        self.assertEquals(params['__redirectparams'], 'toto=tutu&tata=titi')
+
+    def test_redirect_ok_button(self):
+        redirectrql = rql_for_eid(4012) # whatever
+        self.req.form = {
+                         'eid': 'A', '__type:A': 'BlogEntry',
+                         '__maineid' : 'A',
+                         'content:A': u'"13:03:43"', 'edits-content:A': '',
+                         'title:A': u'huuu', 'edits-title:A': '',
+                         '__redirectrql': redirectrql,
+                         '__redirectvid': 'primary',
+                         '__redirectparams': 'toto=tutu&tata=titi',
+                         '__form_id': 'edition',
+                         }
+        path, params = self.expect_redirect_publish()
+        self.assertEquals(path, 'view')
+        self.assertEquals(params['rql'], redirectrql)
+        self.assertEquals(params['vid'], 'primary')
+        self.assertEquals(params['tata'], 'titi')
+        self.assertEquals(params['toto'], 'tutu')
+
+    def test_redirect_delete_button(self):
+        eid = self.add_entity('BlogEntry', title=u'hop', content=u'hop').eid
+        self.req.form = {'eid': str(eid), '__type:%s'%eid: 'BlogEntry',
+                         '__action_delete': ''}
+        path, params = self.expect_redirect_publish()
+        self.assertEquals(path, 'blogentry')
+        self.assertEquals(params, {u'__message': u'entity deleted'})
+        eid = self.add_entity('EmailAddress', address=u'hop@logilab.fr').eid
+        self.req.execute('SET X use_email E WHERE E eid %(e)s, X eid %(x)s',
+                         {'x': self.user.eid, 'e': eid}, 'x')
+        self.commit()
+        self.req.form = {'eid': str(eid), '__type:%s'%eid: 'EmailAddress',
+                         '__action_delete': ''}
+        path, params = self.expect_redirect_publish()
+        self.assertEquals(unquote(path), 'euser/'+self.user.login)
+        self.assertEquals(params, {u'__message': u'entity deleted'})
+        eid1 = self.add_entity('BlogEntry', title=u'hop', content=u'hop').eid
+        eid2 = self.add_entity('EmailAddress', address=u'hop@logilab.fr').eid
+        self.req.form = {'eid': [str(eid1), str(eid2)],
+                         '__type:%s'%eid1: 'BlogEntry',
+                         '__type:%s'%eid2: 'EmailAddress',
+                         '__action_delete': ''}
+        path, params = self.expect_redirect_publish()
+        self.assertEquals(path, 'view')
+        self.assertEquals(params, {u'__message': u'entities deleted'})
+        
+
+    def test_nonregr_multiple_empty_email_addr(self):
+        gueid = self.req.execute('EGroup G WHERE G name "users"')[0][0]
+        self.req.form = {'eid': ['X', 'Y'],
+                         
+                         '__type:X': 'EUser',
+                         'login:X': u'adim', 'edits-login:X': u'', 
+                         'upassword:X': u'toto', 'upassword-confirm:X': u'toto', 'edits-upassword:X': u'', 
+                         'in_group:X': gueid, 'edits-in_group:X': INTERNAL_FIELD_VALUE, 
+                         
+                         '__type:Y': 'EmailAddress',
+                         'address:Y': u'', 'edits-address:Y': '',
+                         'alias:Y': u'', 'edits-alias:Y': '',
+                         'use_email:X': 'Y', 'edits-use_email:X': INTERNAL_FIELD_VALUE, 
+                         }
+        self.assertRaises(ValidationError, self.publish, self.req)
+
+
+    def test_nonregr_rollback_on_validation_error(self):
+        self.skip('lax fix me')
+        p = self.create_user("doe")
+        # do not try to skip 'primary_email' for this test
+        old_skips = p.__class__.skip_copy_for
+        p.__class__.skip_copy_for = ()
+        try:
+            e = self.add_entity('EmailAddress', address=u'doe@doe.com')
+            self.req.execute('SET P use_email E, P primary_email E WHERE P eid %(p)s, E eid %(e)s',
+                         {'p' : p.eid, 'e' : e.eid})
+            self.req.form = {'__cloned_eid:X': p.eid,
+                             'eid': 'X', '__type:X': 'EUser',
+                             'login': u'dodo', 'edits-login': u'dodo', 
+                             'surname:X': u'Boom', 'edits-surname:X': u'',
+                             '__errorurl' : "whatever but required",
+                             }
+            # try to emulate what really happens in the web application
+            # 1/ validate form => EditController.publish raises a ValidationError
+            #    which fires a Redirect
+            # 2/ When re-publishing the copy form, the publisher implicitly commits
+            try:
+                self.env.app.publish('edit', self.req)
+            except Redirect:
+                self.req.form['rql'] = 'Any X WHERE X eid %s' % p.eid
+                self.req.form['vid'] = 'copy'
+                self.env.app.publish('view', self.req)
+            rset = self.req.execute('EUser P WHERE P surname "Boom"')
+            self.assertEquals(len(rset), 0)
+        finally:
+            p.__class__.skip_copy_for = old_skips
+
+        
+if __name__ == '__main__':
+    from logilab.common.testlib import unittest_main
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/test/unittest_metadata.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,106 @@
+from cubicweb.goa.testlib import *
+
+import time
+from mx.DateTime import DateTimeType
+from datetime import datetime
+from cubicweb.goa import db
+
+from google.appengine.api import datastore
+
+class Article(db.Model):        
+    content = db.TextProperty()
+    synopsis = db.StringProperty(default='hello')
+
+class Blog(db.Model):
+    diem = db.DateProperty(required=True, auto_now_add=True)
+    title = db.StringProperty(required=True)
+    content = db.TextProperty()
+    talks_about = db.ReferenceProperty(Article) 
+    cites = db.SelfReferenceProperty() 
+
+  
+class MetaDataTC(GAEBasedTC):
+    MODEL_CLASSES = (Article, Blog)
+    
+    def setUp(self):
+        GAEBasedTC.setUp(self)
+        self.req = self.request()
+        self.a = self.add_entity('Article')
+        self.p = self.add_entity('EProperty', pkey=u'ui.language', value=u'en')
+        self.session.commit()
+        
+    def _test_timestamp(self, entity, attr, sleep=0.1):
+        timestamp = getattr(entity, attr)
+        self.failUnless(timestamp)
+        self.assertIsInstance(timestamp, DateTimeType)
+        self.assertIsInstance(entity.to_gae_model()['s_'+attr], datetime)
+        time.sleep(sleep)
+        if entity.id == 'Article':
+            entity.set_attributes(content=u'zou')
+        else:
+            entity.set_attributes(value=u'en')
+        self.session.commit()
+        return timestamp
+    
+    def test_creation_date_dbmodel(self):
+        cdate = self._test_timestamp(self.a, 'creation_date')
+        self.assertEquals(cdate, self.a.creation_date)
+        
+    def test_creation_date_yams(self):
+        cdate = self._test_timestamp(self.p, 'creation_date')
+        self.assertEquals(cdate, self.p.creation_date)
+        
+    def test_modification_date_dbmodel(self):
+        mdate = self._test_timestamp(self.a, 'modification_date', sleep=1)
+        a = self.execute('Any X WHERE X eid %(x)s', {'x': self.a.eid}, 'x').get_entity(0, 0)
+        self.failUnless(mdate < a.modification_date, (mdate, a.modification_date))
+        
+    def test_modification_date_yams(self):
+        mdate = self._test_timestamp(self.p, 'modification_date', sleep=1)
+        p = self.execute('Any X WHERE X eid %(x)s', {'x': self.p.eid}, 'x').get_entity(0, 0)
+        self.failUnless(mdate < p.modification_date, (mdate, p.modification_date))
+
+    def _test_owned_by(self, entity):
+        self.assertEquals(len(entity.owned_by), 1)
+        owner = entity.owned_by[0]
+        self.assertIsInstance(owner, db.Model)
+        dbmodel = entity.to_gae_model()
+        self.assertEquals(len(dbmodel['s_owned_by']), 1)
+        self.assertIsInstance(dbmodel['s_owned_by'][0], datastore.Key)
+        
+    def test_owned_by_dbmodel(self):
+        self._test_owned_by(self.a)
+        
+    def test_owned_by_yams(self):
+        self._test_owned_by(self.p)
+
+    def _test_created_by(self, entity):
+        self.assertEquals(len(entity.created_by), 1)
+        creator = entity.created_by[0]
+        self.assertIsInstance(creator, db.Model)
+        self.assertIsInstance(entity.to_gae_model()['s_created_by'], datastore.Key)
+        
+    def test_created_by_dbmodel(self):
+        self._test_created_by(self.a)
+        
+    def test_created_by_yams(self):
+        self._test_created_by(self.p)
+        
+    def test_user_owns_dbmodel(self):
+        self.failUnless(self.req.user.owns(self.a.eid))
+        
+    def test_user_owns_yams(self):
+        self.failUnless(self.req.user.owns(self.p.eid))
+
+    def test_is_relation(self):
+        en = self.execute('Any EN WHERE E name EN, X is E, X eid %(x)s', {'x': self.a.eid}, 'x')[0][0]
+        self.assertEquals(en, 'Article')
+        en = self.execute('Any EN WHERE E name EN, X is E, X eid %(x)s', {'x': self.p.eid}, 'x')[0][0]
+        self.assertEquals(en, 'EProperty') 
+        en = self.execute('Any EN WHERE E name EN, X is E, X eid %(x)s', {'x': self.req.user.eid}, 'x')[0][0]
+        self.assertEquals(en, 'EUser')
+
+        
+if __name__ == '__main__':
+    from logilab.common.testlib import unittest_main
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/test/unittest_rql.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,608 @@
+from cubicweb.goa.testlib import *
+
+from cubicweb import Binary
+
+from logilab.common.testlib import unittest_main
+from mx.DateTime import now, today, DateTimeType
+import rql
+
+from google.appengine.api.datastore_types import Blob, Text
+
+# stored procedure definition #################################################
+
+from rql.utils import register_function, FunctionDescr
+
+class itemtype_sort_value(FunctionDescr):
+    supported_backends = ('sqlite',)
+    rtype = 'Int'
+
+try:
+    register_function(itemtype_sort_value)
+except AssertionError:
+    pass
+
+def init_sqlite_connexion(cnx):
+    def itemtype_sort_value(text):
+        return {"personal":2, "business":1}[text]
+    cnx.create_function("ITEMTYPE_SORT_VALUE", 1, itemtype_sort_value)
+
+from cubicweb.server import SQL_CONNECT_HOOKS
+sqlite_hooks = SQL_CONNECT_HOOKS.setdefault('sqlite', [])
+sqlite_hooks.append(init_sqlite_connexion)
+
+# end stored procedure definition #############################################
+
+class Article(db.Model):        
+    content = db.TextProperty()
+    synopsis = db.StringProperty(default=u'hello')
+
+class Blog(db.Model):
+    diem = db.DateProperty(required=True, auto_now_add=True)
+    content = db.TextProperty()
+    itemtype = db.StringProperty(required=True, choices=(u'personal', u'business'))
+    talks_about = db.ReferenceProperty(Article) 
+    cites = db.SelfReferenceProperty() 
+    data = db.BlobProperty()
+
+    
+class RQLTest(GAEBasedTC):
+    MODEL_CLASSES = (Article, Blog)
+    
+    def setUp(self):
+        GAEBasedTC.setUp(self)
+        # hack to make talks_about cardinality to ** instead of ?*
+        self.schema.rschema('talks_about').set_rproperty('Blog', 'Article',
+                                                         'cardinality', '**')
+        self.req = self.request()
+        self.article = self.add_entity('Article', content=u'very interesting')
+        self.blog = self.add_entity('Blog', itemtype=u'personal', content=u'hop')
+        self.execute('SET X talks_about Y WHERE X eid %(x)s, Y eid %(y)s',
+                     {'x': self.blog.eid, 'y': self.article.eid})
+        self.commit()
+        
+    def _check_rset_size(self, rset, row, col):
+        self.assertEquals(len(rset), row)
+        self.assertEquals(len(rset[0]), col)
+        self.assertEquals(len(rset.description), row)
+        self.assertEquals(len(rset.description[0]), col)
+        
+    def _check_blog_rset(self, rset):
+        self._check_rset_size(rset, 1, 1)
+        self.assertEquals(rset.description[0][0], 'Blog')
+        self.assertEquals(rset[0][0], self.blog.eid)
+        self.assertEquals(rset.get_entity(0, 0).eid, self.blog.eid)
+
+    def test_0_const(self):
+        rset = self.req.execute('Any 1')
+        self._check_rset_size(rset, 1, 1)
+        self.assertEquals(rset[0][0], 1)
+        self.assertEquals(rset.description[0][0], 'Int')
+
+    def test_0_now_const(self):
+        rset = self.req.execute('Any NOW')
+        self._check_rset_size(rset, 1, 1)
+        self.assertIsInstance(rset[0][0], DateTimeType)
+        self.assertEquals(rset.description[0][0], 'Datetime')
+
+    def test_0_today_const(self):
+        rset = self.req.execute('Any TODAY')
+        self._check_rset_size(rset, 1, 1)
+        self.assertIsInstance(rset[0][0], DateTimeType)
+        self.assertEquals(rset[0][0], today())
+        self.assertEquals(rset.description[0][0], 'Date')
+
+
+    def test_1_eid(self):
+        rset = self.req.execute('Any X WHERE X eid %(x)s', {'x': self.blog.eid})
+        self._check_blog_rset(rset)
+        rset = self.req.execute('Any X WHERE X eid "%s"' % self.blog.eid)
+        self._check_blog_rset(rset)
+
+    def test_1_eid_eid(self):
+        rset = self.req.execute('Any X,Y WHERE X eid %(x)s, Y eid %(y)s', {'x': self.blog.eid,
+                                                                           'y': self.article.eid})
+        self._check_rset_size(rset, 1, 2)
+        self.assertEquals(rset.description[0], ('Blog', 'Article'))
+        self.assertEquals(rset[0][0], self.blog.eid)
+        self.assertEquals(rset[0][1], self.article.eid)
+
+    def test_1_eid_with_is(self):
+        self.assertRaises(rql.TypeResolverException,
+                          self.req.execute, 'Any X WHERE X eid %(x)s, X is Article', {'x': self.blog.eid})
+        rset = self.req.execute('Any X WHERE X eid %(x)s, X is Blog', {'x': self.blog.eid})
+        self._check_blog_rset(rset)
+
+    def test_1_is(self):
+        rset = self.req.execute('Any X WHERE X is Blog')
+        self._check_blog_rset(rset)
+        blog2 = Blog(itemtype=u'personal', content=u'hop')
+        blog2.put()
+        rset = self.req.execute('Any X WHERE X is Blog')
+        self.assertEquals(len(rset), 2)
+        self.assertEquals(rset.description, [('Blog',), ('Blog',)])
+
+        
+    def test_2_attribute_selection_1(self):
+        rset = self.req.execute('Any X,D,C WHERE X is Blog, X diem D, X content C')
+        self._check_rset_size(rset, 1, 3)
+        self.assertEquals(rset[0], [self.blog.eid, today(), u'hop'])
+        self.assertEquals(rset.description[0], ('Blog', 'Date', 'String'))
+        self.assertIsInstance(rset[0][1], DateTimeType)
+        
+    def test_2_attribute_selection_2(self):
+        rset = self.req.execute('Any D,C WHERE X is Blog, X diem D, X content C')
+        self._check_rset_size(rset, 1, 2)
+        self.assertEquals(rset[0], [today(), u'hop'])
+        self.assertEquals(rset.description[0], ('Date', 'String'))
+        
+    def test_2_attribute_selection_binary(self):
+        rset = self.req.execute('Any D WHERE X is Blog, X data D')
+        self._check_rset_size(rset, 1, 1)
+        self.assertEquals(rset[0], [None])
+        self.assertEquals(rset.description[0], ('Bytes',))
+        self.blog['data'] = Binary('raw data')
+        self.blog.put()
+        rset = self.req.execute('Any D WHERE X is Blog, X data D')
+        self._check_rset_size(rset, 1, 1)
+        self.assertIsInstance(rset[0][0], Binary)
+        value = rset[0][0].getvalue()
+        self.assertIsInstance(value, str)
+        self.failIf(isinstance(value, Blob)) 
+        self.assertEquals(value, 'raw data')
+        self.assertEquals(rset.description[0], ('Bytes',))
+        
+    def test_2_attribute_selection_long_text(self):
+        self.blog['content'] = text = u'a'*501
+        self.blog.put()
+        rset = self.req.execute('Any C WHERE X is Blog, X content C')
+        self._check_rset_size(rset, 1, 1)
+        self.assertIsInstance(rset[0][0], unicode)
+        self.failIf(isinstance(rset[0][0], Text)) 
+        self.assertEquals(rset[0][0], text)
+        
+    def test_2_attribute_selection_transformation(self):
+        rset = self.req.execute('Any X,UPPER(C) WHERE X is Blog, X content C')
+        self._check_rset_size(rset, 1, 2)
+        self.assertEquals(rset[0], [self.blog.eid, u'HOP'])
+        self.assertEquals(rset.description[0], ('Blog', 'String',))
+
+
+    def test_3_attribute_restriction(self):
+        rset = self.req.execute('Any X WHERE X itemtype "personal"')
+        self._check_blog_rset(rset)
+        rset = self.req.execute('Any X WHERE X itemtype "business"')
+        self.assertEquals(len(rset), 0)
+        
+    def test_3_ambigous_attribute_restriction_1(self):
+        rset = self.req.execute('Any X WHERE X content "hello"')
+        self.assertEquals(len(rset), 0)
+        
+    def test_3_ambigous_attribute_restriction_2(self):
+        rset = self.req.execute('Any X WHERE X content "hop"')
+        self._check_blog_rset(rset)
+        
+    def test_3_ambigous_attribute_restriction_3(self):
+        article = Article(content=u'hop')
+        article.put()
+        rset = self.req.execute('Any X WHERE X content "hop"')
+        self._check_rset_size(rset, 2, 1)
+        self.assertUnorderedIterableEquals([r[0] for r in rset], [self.blog.eid, article.eid])
+        self.assertUnorderedIterableEquals([r[0] for r in rset.description], ['Blog', 'Article'])
+
+    def test_3_incoherant_attribute_restriction(self):
+        rset = self.req.execute('Any X WHERE X eid %(x)s, X content "hola"',
+                                {'x': self.blog.eid})
+        self.assertEquals(len(rset), 0)
+        
+    def test_3_multiple_attribute_restriction(self):
+        rset = self.req.execute('Any X WHERE X content "hop", X itemtype "personal"')
+        self._check_blog_rset(rset)
+        
+    def test_3_incoherant_multiple_attribute_restriction(self):
+        rset = self.req.execute('Any X WHERE X content "hip", X itemtype "personal"')
+        self.assertEquals(len(rset), 0)
+
+    def test_3_today_attribute_restriction(self):
+        rset = self.req.execute('Any X WHERE X diem < TODAY')
+        self.assertEquals(len(rset), 0)
+        rset = self.req.execute('Any X WHERE X diem <= TODAY')
+        self._check_blog_rset(rset)
+        rset = self.req.execute('Any X WHERE X diem > TODAY')
+        self.assertEquals(len(rset), 0)
+        rset = self.req.execute('Any X WHERE X diem >= TODAY')
+        self._check_blog_rset(rset)
+
+    def test_3_now_attribute_restriction(self):
+        rset = self.req.execute('Any X WHERE X diem < NOW')
+        self._check_blog_rset(rset)
+        rset = self.req.execute('Any X WHERE X diem <= NOW')
+        self._check_blog_rset(rset)
+        rset = self.req.execute('Any X WHERE X diem > NOW')
+        self.assertEquals(len(rset), 0)
+        rset = self.req.execute('Any X WHERE X diem >= NOW')
+        self.assertEquals(len(rset), 0)
+
+    def test_3_in_attribute_restriction(self):
+        self.skip('missing actual gae support, retry latter')
+        article2 = Article(content=u'hip'); article2.put()
+        rset = self.req.execute('Any X WHERE X content IN ("hop", "hip")')
+        self._check_rset_size(rset, 2, 1)
+        self.assertUnorderedIterableEquals([r[0] for r in rset], [self.blog.eid, article2.eid])
+        self.assertUnorderedIterableEquals([r[0] for r in rset.description], ['Blog', 'Article'])
+
+    def test_3_like(self):
+        repo = self.config.repository()
+        versions = repo.get_versions()
+        self.assertEquals(versions.keys(), ['cubicweb'])
+    
+    def _setup_relation_description(self):
+        self.article2 = self.add_entity('Article', content=u'hop')
+        self.blog2 = self.add_entity('Blog', itemtype=u'personal', content=u'hip')
+        self.execute('SET X talks_about Y WHERE X eid %(x)s, Y eid %(y)s',
+                     {'x': self.blog2.eid, 'y': self.article2.eid})
+        self.blog3 = self.add_entity('Blog', itemtype=u'business', content=u'hep')
+        self.commit()
+        
+    def test_4_relation_restriction_1(self):
+        self._setup_relation_description()
+        rset = self.req.execute('Any X WHERE X talks_about Y')
+        self._check_rset_size(rset, 2, 1)
+        self.assertUnorderedIterableEquals([r[0] for r in rset],
+                             [self.blog.eid, self.blog2.eid])
+        self.assertUnorderedIterableEquals([r[0] for r in rset.description], ['Blog', 'Blog'])
+        
+    def test_4_relation_restriction_2(self):
+        self._setup_relation_description()
+        rset = self.req.execute('Any Y WHERE X talks_about Y')
+        self._check_rset_size(rset, 2, 1)
+        self.assertUnorderedIterableEquals([r[0] for r in rset],
+                             [self.article.eid, self.article2.eid])
+        self.assertUnorderedIterableEquals([r[0] for r in rset.description],
+                             ['Article', 'Article'])
+        
+    def test_4_relation_restriction_3(self):
+        self._setup_relation_description()
+        rset = self.req.execute('Any X,Y WHERE X talks_about Y')
+        self._check_rset_size(rset, 2, 2)
+        self.assertUnorderedIterableEquals([tuple(r) for r in rset],
+                             [(self.blog.eid, self.article.eid),
+                              (self.blog2.eid, self.article2.eid)])
+        self.assertUnorderedIterableEquals([tuple(r) for r in rset.description],
+                             [('Blog', 'Article'),
+                              ('Blog', 'Article')])
+        
+    def test_4_relation_restriction_4(self):
+        self._setup_relation_description()
+        rset = self.req.execute('Any X,Y WHERE X talks_about Y, X eid %(x)s',
+                                {'x': self.blog.eid})
+        self._check_rset_size(rset, 1, 2)
+        self.assertEquals(rset[0], [self.blog.eid, self.article.eid])
+        self.assertUnorderedIterableEquals(rset.description[0], ['Blog', 'Article'])
+        
+    def test_4_relation_restriction_5(self):
+        self._setup_relation_description()
+        rset = self.req.execute('Any X,Y WHERE X talks_about Y, Y eid %(x)s',
+                                {'x': self.article.eid})
+        self._check_rset_size(rset, 1, 2)
+        self.assertEquals(rset[0], [self.blog.eid, self.article.eid])
+        self.assertUnorderedIterableEquals(rset.description[0], ['Blog', 'Article'])
+        
+    def test_4_relation_subject_restriction(self):
+        self._setup_relation_description()
+        rset = self.req.execute('Any X,Y WHERE X talks_about Y, X content %(c)s',
+                                {'c': 'hop'})
+        self._check_rset_size(rset, 1, 2)
+        self.assertEquals(rset[0], [self.blog.eid, self.article.eid])
+        self.assertUnorderedIterableEquals(rset.description[0], ['Blog', 'Article'])
+        
+    def test_4_relation_object_restriction(self):
+        self._setup_relation_description()
+        rset = self.req.execute('Any X WHERE X is Blog, X talks_about Y, Y content %(c)s',
+                                {'c': 'very interesting'})
+        self._check_rset_size(rset, 1, 1)
+        self.assertEquals(rset[0], [self.blog.eid])
+        self.assertUnorderedIterableEquals(rset.description[0], ['Blog'])
+        
+    def test_4_relation_subject_object_restriction(self):
+        article2 = self.add_entity('Article', content=u'very interesting')
+        rset = self.req.execute('Any X,XC WHERE X is Blog, X content XC, X content %(xc)s, '
+                                'X talks_about Y, Y content %(c)s',
+                                {'xc': 'hop', 'c': 'very interesting'})
+        self._check_rset_size(rset, 1, 2)
+        self.assertEquals(rset[0], [self.blog.eid, self.blog.content])
+        self.assertUnorderedIterableEquals(rset.description[0], ['Blog', 'String'])
+        
+    def test_4_relation_subject_object_restriction_no_res(self):
+        article2 = self.add_entity('Article', content=u'very interesting')
+        rset = self.req.execute('Any X,XC WHERE X is Blog, X content XC, X content %(xc)s, '
+                                'X talks_about Y, Y content %(c)s',
+                                {'xc': 'hip', 'c': 'very interesting'})
+        self.assertEquals(len(rset), 0)
+        
+    def test_4_relation_subject_object_restriction_no_res_2(self):
+        rset = self.req.execute('Any X,XC WHERE X is Blog, X content XC, X content %(xc)s, '
+                                'X talks_about Y, Y content %(c)s',
+                                {'xc': 'hop', 'c': 'not interesting'})
+        self.assertEquals(len(rset), 0)
+        
+    def test_4_relation_restriction_7(self):
+        self._setup_relation_description()
+        rset = self.req.execute('Any XC,XD,YC WHERE X talks_about Y, Y eid %(x)s,'
+                                'X content XC, X diem XD, Y content YC',
+                                {'x': self.article.eid})
+        self._check_rset_size(rset, 1, 3)
+        self.assertEquals(rset[0], [self.blog.content, self.blog.diem, self.article.content])
+        self.assertUnorderedIterableEquals(rset.description[0], ['String', 'Date', 'String'])
+        
+    def test_4_relation_restriction_8(self):
+        self._setup_relation_description()
+        rset = self.req.execute('Any X,Y WHERE X cites Y, Y eid %(x)s', {'x': self.blog.eid})
+        self.assertEquals(len(rset), 0)
+
+    def test_4_relation_restriction_9(self):
+        article2 = self.add_entity('Article', content=u'hop')
+        self.req.execute('SET X talks_about Y WHERE X eid %(x)s, Y eid %(y)s',
+                         {'x': self.blog.eid, 'y': article2.eid})
+        rset = self.req.execute('Any X,Y WHERE X talks_about Y, X eid %(x)s, Y eid %(y)s',
+                                {'x': self.blog.eid, 'y': article2.eid})
+        self._check_rset_size(rset, 1, 2)
+        
+    def test_4_ambiguous_subject_relation(self):
+        ye = self.add_entity('YamsEntity')
+        self.req.execute('SET X ambiguous_relation Y WHERE X eid %(x)s, Y eid %(y)s',
+                         {'x': ye.eid, 'y': self.blog.eid})
+        self.req.execute('SET X ambiguous_relation Y WHERE X eid %(x)s, Y eid %(y)s',
+                         {'x': ye.eid, 'y': self.article.eid})
+        self.commit()
+        #ye = self.vreg.etype_class('YamsEntity ')(req, None)
+        #ye.to_gae_model()['s_ambiguous_relation'] = [self.blog.key(), self.article.key()]
+        #ye.put()
+        rset = self.req.execute('Any X WHERE Y ambiguous_relation X')
+        self._check_rset_size(rset, 2, 1)
+        self.assertUnorderedIterableEquals([r[0] for r in rset], [self.blog.eid, self.article.eid])
+        self.assertUnorderedIterableEquals([r[0] for r in rset.description], ['Blog', 'Article'])
+        rset = self.req.execute('Any X WHERE Y ambiguous_relation X, Y eid %(x)s', {'x': ye.eid})
+        self._check_rset_size(rset, 2, 1)
+        self.assertUnorderedIterableEquals([r[0] for r in rset], [self.blog.eid, self.article.eid])
+        self.assertUnorderedIterableEquals([r[0] for r in rset.description], ['Blog', 'Article'])
+        
+    def test_4_relation_selection(self):
+        req = self.request()
+        rset = req.execute('Any N WHERE G content N, U talks_about G, U eid %(u)s', {'u': self.blog.eid})
+        self._check_rset_size(rset, 1, 1)
+        self.assertEquals(rset[0][0], 'very interesting')
+
+
+    def test_5_orderby(self):
+        self._setup_relation_description()
+        rset = self.req.execute('Any X,XC ORDERBY XC WHERE X is Blog, X content XC')
+        self._check_rset_size(rset, 3, 2)
+        self.assertEquals(rset.rows,
+                          [[self.blog3.eid, 'hep'],
+                           [self.blog2.eid, 'hip'],
+                           [self.blog.eid, 'hop']])
+                           
+    def test_5_orderby_desc(self):
+        self._setup_relation_description()
+        rset = self.req.execute('Any X,XC ORDERBY XC DESC WHERE X is Blog, X content XC')
+        self._check_rset_size(rset, 3, 2)
+        self.assertEquals(rset.rows,
+                          [[self.blog.eid, 'hop'],
+                           [self.blog2.eid, 'hip'],
+                           [self.blog3.eid, 'hep']])
+
+    def test_5_orderby_several_terms(self):
+        self._setup_relation_description()
+        rset = self.req.execute('Any X,XC,XI ORDERBY XI,XC WHERE X is Blog, X content XC, X itemtype XI')
+        self._check_rset_size(rset, 3, 3)
+        self.assertEquals(rset.rows,
+                          [[self.blog3.eid, 'hep', 'business'],
+                           [self.blog2.eid, 'hip', 'personal'],
+                           [self.blog.eid, 'hop', 'personal']])
+
+    def test_5_orderby_several_terms_mixed_implicit(self):
+        self._setup_relation_description()
+        rset = self.req.execute('Any X,XC,XI ORDERBY XI,XC DESC WHERE X is Blog, X content XC, X itemtype XI')
+        self._check_rset_size(rset, 3, 3)
+        self.assertEquals(rset.rows,
+                          [[self.blog3.eid, 'hep', 'business'],
+                           [self.blog.eid, 'hop', 'personal'],
+                           [self.blog2.eid, 'hip', 'personal']])
+
+    def test_5_orderby_several_terms_explicit_order(self):
+        self._setup_relation_description()
+        rset = self.req.execute('Any X,XC,XI ORDERBY XI DESC,XC DESC WHERE X is Blog, X content XC, X itemtype XI')
+        self._check_rset_size(rset, 3, 3)
+        self.assertEquals(rset.rows,
+                          [[self.blog.eid, 'hop', 'personal'],
+                           [self.blog2.eid, 'hip', 'personal'],
+                           [self.blog3.eid, 'hep', 'business']])
+        
+    def test_5_orderby_several_terms_mixed_order(self):
+        self._setup_relation_description()
+        rset = self.req.execute('Any X,XC,XI ORDERBY XI ASC,XC DESC WHERE X is Blog, X content XC, X itemtype XI')
+        self._check_rset_size(rset, 3, 3)
+        self.assertEquals(rset.rows,
+                          [[self.blog3.eid, 'hep', 'business'],
+                           [self.blog.eid, 'hop', 'personal'],
+                           [self.blog2.eid, 'hip', 'personal']])
+
+
+    def test_5_orderby_lower(self):
+        blog2 = self.add_entity('Blog', itemtype=u'business', content=u'Hup')
+        rset = self.req.execute('Any X ORDERBY LOWER(XC) '
+                                'WHERE X is Blog, X content XC')
+        self._check_rset_size(rset, 2, 1)
+        self.assertEquals(rset.rows, [[self.blog.eid], [blog2.eid]])
+        rset = self.req.execute('Any X ORDERBY LOWER(XC) DESC '
+                                'WHERE X is Blog, X content XC')
+        self._check_rset_size(rset, 2, 1)
+        self.assertEquals(rset.rows, [[blog2.eid], [self.blog.eid]])
+
+    def test_5_orderby_stored_proc(self):
+        blog2 = self.add_entity('Blog', itemtype=u'business', content=u'hop')
+        rset = self.req.execute('Any X ORDERBY ITEMTYPE_SORT_VALUE(XIT) '
+                                'WHERE X is Blog, X itemtype XIT')
+        self._check_rset_size(rset, 2, 1)
+        self.assertEquals(rset.rows, [[blog2.eid], [self.blog.eid]])
+        rset = self.req.execute('Any X ORDERBY ITEMTYPE_SORT_VALUE(XIT) DESC '
+                                'WHERE X is Blog, X itemtype XIT')
+        self._check_rset_size(rset, 2, 1)
+        self.assertEquals(rset.rows, [[self.blog.eid], [blog2.eid]])
+                          
+        
+    def test_6_limit(self):
+        self._setup_relation_description()
+        rset = self.req.execute('Any X LIMIT 2 WHERE X is Blog')
+        self._check_rset_size(rset, 2, 1)
+        
+    def test_6_offset(self):
+        self._setup_relation_description()
+        rset = self.req.execute('Any XC ORDERBY XC DESC OFFSET 1 WHERE X is Blog, X content XC')
+        self._check_rset_size(rset, 2, 1)
+        self.assertEquals(rset.rows, [['hip'], ['hep']])
+        
+    def test_6_limit_and_orderby(self):
+        self._setup_relation_description()
+        rset = self.req.execute('Any XC ORDERBY XC LIMIT 2 WHERE X is Blog, X content XC')
+        self._check_rset_size(rset, 2, 1)
+        self.assertEquals(rset.rows, [['hep'], ['hip']])
+        
+    def test_6_limit_offset_and_orderby(self):
+        self._setup_relation_description()
+        rset = self.req.execute('Any XC ORDERBY XC LIMIT 2 OFFSET 0 WHERE X is Blog, X content XC')
+        self._check_rset_size(rset, 2, 1)
+        self.assertEquals(rset.rows, [['hep'], ['hip']])
+        rset = self.req.execute('Any XC ORDERBY XC LIMIT 2 OFFSET 1 WHERE X is Blog, X content XC')
+        self._check_rset_size(rset, 2, 1)
+        self.assertEquals(rset.rows, [['hip'], ['hop']])
+        rset = self.req.execute('Any XC ORDERBY XC LIMIT 2 OFFSET 2 WHERE X is Blog, X content XC')
+        self._check_rset_size(rset, 1, 1)
+        self.assertEquals(rset.rows, [['hop']])
+        rset = self.req.execute('Any XC ORDERBY XC LIMIT 2 OFFSET 3 WHERE X is Blog, X content XC')
+        self.failIf(rset)
+        
+
+    def test_7_simple_datetimecast(self):
+        self._setup_relation_description()
+        _today = today()
+        _tomorrow = _today + 1
+        rset = self.req.execute('Any X WHERE X is Blog, X creation_date >= "%s"'
+                                % _tomorrow.strftime('%Y-%m-%d'))
+        self.failUnless(len(rset) == 0)
+        rset = self.req.execute('Any X WHERE X is Blog, X creation_date >= "%s"'
+                                % _today.strftime('%Y-%m-%d'))
+        self._check_rset_size(rset, 3, 1)
+        rset = self.req.execute('Any X WHERE X is Blog, X creation_date <= "%s"'
+                                % _tomorrow.strftime('%Y-%m-%d'))
+        self._check_rset_size(rset, 3, 1)
+        
+    def test_7_identity_relation(self):
+        rset = self.req.execute('Any X WHERE X identity Y, X eid %(x)s, Y eid %(y)s',
+                                {'x': self.user.eid, 'y': self.user.eid})
+        self._check_rset_size(rset, 1, 1)
+        rset = self.req.execute('Any Y WHERE X identity Y, X eid %(x)s',
+                                {'x': self.user.eid})
+        self._check_rset_size(rset, 1, 1)
+        self.assertEquals(rset.rows, [[self.user.eid]])
+        blog2 = self.add_entity('Blog', itemtype=u'personal', content=u'hip')
+        rset = self.req.execute('Any X WHERE X identity Y, X eid %(x)s, Y eid %(y)s',
+                                {'x': self.blog.eid, 'y': blog2.eid})
+        self.failIf(rset)
+        
+    def test_8_not_relation_1(self):
+        rset = self.req.execute('Any X WHERE X identity U, NOT U in_group G, '
+                                'G name "guests", X eid %(x)s, U eid %(u)s',
+                                {'x': self.user.eid, 'u': self.user.eid})
+        self._check_rset_size(rset, 1, 1)
+        self.assertEquals(rset.rows, [[self.user.eid]])        
+
+    def test_8_not_relation_linked_subject(self):
+        rset = self.req.execute('Any X WHERE NOT X talks_about Y, Y eid %(y)s',
+                                {'y': self.article.eid})
+        self.failIf(rset)
+        blog2 = self.add_entity('Blog', content=u'hop', itemtype=u'personal')
+        self.commit()
+        rset = self.req.execute('Any X WHERE NOT X talks_about Y, Y eid %(y)s',
+                                {'y': self.article.eid})        
+        self._check_rset_size(rset, 1, 1)
+        self.assertEquals(rset.rows, [[blog2.eid]])
+
+    def test_8_not_relation_linked_object(self):
+        rset = self.req.execute('Any Y WHERE NOT X talks_about Y, X eid %(x)s',
+                                {'x': self.blog.eid})
+        self.failIf(rset)
+        article2 = self.add_entity('Article', content=u'hop')
+        self.commit()
+        rset = self.req.execute('Any Y WHERE NOT X talks_about Y, X eid %(x)s',
+                                {'x': self.blog.eid})
+        self._check_rset_size(rset, 1, 1)
+        self.assertEquals(rset.rows, [[article2.eid]])
+
+    def test_8_not_relation_linked_attr(self):
+        self.skip('not yet implemented')
+        # TODO: this should generated 
+        # Query(X)[s_talks_about] > "hop" || Query(X)[s_talks_about] < "hop"
+        article2 = self.add_entity('Article', content=u'hop')
+        self.req.execute('SET X talks_about Y WHERE X eid %(x)s, Y eid %(y)s',
+                         {'x': self.blog.eid, 'y': article2.eid})
+        self.commit()
+        rset = self.req.execute('Any X WHERE NOT X talks_about Y, Y content "hop"')
+        self._check_rset_size(rset, 1, 2)
+        self.assertEquals(rset.rows, [[self.blog.eid, self.article.eid]])
+
+    def test_8_not_relation_unlinked_subject(self):
+        blog2 = self.add_entity('Blog', content=u'hop', itemtype=u'personal')
+        self.commit()
+        rset = self.req.execute('Any X WHERE NOT X talks_about Y')
+        self._check_rset_size(rset, 1, 1)
+        self.assertEquals(rset.rows, [[blog2.eid]])
+
+    def test_8_not_relation_unlinked_object(self):
+        article2 = self.add_entity('Article', content=u'hop')
+        self.commit()
+        rset = self.req.execute('Any Y WHERE NOT X talks_about Y')
+        self._check_rset_size(rset, 1, 1)
+        self.assertEquals(rset.rows, [[article2.eid]])
+        
+    def test_8_not_relation_final_1(self):
+        rset = self.req.execute('Any G WHERE G is EGroup, NOT G name "guests"')
+        self._check_rset_size(rset, 2, 1)
+        self.assertUnorderedIterableEquals([g.name for g in rset.entities()],
+                                           ['users', 'managers'])        
+        
+    def test_8_not_relation_final_2(self):
+        rset = self.req.execute('Any GN WHERE G is EGroup, NOT G name "guests", G name GN')
+        self._check_rset_size(rset, 2, 1)
+        self.assertUnorderedIterableEquals([gn for gn, in rset.rows],
+                                           ['users', 'managers'])
+
+
+    def test_9_exists(self):
+        blog2 = self.add_entity('Article', content=u'hop')
+        article2 = self.add_entity('Article', content=u'hop')
+        self.req.execute('SET X talks_about Y WHERE X eid %(x)s, Y eid %(y)s',
+                         {'x': self.blog.eid, 'y': article2.eid})
+        self.commit()
+        rset = self.req.execute('Any X WHERE X is Blog, EXISTS(X talks_about Y)')
+        self._check_rset_size(rset, 1, 1)
+        self.assertEquals(rset.rows, [[self.blog.eid]])
+        
+        
+    def test_error_unknown_eid(self):
+        rset = self.req.execute('Any X WHERE X eid %(x)s', {'x': '1234'})
+        self.assertEquals(len(rset), 0)
+        self.blog.delete()
+        rset = self.req.execute('Any X WHERE X eid %(x)s', {'x': self.blog.eid})
+        self.assertEquals(len(rset), 0)
+
+    def test_nonregr_inlined_relation(self):
+        eid = self.execute('INSERT YamsEntity X: X inlined_relation Y WHERE Y eid %(y)s',
+                           {'y': self.blog.eid})[0][0]
+        self.commit()
+        rset = self.execute('Any X WHERE Y inlined_relation X, Y eid %(y)s', {'y': eid})
+        self._check_rset_size(rset, 1, 1)
+        self.assertEquals(rset[0][0], self.blog.eid)
+        
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/test/unittest_schema.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,109 @@
+from cubicweb.goa.testlib import *
+
+class Article(db.Model):        
+    """gae model fixture: exercises text and string properties with a default"""
+    content = db.TextProperty()
+    synopsis = db.StringProperty(default='hello')
+
+class Blog(db.Model):
+    """gae model fixture: exercises required/auto properties, a reference to
+    another model class and a self-reference"""
+    diem = db.DateProperty(required=True, auto_now_add=True)
+    title = db.StringProperty(required=True)
+    content = db.TextProperty()
+    talks_about = db.ReferenceProperty(Article) 
+    cites = db.SelfReferenceProperty() 
+
+  
+class SomeViewsTC(GAEBasedTC):
+    """check that the cubicweb schema generated from the gae model classes
+    above contains the expected entity and relation types
+
+    NOTE(review): class name looks copied from the views test module;
+    these are actually schema introspection tests
+    """
+    MODEL_CLASSES = (Article, Blog)
+
+    def test_entities_and_relation(self):
+        """loaded schema contains yams base types, cubicweb standard
+        entities/relations and those contributed by the model classes"""
+        schema = self.schema
+        self.assertSetEquals(set(str(e) for e in schema.entities()),
+                             set(('Boolean', 'Bytes', 'Date', 'Datetime', 'Float',
+                              'Decimal',
+                              'Int', 'Interval', 'Password', 'String', 'Time',
+                              'EEType', 'EGroup', 'EPermission', 'EProperty', 'ERType',
+                              'EUser', 'EmailAddress',
+                              'RQLExpression', 'State', 'Transition', 'TrInfo',
+                              'Article', 'Blog', 'YamsEntity')))
+        self.assertSetEquals(set(str(e) for e in schema.relations()),
+                             set(('add_permission', 'address', 'alias', 'allowed_transition',
+                                  'ambiguous_relation', 'canonical', 'cites',
+                                  'comment', 'comment_format', 'condition', 'content',
+                                  'created_by', 'creation_date', 'delete_permission',
+                                  'description', 'description_format', 'destination_state',
+                                  'diem', 'eid', 'expression', 'exprtype', 'final', 'firstname',
+                                  'for_user', 'from_state', 'fulltext_container', 'has_text',
+                                  'identical_to', 'identity', 'in_group', 'initial_state',
+                                  'inlined', 'inlined_relation', 'is', 'is_instance_of',
+                                  'label', 'last_login_time', 'login',
+                                  'mainvars', 'meta', 'modification_date', 'name', 'owned_by', 'pkey', 'primary_email',
+                                  'read_permission', 'require_group', 'state_of', 'surname', 'symetric',
+                                  'synopsis', 'talks_about', 'title', 'to_state', 'transition_of',
+                                  'update_permission', 'use_email', 'value')))
+
+    def test_dbmodel_imported(self):
+        """relations declared on a db.Model class keep their declaration
+        order; generic (meta) relations come in no guaranteed order"""
+        eschema = self.schema['Blog']
+        orels = [str(e) for e in eschema.ordered_relations()]
+        # only relations defined in the class are actually ordered
+        orels, others = orels[:5], orels[5:]
+        self.assertEquals(orels,
+                          ['diem', 'title', 'content', 'talks_about', 'cites'])
+        self.assertUnorderedIterableEquals(others,
+                             ['eid', 'identity', 'owned_by', 'modification_date',
+                              'created_by', 'creation_date', 'is', 'is_instance_of'])
+        self.assertUnorderedIterableEquals((str(e) for e in eschema.object_relations()),
+                             ('ambiguous_relation', 'cites', 'identity', 'inlined_relation'))
+        eschema = self.schema['Article']
+        orels = [str(e) for e in eschema.ordered_relations()]
+        # only relations defined in the class are actually ordered
+        orels, others = orels[:2], orels[2:]
+        self.assertEquals(orels,
+                          ['content', 'synopsis'])
+        self.assertUnorderedIterableEquals(others,
+                             ['eid', 'identity', 'owned_by', 'modification_date',
+                              'created_by', 'creation_date', 'is', 'is_instance_of'])
+        self.assertUnorderedIterableEquals((str(e) for e in eschema.object_relations()),
+                             ('ambiguous_relation', 'talks_about', 'identity'))
+
+    def test_yams_imported(self):
+        """entity types coming from a yams schema (here EProperty) also get
+        their explicitly defined relations first, then the generic ones"""
+        eschema = self.schema['EProperty']
+        # only relations defined in the class are actually ordered
+        orels = [str(e) for e in eschema.ordered_relations()]
+        orels, others = orels[:3], orels[3:]
+        self.assertEquals(orels,
+                          ['pkey', 'value', 'for_user'])
+        self.assertEquals(others,
+                          ['created_by', 'creation_date', 'eid', 'identity',
+                           'is', 'is_instance_of', 'modification_date', 'owned_by'])
+        self.assertUnorderedIterableEquals((str(e) for e in eschema.object_relations()),
+                             ('identity',))
+    
+    def test_yams_ambiguous_relation(self):
+        """a relation declared with several possible object types exposes all
+        of them through the relation schema"""
+        rschema = self.schema['ambiguous_relation']
+        # only relations defined in the class are actually ordered
+        self.assertUnorderedIterableEquals((str(e) for e in rschema.subjects()),
+                             ('YamsEntity',))
+        self.assertUnorderedIterableEquals((str(e) for e in rschema.objects()),
+                             ('Blog', 'Article'))
+
+    def test_euser(self):
+        eschema = self.schema['EUser']
+        # XXX pretend to have some relations it has not
+        self.assertEquals([str(e) for e in eschema.ordered_relations()],
+                          ['login', 'firstname', 'surname', 'last_login_time',
+                           'primary_email', 'use_email', 'in_group', 'created_by',
+                           'creation_date', 'eid', 'has_text', 'identity',
+                           'is', 'is_instance_of', 'modification_date',
+                           'owned_by'])
+        self.assertUnorderedIterableEquals((str(e) for e in eschema.object_relations()),
+                             ('owned_by', 'created_by', 'identity', 'for_user'))
+
+    def test_eid(self):
+        """on gae, eids are stored as Bytes (datastore keys), not Int"""
+        rschema = self.schema['eid']
+        self.assertEquals(rschema.objects(), ('Bytes',))
+        self.assertEquals(rschema.rproperty('Blog', 'Bytes', 'cardinality'), '?1')
+
+        
+if __name__ == '__main__':
+    from logilab.common.testlib import unittest_main
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/test/unittest_views.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,52 @@
+from cubicweb.goa.testlib import *
+
+from cubicweb.interfaces import ICalendarable
+
+
+class Blog(db.Model):
+    """gae model fixture implementing ICalendarable so that the calendar
+    views can be rendered against it"""
+    diem = db.DateProperty(required=True, auto_now_add=True)
+    title = db.StringProperty(required=True)
+    content = db.TextProperty()
+
+    __implements__ = (ICalendarable,)
+
+    @property
+    def start(self):
+        # ICalendarable: a blog entry is a single-day event (start == stop)
+        return self.diem
+
+    @property
+    def stop(self):
+        return self.diem
+
+    def matching_dates(self, begin, end):
+        """calendar views interface"""
+        # NOTE(review): `begin`/`end` are ignored; diem is returned whenever
+        # set -- presumably good enough for these smoke tests, confirm
+        mydate = self.diem
+        if mydate:
+            return [mydate]
+        return []
+
+  
+class SomeViewsTC(GAEBasedTC):
+    """smoke tests for standard html views rendered on a gae entity"""
+    MODEL_CLASSES = (Blog, )
+    # NOTE(review): imported in the class body rather than at module level,
+    # presumably so they are only loaded once testlib's monkey patches are
+    # in place -- confirm
+    from cubicweb.web.views import basecontrollers, baseviews, navigation, boxes, calendar
+    from data import views
+    LOAD_APP_MODULES = (basecontrollers, baseviews, navigation, boxes, calendar, views)
+    
+    def setUp(self):
+        GAEBasedTC.setUp(self)
+        self.req = self.request()
+        self.blog = Blog(title=u'a blog', content=u'hop')
+        self.blog.put(self.req)
+        
+    def test_hcal(self):
+        # rendering only, no assertion: the test passes if no exception is raised
+        self.vreg.render('views', 'hcal', self.req, rset=self.blog.rset)
+        
+    def test_django_index(self):
+        self.vreg.render('views', 'index', self.req, rset=None)
+
+# dynamically add one smoke test per basic view; ``vid=vid`` binds the loop
+# variable at definition time (avoids the late-binding closure pitfall)
+for vid in ('primary', 'secondary', 'oneline', 'incontext', 'outofcontext', 'text'):
+    setattr(SomeViewsTC, 'test_%s'%vid, lambda self, vid=vid: self.blog.view(vid))
+        
+if __name__ == '__main__':
+    from logilab.common.testlib import unittest_main
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/testlib.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,196 @@
+from logilab.common.testlib import TestCase, mock_object
+
+import os, os.path as osp
+import time
+from shutil import copy
+
+# additional monkey patches necessary in regular cubicweb environment
+from cubicweb.server import rqlannotation
+from cubicweb.goa.overrides import rqlannotation as goarqlannotation
+rqlannotation.sqlgen_annotate = goarqlannotation.sqlgen_annotate
+rqlannotation.set_qdata = goarqlannotation.set_qdata
+
+try:
+    from google.appengine.api import apiproxy_stub_map
+    from google.appengine.api import datastore_file_stub
+    from google.appengine.ext import db as gdb
+    from cubicweb.goa import db, do_monkey_patch
+    from cubicweb.goa.dbmyams import load_schema
+    import_appengine_failed = None
+except ImportError, exc:
+    # appengine sdk not available: install stub classes so that test modules
+    # using ``db.Model`` can still be *imported*; the tests themselves are
+    # then skipped by GAEBasedTC.setUp through `import_appengine_failed`.
+    # (a stray ``raise`` here previously made this whole fallback dead code)
+    def do_monkey_patch():
+        # no-op stub so the module level do_monkey_patch() call below works
+        pass
+    class db:
+        class Model:
+            pass
+        class DummyProperty:
+            def __init__(self, *args, **kwargs):
+                pass
+        TextProperty = DummyProperty
+        StringProperty = DummyProperty
+        BlobProperty = DummyProperty
+        DateProperty = DummyProperty
+        ReferenceProperty = DummyProperty
+        SelfReferenceProperty = DummyProperty
+    import_appengine_failed = 'cannot import appengine: %s' % exc
+    
+
+from cubicweb import CW_SOFTWARE_ROOT
+from cubicweb.server.utils import crypt_password
+from cubicweb.devtools.fake import FakeRequest
+from cubicweb.goa.goavreg import GAERegistry
+from cubicweb.goa.goaconfig import GAEConfiguration
+from cubicweb.goa.dbinit import (create_user, create_groups, fix_entities,
+                              init_persistent_schema, insert_versions)
+
+import logging
+logger = logging.getLogger()
+logger.setLevel(logging.CRITICAL)
+
+do_monkey_patch()
+
+class GAEBasedTC(TestCase):
+    """base test case running against a (file backed) datastore stub
+
+    the datastore created for a given set of MODEL_CLASSES is kept as a
+    template file and copied back for each test run, so the expensive
+    initialization is only done once per model combination.
+    """
+    APP_ID = u'test_app'
+    AUTH_DOMAIN = 'gmail.com'
+    LOGGED_IN_USER = u't...@example.com'  # set to '' for no logged in user
+    # tuple of db.Model classes the test schema is built from; must be set
+    # by subclasses (checked in setUp)
+    MODEL_CLASSES = None
+    # optional tuple of application modules to load into the registry
+    LOAD_APP_MODULES = None
+    config = None
+    # base name for the datastore template file (model class names appended)
+    _DS_TEMPL_FILE = 'tmpdb-template'
+
+    def load_schema_hook(self, loader):
+        """schema loading hook; subclasses may override"""
+        loader.import_yams_template_schema('data')
+    
+    @property
+    def DS_FILE(self):
+        # actual datastore file used by the test, derived from the template name
+        return self.DS_TEMPL_FILE.replace('-template', '')
+    
+    @property
+    def DS_TEMPL_FILE(self):
+        # one template per combination of model classes
+        return self._DS_TEMPL_FILE + '_'.join(sorted(cls.__name__ for cls in self.MODEL_CLASSES))
+
+    def _set_ds_file(self, dsfile):
+        """point the appengine api proxy to a fresh datastore stub backed by
+        `dsfile`"""
+        # Start with a fresh api proxy.
+        apiproxy_stub_map.apiproxy = apiproxy_stub_map.APIProxyStubMap()
+        # Use a fresh stub datastore.
+        stub = datastore_file_stub.DatastoreFileStub(self.APP_ID, dsfile,
+                                                     dsfile+'.history')
+        apiproxy_stub_map.apiproxy.RegisterStub('datastore_v3', stub)
+        
+    def setUp(self):
+        if import_appengine_failed:
+            self.skip(import_appengine_failed)
+        # Ensure we're in UTC.
+        os.environ['TZ'] = 'UTC'
+        time.tzset()
+        # reuse the datastore template if one exists for this model set,
+        # else build it from scratch below (need_ds_init)
+        if osp.exists(self.DS_TEMPL_FILE):
+            copy(self.DS_TEMPL_FILE, self.DS_FILE)
+            need_ds_init = False
+            self._set_ds_file(self.DS_FILE)
+        else:
+            need_ds_init = True
+            self._set_ds_file(self.DS_TEMPL_FILE)
+#         from google.appengine.api import mail_stub
+#         from google3.apphosting.api import urlfetch_stub
+#         from google3.apphosting.api import user_service_stub        
+#         # Use a fresh stub UserService.
+#         apiproxy_stub_map.apiproxy.RegisterStub(
+#             'user', user_service_stub.UserServiceStub())
+        os.environ['AUTH_DOMAIN'] = self.AUTH_DOMAIN
+        os.environ['USER_EMAIL'] = self.LOGGED_IN_USER
+#         # Use a fresh urlfetch stub.
+#         apiproxy_stub_map.apiproxy.RegisterStub(
+#             'urlfetch', urlfetch_stub.URLFetchServiceStub())
+#         # Use a fresh mail stub.
+#         apiproxy_stub_map.apiproxy.RegisterStub(
+#             'mail', mail_stub.MailServiceStub())
+        if self.MODEL_CLASSES is None:
+            raise Exception('GAEBasedTC should set MODEL_CLASSES class attribute')
+        # reset appengine's kind registry so model classes from a previous
+        # test case don't leak into this one
+        gdb._kind_map = {}
+        self.config = self.config or GAEConfiguration('toto')
+        self.config.init_log(logging.CRITICAL)
+        self.schema = self.config.load_schema(self.MODEL_CLASSES,
+                                              self.load_schema_hook)
+        self.vreg = GAERegistry(self.config)
+        self.vreg.schema = self.schema
+        self.vreg.load_module(db)
+        from cubicweb.goa.appobjects import sessions
+        self.vreg.load_module(sessions)
+        from cubicweb.entities import authobjs, schemaobjs
+        self.vreg.load_module(authobjs)
+        self.vreg.load_module(schemaobjs)
+        if self.config['use-google-auth']:
+            from cubicweb.goa.appobjects import gauthservice
+            self.vreg.load_module(gauthservice)
+        if self.LOAD_APP_MODULES is not None:
+            for module in self.LOAD_APP_MODULES:
+                self.vreg.load_module(module)
+        for cls in self.MODEL_CLASSES:
+            self.vreg.load_object(cls)
+        self.session_manager = self.vreg.select_component('sessionmanager')
+        if need_ds_init:
+            # create default groups and create entities according to the schema
+            create_groups()
+            if not self.config['use-google-auth']:
+                create_user(self.LOGGED_IN_USER, 'toto', ('users', 'managers'))
+                self.session = self.login(self.LOGGED_IN_USER, 'toto')
+            else:
+                req = FakeRequest(vreg=self.vreg)
+                self.session = self.session_manager.open_session(req)
+            self.user = self.session.user()
+            ssession = self.config.repo_session(self.session.sessionid)
+            ssession.set_pool()
+            init_persistent_schema(ssession, self.schema)
+            insert_versions(ssession, self.config)
+            ssession.commit()
+            fix_entities(self.schema)
+            # the freshly initialized datastore becomes the template; work on
+            # a copy from now on so the template stays pristine
+            copy(self.DS_TEMPL_FILE, self.DS_FILE)
+            self._set_ds_file(self.DS_FILE)
+        else:
+            if not self.config['use-google-auth']:
+                self.session = self.login(self.LOGGED_IN_USER, 'toto')
+            else:
+                req = FakeRequest(vreg=self.vreg)
+                self.session = self.session_manager.open_session(req)
+            self.user = self.session.user()
+            
+    def tearDown(self):
+        # NOTE(review): the datastore files (DS_FILE / template) are not
+        # removed here -- presumably kept on purpose for reuse; confirm
+        self.session.close()
+        
+    def request(self):
+        """return a fake web request bound to the test session and user"""
+        req = FakeRequest(vreg=self.vreg)
+        req.set_connection(self.session, self.user)
+        return req
+    
+    def add_entity(self, etype, **kwargs):
+        """insert an entity of type `etype` with the given attribute values
+        and return it"""
+        cu = self.session.cursor()
+        rql = 'INSERT %s X' % etype
+        if kwargs:
+            rql += ': %s' % ', '.join('X %s %%(%s)s' % (key, key) for key in kwargs)
+        rset = cu.execute(rql, kwargs)
+        return rset.get_entity(0, 0)
+
+    def execute(self, *args):
+        """execute an rql query on a session cursor"""
+        return self.session.cursor().execute(*args)
+
+    def commit(self):
+        self.session.commit()
+
+    def rollback(self):
+        self.session.rollback()
+        
+    def create_user(self, login, groups=('users',), req=None):
+        """create an EUser with password == login, member of `groups`
+
+        only usable when google authentication is disabled.
+        """
+        assert not self.config['use-google-auth']
+        user = self.add_entity('EUser', upassword=str(login), login=unicode(login))
+        cu = self.session.cursor()
+        cu.execute('SET X in_group G WHERE X eid %%(x)s, G name IN(%s)'
+                    % ','.join(repr(g) for g in groups),
+                    {'x': user.eid}, 'x')
+        return user
+
+    def login(self, login, password=None):
+        """open and return a new session for `login` (password defaults to
+        the login itself)"""
+        assert not self.config['use-google-auth']
+        req = FakeRequest(vreg=self.vreg)
+        req.form['__login'] = login
+        req.form['__password'] = password or login
+        return self.session_manager.open_session(req)
+        
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/tools/__init__.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+"""lax tools cube"""
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/tools/generate_schema_img.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,23 @@
+# generate schema.png (user schema only) and metaschema.png (including meta
+# entities) for the application into its data/ directory
+import sys
+from os.path import dirname, abspath, join
+from yams import schema2dot 
+
+APPLROOT = abspath(join(dirname(abspath(__file__)), '..'))
+
+# the application's custom module may not be importable until the
+# application root is on sys.path
+try:
+    import custom
+except ImportError:
+    sys.path.insert(0, APPLROOT)
+    import custom
+    
+
+schema = custom.SCHEMA
+# generic relations that would clutter the diagrams
+skip_rels = ('owned_by', 'created_by', 'identity', 'is', 'is_instance_of')
+path = join(APPLROOT, 'data', 'schema.png')
+schema2dot.schema2dot(schema, path, #size=size,
+                      skiprels=skip_rels, skipmeta=True)
+print 'generated', path
+path = join(APPLROOT, 'data', 'metaschema.png')
+schema2dot.schema2dot(schema, path, #size=size,
+                      skiprels=skip_rels, skipmeta=False)
+print 'generated', path
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/tools/i18n.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,282 @@
+#!/usr/bin/env python
+"""This script is just a thin wrapper around ``msgcat`` and ``msgfmt``
+to generate ``.mo`` files
+"""
+
+import sys
+import os
+import os.path as osp
+import shutil
+from tempfile import mktemp
+from glob import glob
+from mx.DateTime import now
+
+from logilab.common.fileutils import ensure_fs_mode
+from logilab.common.shellutils import find, rm
+
+from yams import BASE_TYPES
+
+from cubicweb import CW_SOFTWARE_ROOT
+# from cubicweb.__pkginfo__ import version as cubicwebversion
+cubicwebversion = '2.48.2'
+
+DEFAULT_POT_HEAD = r'''# LAX application po file
+
+msgid ""
+msgstr ""
+"Project-Id-Version: cubicweb %s\n"
+"PO-Revision-Date: 2008-03-28 18:14+0100\n"
+"Last-Translator: Logilab Team <contact@logilab.fr>\n"
+"Language-Team: fr <contact@logilab.fr>\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: cubicweb-devtools\n"
+"Plural-Forms: nplurals=2; plural=(n > 1);\n"
+
+''' % cubicwebversion
+
+
+STDLIB_ERTYPES = BASE_TYPES | set( ('EUser', 'EProperty', 'Card', 'identity', 'for_user') )
+
+def create_dir(directory):
+    """create a directory if it doesn't exist yet"""
+    try:
+        # EAFP: attempt the creation and inspect the error instead of a
+        # racy exists() check beforehand
+        os.makedirs(directory)
+        print 'created directory', directory
+    except OSError, ex:
+        import errno
+        if ex.errno != errno.EEXIST:
+            raise
+        print 'directory %s already exists' % directory
+
+def execute(cmd):
+    """display the command, execute it and raise an Exception if returned
+    status != 0
+
+    `cmd` is passed to a shell (os.system); callers must only build it from
+    trusted paths/filenames.
+    """
+    print cmd.replace(os.getcwd() + os.sep, '')
+    status = os.system(cmd)
+    if status != 0:
+        raise Exception()
+
+def add_msg(w, msgid):
+    """write an empty pot msgid definition using writer callable `w`
+
+    handles multi-line msgids: quotes are escaped exactly once and every
+    continuation line is newline-terminated so the output is valid PO
+    syntax (previously the last line ran into the following ``msgstr``,
+    and quotes in multi-line ids were escaped twice).
+    """
+    if isinstance(msgid, unicode):
+        msgid = msgid.encode('utf-8')
+    # 'or ['']' guards against an empty msgid (splitlines() -> [])
+    lines = msgid.replace('"', r'\"').splitlines() or ['']
+    if len(lines) > 1:
+        w('msgid ""\n')
+        for line in lines:
+            # quotes were already escaped above, don't escape a second time
+            w('"%s"\n' % line)
+    else:
+        w('msgid "%s"\n' % lines[0])
+    w('msgstr ""\n\n')
+
+
+def generate_schema_pot(w, vreg, tmpldir):
+    """generate a pot file with schema specific i18n messages
+
+    `w` is a writer callable (e.g. file.write); `tmpldir` is the cube
+    directory, its basename is used as the cube name (may be None/empty).
+
+    notice that relation definitions description and static vocabulary
+    should be marked using '_' and extracted using xgettext
+    """
+    cube = tmpldir and osp.split(tmpldir)[-1]
+    config = vreg.config
+    # make sure application objects are registered before walking the vreg
+    vreg.register_objects(config.vregistry_path())
+    w(DEFAULT_POT_HEAD)
+    _generate_schema_pot(w, vreg, vreg.schema, libschema=None, # no libschema for now
+                         cube=cube)
+
+
+def _generate_schema_pot(w, vreg, schema, libschema=None, cube=None):
+    """write pot entries for entity/relation types of `schema` not already
+    covered by the library (`libschema`), then for "add related" box labels
+    and finally for application object ids/descriptions
+    """
+    w('# schema pot file, generated on %s\n' % now().strftime('%Y-%m-%d %H:%M:%S'))
+    w('# \n')
+    w('# singular and plural forms for each entity type\n')
+    w('\n')
+    # XXX hard-coded list of stdlib's entity schemas
+    libschema = libschema or STDLIB_ERTYPES
+    entities = [e for e in schema.entities() if not e in libschema]
+    # `done` avoids emitting duplicate description msgids
+    done = set()
+    for eschema in sorted(entities):
+        etype = eschema.type
+        add_msg(w, etype)
+        add_msg(w, '%s_plural' % etype)
+        if not eschema.is_final():
+            add_msg(w, 'This %s' % etype)
+            add_msg(w, 'New %s' % etype)
+            add_msg(w, 'add a %s' % etype)
+            add_msg(w, 'remove this %s' % etype)
+        if eschema.description and not eschema.description in done:
+            done.add(eschema.description)
+            add_msg(w, eschema.description)
+    w('# subject and object forms for each relation type\n')
+    w('# (no object form for final relation types)\n')
+    w('\n')
+    if libschema is not None:
+        relations = [r for r in schema.relations() if not r in libschema]
+    else:
+        relations = schema.relations()
+    for rschema in sorted(set(relations)):
+        rtype = rschema.type
+        add_msg(w, rtype)
+        # symmetric/final relations read the same in both directions
+        if not (schema.rschema(rtype).is_final() or rschema.symetric):
+            add_msg(w, '%s_object' % rtype)
+        if rschema.description and rschema.description not in done:
+            done.add(rschema.description)
+            add_msg(w, rschema.description)
+    w('# add related box generated message\n')
+    w('\n')
+    for eschema in schema.entities():
+        if eschema.is_final():
+            continue
+        entity = vreg.etype_class(eschema)(None, None)
+        for x, rschemas in (('subject', eschema.subject_relations()),
+                            ('object', eschema.object_relations())):
+            for rschema in rschemas:
+                if rschema.is_final():
+                    continue
+                for teschema in rschema.targets(eschema, x):
+                    if defined_in_library(libschema, eschema, rschema, teschema, x):
+                        continue
+                    # only relations handled through a creation form need labels
+                    if entity.relation_mode(rschema.type, teschema.type, x) == 'create':
+                        if x == 'subject':
+                            label = 'add %s %s %s %s' % (eschema, rschema, teschema, x)
+                            label2 = "creating %s (%s %%(linkto)s %s %s)" % (teschema, eschema, rschema, teschema)
+                        else:
+                            label = 'add %s %s %s %s' % (teschema, rschema, eschema, x)
+                            label2 = "creating %s (%s %s %s %%(linkto)s)" % (teschema, teschema, rschema, eschema)
+                        add_msg(w, label)
+                        add_msg(w, label2)
+    # application objects defined by the cube itself (or cubicweb)
+    cube = (cube or 'cubicweb') + '.'
+    done = set()
+    for reg, objdict in vreg.items():
+        for objects in objdict.values():
+            for obj in objects:
+                objid = '%s_%s' % (reg, obj.id)
+                if objid in done:
+                    continue
+                if obj.__module__.startswith(cube) and obj.property_defs:
+                    add_msg(w, '%s_description' % objid)
+                    add_msg(w, objid)
+                    done.add(objid)
+                    
+def defined_in_library(libschema, etype, rtype, tetype, x):
+    """return true if the given relation definition exists in cubicweb's library
+
+    `x` is 'subject' or 'object' and tells on which side of the relation
+    `etype` stands.
+    """
+    if libschema is None:
+        return False
+    if x == 'subject':
+        subjtype, objtype = etype, tetype
+    else:
+        subjtype, objtype = tetype, etype
+    try:
+        return libschema.rschema(rtype).has_rdef(subjtype, objtype)
+    except (KeyError, AttributeError):
+        # if libschema is a simple list of entity types (lax specific)
+        # or if the relation could not be found
+        return False
+
+
+
+# XXX check if this is a pure duplication of the original
+# `cubicweb.common.i18n` function
+def compile_i18n_catalogs(sourcedirs, destdir, langs):
+    """generate .mo files for a set of languages into the `destdir` i18n directory
+
+    for each language the <lang>.po files found in `sourcedirs` are merged
+    with msgcat then compiled with msgfmt; returns a list of error strings
+    (empty on full success), processing continues past failing languages.
+    """
+    print 'compiling %s catalogs...' % destdir
+    errors = []
+    for lang in langs:
+        langdir = osp.join(destdir, lang, 'LC_MESSAGES')
+        if not osp.exists(langdir):
+            create_dir(langdir)
+        pofiles = [osp.join(path, '%s.po' % lang) for path in sourcedirs]
+        pofiles = [pof for pof in pofiles if osp.exists(pof)]
+        mergedpo = osp.join(destdir, '%s_merged.po' % lang)
+        try:
+            # merge application messages' catalog with the stdlib's one
+            execute('msgcat --use-first --sort-output --strict %s > %s'
+                    % (' '.join(pofiles), mergedpo))
+            # make sure the .mo file is writeable and compile with *msgfmt*
+            applmo = osp.join(destdir, lang, 'LC_MESSAGES', 'cubicweb.mo')
+            try:
+                ensure_fs_mode(applmo)
+            except OSError:
+                pass # suppose not osp.exists
+            execute('msgfmt %s -o %s' % (mergedpo, applmo))
+        except Exception, ex:
+            errors.append('while handling language %s: %s' % (lang, ex))
+        try:
+            # clean everything
+            os.unlink(mergedpo)
+        except Exception:
+            continue
+    return errors
+
+
+def update_cubes_catalog(vreg, appdirectory, langs):
+    """regenerate and merge the i18n catalogs of the cube in `appdirectory`
+
+    extracts messages from the schema, javascript and python files, merges
+    them with the existing <lang>.po files and prints the files to edit.
+    NOTE: changes the process working directory (appdirectory, then i18n/).
+    """
+    toedit = []
+    tmpl = osp.basename(osp.normpath(appdirectory))
+    # mkdtemp instead of mktemp + mkdir: atomic, no race on the chosen name
+    from tempfile import mkdtemp
+    tempdir = mkdtemp()
+    print '*' * 72
+    print 'updating %s cube...' % tmpl
+    os.chdir(appdirectory)
+    potfiles = []
+    if osp.exists(osp.join('i18n', 'entities.pot')):
+        # previous code did ``potfiles = potfiles.append(...)`` (rebinding to
+        # None) and referenced an undefined name `scfile`
+        potfiles.append(osp.join('i18n', 'entities.pot'))
+    print '******** extract schema messages'
+    schemapot = osp.join(tempdir, 'schema.pot')
+    potfiles.append(schemapot)
+    # XXX
+    generate_schema_pot(open(schemapot, 'w').write, vreg, appdirectory)
+    print '******** extract Javascript messages'
+    jsfiles =  find('.', '.js')
+    if jsfiles:
+        tmppotfile = osp.join(tempdir, 'js.pot')
+        execute('xgettext --no-location --omit-header -k_ -L java --from-code=utf-8 -o %s %s'
+                % (tmppotfile, ' '.join(jsfiles)))
+        # no pot file created if there are no string to translate
+        if osp.exists(tmppotfile): 
+            potfiles.append(tmppotfile)
+    print '******** create cube specific catalog'
+    tmppotfile = osp.join(tempdir, 'generated.pot')
+    execute('xgettext --no-location --omit-header -k_ -o %s %s'
+            % (tmppotfile, ' '.join(glob('*.py'))))
+    if osp.exists(tmppotfile): # doesn't exist if no translation string found
+        potfiles.append(tmppotfile)
+    potfile = osp.join(tempdir, 'cube.pot')
+    print '******** merging .pot files'
+    execute('msgcat %s > %s' % (' '.join(potfiles), potfile))
+    print '******** merging main pot file with existing translations'
+    os.chdir('i18n')
+    for lang in langs:
+        print '****', lang
+        tmplpo = '%s.po' % lang
+        if not osp.exists(tmplpo):
+            shutil.copy(potfile, tmplpo)
+        else:
+            execute('msgmerge -N -s %s %s > %snew' % (tmplpo, potfile, tmplpo))
+            ensure_fs_mode(tmplpo)
+            shutil.move('%snew' % tmplpo, tmplpo)
+        toedit.append(osp.abspath(tmplpo))
+    # cleanup
+    rm(tempdir)
+    # final instructions for the user
+    print '*' * 72
+    print 'you can now edit the following files:'
+    print '* ' + '\n* '.join(toedit)
+             
+
+def getlangs(i18ndir):
+    """return language codes deduced from the .po files found in `i18ndir`"""
+    return [fname[:-3] for fname in os.listdir(i18ndir)
+            if fname.endswith('.po')]
+
+
+def get_i18n_directory(appdirectory):
+    """return the i18n directory of the application in `appdirectory`
+
+    exits the process (status 2) with a message if `appdirectory` is not a
+    valid application directory
+    """
+    if not osp.isdir(appdirectory):
+        print '%s is not an application directory' % appdirectory
+        sys.exit(2)
+    i18ndir = osp.join(appdirectory, 'i18n')
+    if not osp.isdir(i18ndir):
+        print '%s is not an application directory ' \
+            '(i18n subdirectory missing)' % appdirectory
+        sys.exit(2)
+    return i18ndir
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/goa/tools/laxctl.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,289 @@
+"""provides all lax instances management commands into a single utility script
+
+:organization: Logilab
+:copyright: 2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+import sys
+import os
+import os.path as osp
+import time
+import re
+import urllib2
+from urllib import urlencode
+from Cookie import SimpleCookie
+
+from logilab.common.clcommands import Command, register_commands, main_run
+
+from cubicweb import CW_SOFTWARE_ROOT
+from cubicweb.common.uilib import remove_html_tags
+
+APPLROOT = osp.abspath(osp.join(osp.dirname(osp.abspath(__file__)), '..'))
+
+# XXX import custom?
+
+from tools import i18n
+
+def initialize_vregistry(applroot):
+    """build and return a GAERegistry for the application at `applroot`,
+    with its schema loaded
+
+    goa modules are imported lazily because the monkey patches must be
+    applied before anything else from cubicweb.goa is used.
+    """
+    # apply monkey patches first
+    from cubicweb.goa import do_monkey_patch    
+    do_monkey_patch()
+    from cubicweb.goa.goavreg import GAERegistry
+    from cubicweb.goa.goaconfig import GAEConfiguration
+    #WebConfiguration.ext_resources['JAVASCRIPTS'].append('DATADIR/goa.js')
+    # XXX 'toto' is a placeholder application id
+    config = GAEConfiguration('toto', applroot)
+    vreg = GAERegistry(config)
+    vreg.set_schema(config.load_schema())
+    return vreg
+        
+def alistdir(directory):
+    """like os.listdir but return paths joined with `directory`"""
+    return [osp.join(directory, f) for f in os.listdir(directory)]
+
+
+class LaxCommand(Command):
+    """base command class for all lax commands
+
+    `run` creates the vregistry (schema included) then delegates to the
+    subclass's `_run` method
+    """
+    min_args = max_args = 0
+
+    def run(self, args):
+        self.vreg = initialize_vregistry(APPLROOT)
+        self._run(args)
+                
+
+class I18nUpdateCommand(LaxCommand):
+    """updates i18n catalogs
+
+    Refreshes every existing .po file found in the application's i18n
+    directory.
+    """
+    name = 'i18nupdate'
+    
+    def _run(self, args):
+        assert not args, 'no argument expected'
+        i18ndir = i18n.get_i18n_directory(APPLROOT)
+        i18n.update_cubes_catalog(self.vreg, APPLROOT,
+                                      langs=i18n.getlangs(i18ndir))
+
+
+class I18nCompileCommand(LaxCommand):
+    """compiles i18n catalogs
+
+    Generates one .mo file per language found in the application's i18n
+    directory, merging the catalogs of all used cubes plus the shared one.
+    """
+    name = 'i18ncompile'
+    min_args = max_args = 0
+    
+    def _run(self, args):
+        assert not args, 'no argument expected'
+        i18ndir = i18n.get_i18n_directory(APPLROOT)
+        langs = i18n.getlangs(i18ndir)
+        print 'generating .mo files for langs', ', '.join(langs)
+        cubicweb_i18ndir = osp.join(APPLROOT, 'cubes', 'shared')
+        # catalogs are looked up in the application itself, then in each
+        # cube, then in the shared cubicweb catalog
+        paths = self.vreg.config.cubes_path() + [cubicweb_i18ndir]
+        sourcedirs = [i18ndir] + [osp.join(path, 'i18n') for path in paths]
+        i18n.compile_i18n_catalogs(sourcedirs, i18ndir, langs=langs)
+        
+
+class GenerateSchemaCommand(LaxCommand):
+    """generates the schema's png file
+
+    Writes data/schema.png (without meta entities/relations) and
+    data/metaschema.png (with them).
+    """
+    name = 'genschema'
+
+    def _run(self, args):
+        assert not args, 'no argument expected'
+        from yams import schema2dot        
+        schema = self.vreg.schema
+        # relations not worth displaying on the diagrams
+        skip_rels = ('owned_by', 'created_by', 'identity', 'is', 'is_instance_of')
+        path = osp.join(APPLROOT, 'data', 'schema.png')
+        schema2dot.schema2dot(schema, path, #size=size,
+                              skiprels=skip_rels, skipmeta=True)
+        print 'generated', path
+        path = osp.join(APPLROOT, 'data', 'metaschema.png')
+        schema2dot.schema2dot(schema, path, #size=size,
+                              skiprels=skip_rels, skipmeta=False)
+        print 'generated', path
+
+
+class PopulateDataDirCommand(LaxCommand):
+    """populate application's data directory according to used cubes
+
+    Removes all symlinks from the data directory then recreates one per
+    resource provided by the used cubes (plus the 'shared' cube).
+    """
+    name = 'populatedata'
+
+    def _run(self, args):
+        assert not args, 'no argument expected'
+        # first clean everything which is a symlink from the data directory
+        datadir = osp.join(APPLROOT, 'data')
+        if not osp.exists(datadir):
+            print 'created data directory'
+            os.mkdir(datadir)
+        for filepath in alistdir(datadir):
+            if osp.islink(filepath):
+                print 'removing', filepath
+                os.remove(filepath)
+        # then symlink resources provided by each used cube
+        cubes = list(self.vreg.config.cubes()) + ['shared']
+        for templ in cubes:
+            templpath = self.vreg.config.cube_dir(templ)
+            templdatadir = osp.join(templpath, 'data')
+            if not osp.exists(templdatadir):
+                print 'no data provided by', templ
+                continue
+            for resource in os.listdir(templdatadir):
+                if resource == 'external_resources':
+                    continue
+                # the first cube providing a resource wins
+                if not osp.exists(osp.join(datadir, resource)):
+                    print 'symlinked %s from %s' % (resource, templ)
+                    os.symlink(osp.join(templdatadir, resource),
+                               osp.join(datadir, resource))
+
+
+class NoRedirectHandler(urllib2.HTTPRedirectHandler):
+    """urllib2 handler raising an HTTPError on any redirect instead of
+    following it
+    """
+    def http_error_302(self, req, fp, code, msg, headers):
+        raise urllib2.HTTPError(req.get_full_url(), code, msg, headers, fp)
+    http_error_301 = http_error_303 = http_error_307 = http_error_302
+
+
+class GetSessionIdHandler(urllib2.HTTPRedirectHandler):
+    """urllib2 handler grabbing the '__session' cookie from the 303
+    response issued after login and storing it on the config object
+    """
+    def __init__(self, config):
+        self.config = config
+        
+    def http_error_303(self, req, fp, code, msg, headers):
+        cookie = SimpleCookie(headers['Set-Cookie'])
+        sessionid = cookie['__session'].value
+        print 'session id', sessionid
+        setattr(self.config, 'cookie', '__session=' + sessionid)
+        return 1 # non-None: redirect handled, no exception should be raised
+
+    
+class URLCommand(LaxCommand):
+    """abstract class for commands doing stuff by accessing the web application
+    """
+    min_args = max_args = 1
+    arguments = '<site url>'
+
+    options = (
+        ('cookie',
+         {'short': 'C', 'type' : 'string', 'metavar': 'key=value',
+          'default': None,
+          'help': 'session/authentication cookie.'}),
+        ('user',
+         {'short': 'u', 'type' : 'string', 'metavar': 'login',
+          'default': None,
+          'help': 'user login instead of giving raw cookie string (require lax '
+          'based authentication).'}),
+        ('password',
+         {'short': 'p', 'type' : 'string', 'metavar': 'password',
+          'default': None,
+          'help': 'user password instead of giving raw cookie string (require '
+          'lax based authentication).'}),
+        )
+    
+    def _run(self, args):
+        baseurl = args[0]
+        if not baseurl.startswith('http'):
+            baseurl = 'http://' + baseurl
+        if not baseurl.endswith('/'):
+            baseurl += '/'
+        self.base_url = baseurl
+        if not self.config.cookie and self.config.user:
+            # no cookie specified but a user is. Try to open a session using
+            # given authentication info
+            print 'opening session for', self.config.user
+            opener = urllib2.build_opener(GetSessionIdHandler(self.config))
+            urllib2.install_opener(opener)
+            data = urlencode(dict(__login=self.config.user,
+                                  __password=self.config.password))
+            self.open_url(urllib2.Request(baseurl, data))            
+        opener = urllib2.build_opener(NoRedirectHandler())
+        urllib2.install_opener(opener)        
+        self.do_base_url(baseurl)
+
+    def build_req(self, url):
+        req = urllib2.Request(url)
+        if self.config.cookie:
+            req.headers['Cookie'] = self.config.cookie
+        return req
+    
+    def open_url(self, req):
+        try:
+            return urllib2.urlopen(req)
+        except urllib2.HTTPError, ex:
+            if ex.code == 302:
+                self.error_302(req, ex)
+            elif ex.code == 500:
+                self.error_500(req, ex)
+            else:
+                raise
+
+    def error_302(self, req, ex):
+        print 'authentication required'
+        print ('visit %s?vid=authinfo with your browser to get '
+               'authentication info' % self.base_url)
+        sys.exit(1)
+
+    def error_500(self, req, ex):
+        print 'an unexpected error occured on the server'
+        print ('you may get more information by visiting '
+               '%s' % req.get_full_url())
+        sys.exit(1)
+
+    def extract_message(self, data):
+        match = re.search(r'<div class="message">(.*?)</div>', data.read(), re.M|re.S)
+        if match:
+            msg = remove_html_tags(match.group(1))
+            print msg
+            return msg
+        
+    def do_base_url(self, baseurl):
+        raise NotImplementedError()
+
+        
+class DSInitCommand(URLCommand):
+    """initialize the datastore"""
+    name = 'db-init'
+
+    options = URLCommand.options + (
+        ('sleep',
+         {'short': 's', 'type' : 'int', 'metavar': 'nb seconds',
+          'default': None,
+          'help': 'number of seconds to wait between each request to avoid '
+          'going out of quota.'}),
+        )
+        
+    def do_base_url(self, baseurl):
+        req = self.build_req(baseurl + '?vid=contentinit')
+        while True:
+            try:
+                data = self.open_url(req)
+            except urllib2.HTTPError, ex:
+                if ex.code == 303: # redirect
+                    print 'process completed'
+                    break
+                raise
+            msg = self.extract_message(data)
+            if msg and msg.startswith('error: '):
+                print ('you may to cleanup datastore by visiting '
+                       '%s?vid=contentclear (ALL ENTITIES WILL BE DELETED)'
+                       % baseurl)
+                break
+            if self.config.sleep:
+                time.sleep(self.config.sleep)
+
+
+class CleanSessionsCommand(URLCommand):
+    """cleanup sessions on the server. This command should usually be called
+    regularly by a cron job or equivalent.
+    """
+    name = "cleansessions"
+    def do_base_url(self, baseurl):
+        # trigger the server-side cleansessions view and report its message
+        req = self.build_req(baseurl + '?vid=cleansessions')
+        data = self.open_url(req)
+        self.extract_message(data)
+            
+    
+# register all lax commands so main_run can dispatch on the command name
+register_commands([I18nUpdateCommand,
+                   I18nCompileCommand,
+                   GenerateSchemaCommand,
+                   PopulateDataDirCommand,
+                   DSInitCommand,
+                   CleanSessionsCommand,
+                   ])
+
+def run():
+    """command line entry point"""
+    main_run(sys.argv[1:])
+    
+if __name__ == '__main__':
+    run()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/hercule.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,275 @@
+"""RQL client for cubicweb, connecting to application using pyro
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+import os
+import sys
+
+from logilab.common import flatten
+from logilab.common.cli import CLIHelper
+from logilab.common.clcommands import BadCommandUsage, pop_arg
+from cubicweb.toolsutils import CONNECT_OPTIONS, Command, register_commands
+ 
+# result formatter ############################################################
+
+PAGER = os.environ.get('PAGER', 'less')
+            
+def pager_format_results(writer, layout):
+    """pipe results to a pager like more or less"""
+    (r, w) = os.pipe()
+    pid = os.fork()
+    if pid == 0:
+        # child: make the read end of the pipe the pager's stdin
+        os.dup2(r, 0)
+        os.close(r)
+        os.close(w)
+        if PAGER == 'less':
+            # -r lets less pass raw control characters through
+            os.execlp(PAGER, PAGER, '-r')
+        else:
+            os.execlp(PAGER, PAGER)
+        sys.exit(0)
+    # parent: write formatted results into the pipe, then wait for the pager
+    stream = os.fdopen(w, "w")
+    os.close(r)
+    try:
+        format_results(writer, layout, stream)
+    finally:
+        stream.close()
+        status = os.waitpid(pid, 0)
+
+def izip2(list1, list2):
+    """yield, for each index, the tuple concatenation of the i-th items of
+    the two lists (used to interleave column descriptions with row values)
+    """
+    for i in xrange(len(list1)):
+        yield list1[i] + tuple(list2[i])
+        
+# NOTE: the default stream is bound to sys.stdout at import time
+def format_results(writer, layout, stream=sys.stdout): 
+    """format result as text into the given file like object"""
+    writer.format(layout, stream)
+
+
+# output encoding used when converting results to strings;
+# sys.stdout.encoding only exists since python 2.3
+try:
+    encoding = sys.stdout.encoding
+except AttributeError: # python < 2.3
+    encoding = 'UTF-8'
+
+def to_string(value, encoding=encoding):
+    """used to convert arbitrary values to encoded string"""
+    if isinstance(value, unicode):
+        return value.encode(encoding, 'replace')
+    return str(value)
+
+# command line querier ########################################################
+    
+class RQLCli(CLIHelper):
+    """Interactive command line client for CubicWeb, allowing user to execute
+    arbitrary RQL queries and to fetch schema information
+    """
+    # commands are prefixed by ":"
+    CMD_PREFIX = ':'
+    # map commands to folders
+    CLIHelper.CMD_MAP.update({
+        'connect' :      "CubicWeb",
+        'schema'  :      "CubicWeb",
+        'description'  : "CubicWeb",
+        'commit' :       "CubicWeb",
+        'rollback' :     "CubicWeb",
+        'autocommit'  :  "Others", 
+        'debug' :        "Others",
+        })
+    
+    def __init__(self, application=None, user=None, password=None,
+                 host=None, debug=0):
+        CLIHelper.__init__(self, os.path.join(os.environ["HOME"], ".erqlhist"))
+        self.cnx = None
+        self.cursor = None
+        # XXX give a Request like object, not None
+        from cubicweb.schemaviewer import SchemaViewer
+        self.schema_viewer = SchemaViewer(None, encoding=encoding)
+        from logilab.common.ureports import TextWriter
+        self.writer = TextWriter()
+        self.autocommit = False
+        self._last_result = None
+        self._previous_lines = []
+        if application is not None:
+            self.do_connect(application, user, password, host)
+        self.do_debug(debug)
+        
+    def do_connect(self, application, user=None, password=None, host=None):
+        """connect to an cubicweb application"""
+        from cubicweb.dbapi import connect
+        if user is None:
+            user = raw_input('login: ')
+        if password is None:
+            from getpass import getpass
+            password = getpass('password: ')
+        if self.cnx is not None:
+            self.cnx.close()
+        self.cnx = connect(user=user, password=password, host=host,
+                           database=application)
+        self.schema = self.cnx.get_schema()
+        self.cursor = self.cnx.cursor()
+        # add entities types to the completion commands
+        self._completer.list = (self.commands.keys() +
+                                self.schema.entities() + ['Any'])
+        print _('You are now connected to %s') % application
+        
+
+    help_do_connect = ('connect', "connect <application> [<user> [<password> [<host>]]]",
+                       _(do_connect.__doc__))
+
+    def do_debug(self, debug=1):
+        """set debug level"""
+        self._debug = debug
+        if debug:
+            self._format = format_results
+        else:
+            self._format = pager_format_results
+        if self._debug:
+            print _('Debug level set to %s'%debug)
+        
+    help_do_debug = ('debug', "debug [debug_level]", _(do_debug.__doc__))
+    
+    def do_description(self):
+        """display the description of the latest result"""
+        if self.cursor.description is None:
+            print _('No query has been executed')
+        else:
+            print '\n'.join([', '.join(line_desc)
+                             for line_desc in self.cursor.description])
+
+    help_do_description = ('description', "description", _(do_description.__doc__))
+    
+    def do_schema(self, name=None):
+        """display information about the application schema """
+        if self.cnx is None:
+            print _('You are not connected to an application !')
+            return
+        done = None
+        if name is None:
+            # display the full schema
+            self.display_schema(self.schema)
+            done = 1
+        else:
+            if self.schema.has_entity(name):
+                self.display_schema(self.schema.eschema(name))
+                done = 1
+            if self.schema.has_relation(name):
+                self.display_schema(self.schema.rschema(name))
+                done = 1
+        if done is None:
+            print _('Unable to find anything named "%s" in the schema !') % name
+            
+    help_do_schema = ('schema', "schema [keyword]", _(do_schema.__doc__))
+
+    
+    def do_commit(self):
+        """commit the current transaction"""
+        self.cnx.commit()
+
+    help_do_commit = ('commit', "commit", _(do_commit.__doc__))
+    
+    def do_rollback(self):
+        """rollback the current transaction"""
+        self.cnx.rollback()
+
+    help_do_rollback = ('rollback', "rollback", _(do_rollback.__doc__))
+    
+    def do_autocommit(self):
+        """toggle autocommit mode"""
+        self.autocommit = not self.autocommit
+
+    help_do_autocommit = ('autocommit', "autocommit", _(do_autocommit.__doc__))
+    
+
+    def handle_line(self, stripped_line):
+        """handle non command line :
+        if the query is complete, executes it and displays results (if any)
+        else, stores the query line and waits for the suite
+        """
+        if self.cnx is None:
+            print _('You are not connected to an application !')
+            return
+        # append line to buffer
+        self._previous_lines.append(stripped_line)
+        # query are ended by a ';'
+        if stripped_line[-1] != ';':
+            return
+        # extract query from the buffer and flush it
+        query = '\n'.join(self._previous_lines)
+        self._previous_lines = []
+        # search results
+        try:
+            self.cursor.execute(query)
+        except:
+            if self.autocommit:
+                self.cnx.rollback()
+            raise
+        else:
+            if self.autocommit:
+                self.cnx.commit()
+        self.handle_result(self.cursor.fetchall(), self.cursor.description)
+
+    def handle_result(self, result, description):
+        """display query results if any"""
+        if not result:
+            print _('No result matching query')
+        else:
+            from logilab.common.ureports import Table
+            children = flatten(izip2(description, result), to_string)
+            layout = Table(cols=2*len(result[0]), children=children, cheaders=1)
+            self._format(self.writer, layout)
+            print _('%s results matching query') % len(result)
+
+    def display_schema(self, schema):
+        """display a schema object"""
+        attr = schema.__class__.__name__.lower().replace('cubicweb', '')
+        layout = getattr(self.schema_viewer, 'visit_%s' % attr)(schema)
+        self._format(self.writer, layout)
+
+
+class CubicWebClientCommand(Command):
+    """A command line querier for CubicWeb, using the Relation Query Language.
+
+    <application>
+      identifier of the application to connect to
+    """
+    name = 'client'
+    arguments = '<application>'
+    options = CONNECT_OPTIONS + (
+        ("verbose",
+         {'short': 'v', 'type' : 'int', 'metavar': '<level>',
+          'default': 0,
+          'help': 'ask confirmation to continue after an error.',
+          }),
+        ("batch",
+         {'short': 'b', 'type' : 'string', 'metavar': '<file>',
+          'help': 'file containing a batch of RQL statements to execute.',
+          }),
+        )
+    
+    def run(self, args):
+        """run the command with its specific arguments"""
+        appid = pop_arg(args, expected_size_after=None)
+        batch_stream = None
+        if args:
+            if len(args) == 1 and args[0] == '-':
+                # '-' means read the batch of statements from stdin
+                batch_stream = sys.stdin
+            else:
+                raise BadCommandUsage('too many arguments')
+        if self.config.batch:
+            # NOTE(review): the batch file is never explicitly closed
+            batch_stream = open(self.config.batch)
+        cli = RQLCli(appid, self.config.user, self.config.password,
+                     self.config.host, self.config.debug)
+        if batch_stream:
+            # batch mode: execute statements one by one with autocommit on
+            cli.autocommit = True
+            for line in batch_stream:
+                line = line.strip()
+                if not line:
+                    continue
+                print '>>>', line
+                cli.handle_line(line)
+        else:
+            # interactive mode
+            cli.run()
+        
+register_commands((CubicWebClientCommand,))
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/i18n/en.po	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,2640 @@
+# cubicweb i18n catalog
+# Copyright 2003-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# Logilab <contact@logilab.fr>
+msgid ""
+msgstr ""
+"Project-Id-Version: 2.0\n"
+"POT-Creation-Date: 2006-01-12 17:35+CET\n"
+"PO-Revision-Date: 2008-02-29 09:26+0100\n"
+"Last-Translator: Sylvain Thenault <sylvain.thenault@logilab.fr>\n"
+"Language-Team: English <devel@logilab.fr.org>\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: pygettext.py 1.5\n"
+"Plural-Forms: nplurals=2; plural=(n > 1);\n"
+
+#, python-format
+msgid ""
+"\n"
+"%(user)s changed status from <%(previous_state)s> to <%(current_state)s> for "
+"entity\n"
+"'%(title)s'\n"
+"\n"
+"%(comment)s\n"
+"\n"
+"url: %(url)s\n"
+msgstr ""
+
+#, python-format
+msgid "  from state %(fromstate)s to state %(tostate)s\n"
+msgstr ""
+
+#, python-format
+msgid "%(firstname)s %(surname)s"
+msgstr ""
+
+#, python-format
+msgid "%(subject)s %(etype)s #%(eid)s (%(login)s)"
+msgstr ""
+
+#, python-format
+msgid "%d months"
+msgstr ""
+
+#, python-format
+msgid "%d weeks"
+msgstr ""
+
+#, python-format
+msgid "%d years"
+msgstr ""
+
+#, python-format
+msgid "%s constraint failed for value %r"
+msgstr ""
+
+#, python-format
+msgid "%s days"
+msgstr ""
+
+#, python-format
+msgid "%s error report"
+msgstr ""
+
+#, python-format
+msgid "%s hours"
+msgstr ""
+
+#, python-format
+msgid "%s minutes"
+msgstr ""
+
+#, python-format
+msgid "%s not estimated"
+msgstr ""
+
+#, python-format
+msgid "%s results matching query"
+msgstr ""
+
+#, python-format
+msgid "%s seconds"
+msgstr ""
+
+#, python-format
+msgid "%s software version of the database"
+msgstr ""
+
+#, python-format
+msgid "%s, or without time: %s"
+msgstr ""
+
+msgid "**"
+msgstr "0..n 0..n"
+
+msgid "*+"
+msgstr "0..n 1..n"
+
+msgid "*1"
+msgstr "0..n 1"
+
+msgid "*?"
+msgstr "0..n 0..1"
+
+msgid "+*"
+msgstr "1..n 0..n"
+
+msgid "++"
+msgstr "1..n 1..n"
+
+msgid "+1"
+msgstr "1..n 1"
+
+msgid "+?"
+msgstr "1..n 0..1"
+
+msgid "1*"
+msgstr "1 0..n"
+
+msgid "1+"
+msgstr "1 1..n"
+
+msgid "11"
+msgstr "1 1"
+
+msgid "1?"
+msgstr "1 0..1"
+
+msgid "?*"
+msgstr "0..1 0..n"
+
+msgid "?+"
+msgstr "0..1 1..n"
+
+msgid "?1"
+msgstr "0..1 1"
+
+msgid "??"
+msgstr "0..1 0..1"
+
+msgid "AND"
+msgstr ""
+
+msgid "Any"
+msgstr ""
+
+msgid "Application"
+msgstr ""
+
+msgid "Bookmark"
+msgstr "Bookmark"
+
+msgid "Bookmark_plural"
+msgstr "Bookmarks"
+
+msgid "Boolean"
+msgstr "Boolean"
+
+msgid "Boolean_plural"
+msgstr "Booleans"
+
+msgid "Browse by category"
+msgstr ""
+
+msgid "Bytes"
+msgstr "Bytes"
+
+msgid "Bytes_plural"
+msgstr "Bytes"
+
+msgid "Card"
+msgstr "Card"
+
+msgid "Card_plural"
+msgstr "Cards"
+
+msgid "Date"
+msgstr "Date"
+
+msgid "Date_plural"
+msgstr "Dates"
+
+msgid "Datetime"
+msgstr "Date and time"
+
+msgid "Datetime_plural"
+msgstr "Dates and times"
+
+#, python-format
+msgid "Debug level set to %s"
+msgstr ""
+
+msgid "Decimal"
+msgstr "Decimal number"
+
+msgid "Decimal_plural"
+msgstr "Decimal numbers"
+
+msgid "Do you want to delete the following element(s) ?"
+msgstr ""
+
+msgid "EConstraint"
+msgstr "Constraint"
+
+msgid "EConstraintType"
+msgstr "Constraint type"
+
+msgid "EConstraintType_plural"
+msgstr "Constraint types"
+
+msgid "EConstraint_plural"
+msgstr "Constraints"
+
+msgid "EEType"
+msgstr "Entity type"
+
+msgid "EEType_plural"
+msgstr "Entity types"
+
+msgid "EFRDef"
+msgstr "Attribute"
+
+msgid "EFRDef_plural"
+msgstr "Attributes"
+
+msgid "EGroup"
+msgstr "Group"
+
+msgid "EGroup_plural"
+msgstr "Groups"
+
+msgid "ENFRDef"
+msgstr "Relation"
+
+msgid "ENFRDef_plural"
+msgstr "Relations"
+
+msgid "EPermission"
+msgstr "Permission"
+
+msgid "EPermission_plural"
+msgstr "Permissions"
+
+msgid "EProperty"
+msgstr "Property"
+
+msgid "EProperty_plural"
+msgstr "Properties"
+
+msgid "ERType"
+msgstr "Relation type"
+
+msgid "ERType_plural"
+msgstr "Relation types"
+
+msgid "EUser"
+msgstr "User"
+
+msgid "EUser_plural"
+msgstr "Users"
+
+msgid "Email body: "
+msgstr ""
+
+msgid "EmailAddress"
+msgstr "Email address"
+
+msgid "EmailAddress_plural"
+msgstr "Email addresses"
+
+msgid "Entities"
+msgstr ""
+
+msgid "Environment"
+msgstr ""
+
+msgid "Float"
+msgstr "Float"
+
+msgid "Float_plural"
+msgstr "Floats"
+
+msgid "From: "
+msgstr ""
+
+msgid "Int"
+msgstr "Integer"
+
+msgid "Int_plural"
+msgstr "Integers"
+
+msgid "Interval"
+msgstr "Interval"
+
+msgid "Interval_plural"
+msgstr "Intervals"
+
+msgid "New Bookmark"
+msgstr "New bookmark"
+
+msgid "New Card"
+msgstr "New card"
+
+msgid "New EConstraint"
+msgstr "New constraint"
+
+msgid "New EConstraintType"
+msgstr "New constraint type"
+
+msgid "New EEType"
+msgstr "New entity type"
+
+msgid "New EFRDef"
+msgstr "New attribute"
+
+msgid "New EGroup"
+msgstr "New group"
+
+msgid "New ENFRDef"
+msgstr "New relation"
+
+msgid "New EPermission"
+msgstr "New permission"
+
+msgid "New EProperty"
+msgstr "New property"
+
+msgid "New ERType"
+msgstr "New relation type"
+
+msgid "New EUser"
+msgstr "New user"
+
+msgid "New EmailAddress"
+msgstr "New email address"
+
+msgid "New RQLExpression"
+msgstr "New RQL expression"
+
+msgid "New State"
+msgstr "New state"
+
+msgid "New TrInfo"
+msgstr "New transition information"
+
+msgid "New Transition"
+msgstr "New transition"
+
+msgid "No query has been executed"
+msgstr ""
+
+msgid "No result matching query"
+msgstr ""
+
+msgid "OR"
+msgstr ""
+
+msgid "Password"
+msgstr "Password"
+
+msgid "Password_plural"
+msgstr "Passwords"
+
+msgid "Please note that this is only a shallow copy"
+msgstr ""
+
+msgid "Problem occured"
+msgstr ""
+
+msgid "RQLExpression"
+msgstr "RQL expression"
+
+msgid "RQLExpression_plural"
+msgstr "RQL expressions"
+
+msgid "Recipients: "
+msgstr ""
+
+msgid "Relations"
+msgstr ""
+
+msgid "Request"
+msgstr ""
+
+#, python-format
+msgid "Schema %s"
+msgstr ""
+
+msgid "Search for"
+msgstr ""
+
+msgid "Server"
+msgstr ""
+
+msgid "Startup views"
+msgstr ""
+
+msgid "State"
+msgstr "State"
+
+msgid "State_plural"
+msgstr "States"
+
+msgid "String"
+msgstr "String"
+
+msgid "String_plural"
+msgstr "Strings"
+
+msgid "Subject: "
+msgstr ""
+
+msgid "Submit bug report"
+msgstr ""
+
+msgid "Submit bug report by mail"
+msgstr ""
+
+msgid "The repository holds the following entities"
+msgstr ""
+
+#, python-format
+msgid "The view %s can not be applied to this query"
+msgstr ""
+
+#, python-format
+msgid "The view %s could not be found"
+msgstr ""
+
+#, python-format
+msgid "This %s"
+msgstr ""
+
+msgid "This Bookmark"
+msgstr "This bookmark"
+
+msgid "This Card"
+msgstr "This card"
+
+msgid "This EConstraint"
+msgstr "This constraint"
+
+msgid "This EConstraintType"
+msgstr "This constraint type"
+
+msgid "This EEType"
+msgstr "This entity type"
+
+msgid "This EFRDef"
+msgstr "This attribute"
+
+msgid "This EGroup"
+msgstr "This group"
+
+msgid "This ENFRDef"
+msgstr "This relation"
+
+msgid "This EPermission"
+msgstr "This permission"
+
+msgid "This EProperty"
+msgstr "This property"
+
+msgid "This ERType"
+msgstr "This relation type"
+
+msgid "This EUser"
+msgstr "This user"
+
+msgid "This EmailAddress"
+msgstr "This email address"
+
+msgid "This RQLExpression"
+msgstr "This RQL expression"
+
+msgid "This State"
+msgstr "This state"
+
+msgid "This TrInfo"
+msgstr "This transition information"
+
+msgid "This Transition"
+msgstr "This transition"
+
+msgid "Time"
+msgstr "Time"
+
+msgid "Time_plural"
+msgstr "Times"
+
+msgid "TrInfo"
+msgstr "Transition information"
+
+msgid "TrInfo_plural"
+msgstr "Workflow history"
+
+msgid "Transition"
+msgstr "Transition"
+
+msgid "Transition_plural"
+msgstr "Transitions"
+
+#, python-format
+msgid "Unable to find anything named \"%s\" in the schema !"
+msgstr ""
+
+msgid "Used by:"
+msgstr ""
+
+msgid "What's new?"
+msgstr ""
+
+msgid "Workflow history"
+msgstr ""
+
+msgid "You are not connected to an application !"
+msgstr ""
+
+#, python-format
+msgid "You are now connected to %s"
+msgstr ""
+
+msgid "You can use any of the following substitutions in your text"
+msgstr ""
+
+msgid "You have no access to this view"
+msgstr ""
+
+#, python-format
+msgid "[%s supervision] changes summary"
+msgstr ""
+
+msgid "__msg state changed"
+msgstr "state changed"
+
+msgid ""
+"a RQL expression which should return some results, else the transition won't "
+"be available. This query may use X and U variables that will respectivly "
+"represents the current entity and the current user"
+msgstr ""
+
+msgid ""
+"a card is a textual content used as documentation, reference, procedure "
+"reminder"
+msgstr ""
+
+msgid "about this site"
+msgstr ""
+
+msgid "access type"
+msgstr ""
+
+msgid "account state"
+msgstr ""
+
+msgid "action(s) on this selection"
+msgstr ""
+
+msgid "actions"
+msgstr ""
+
+msgid "actions_addentity"
+msgstr "add an entity of this type"
+
+msgid "actions_addentity_description"
+msgstr ""
+
+msgid "actions_cancel"
+msgstr "cancel the selection"
+
+msgid "actions_cancel_description"
+msgstr ""
+
+msgid "actions_copy"
+msgstr "copy"
+
+msgid "actions_copy_description"
+msgstr ""
+
+msgid "actions_delete"
+msgstr "delete"
+
+msgid "actions_delete_description"
+msgstr ""
+
+msgid "actions_edit"
+msgstr "modify"
+
+msgid "actions_edit_description"
+msgstr ""
+
+msgid "actions_embed"
+msgstr "embed"
+
+msgid "actions_embed_description"
+msgstr ""
+
+msgid "actions_follow"
+msgstr "follow"
+
+msgid "actions_follow_description"
+msgstr ""
+
+msgid "actions_logout"
+msgstr "logout"
+
+msgid "actions_logout_description"
+msgstr ""
+
+msgid "actions_manage"
+msgstr "manage"
+
+msgid "actions_manage_description"
+msgstr ""
+
+msgid "actions_muledit"
+msgstr "modify all"
+
+msgid "actions_muledit_description"
+msgstr ""
+
+msgid "actions_myinfos"
+msgstr "my profile"
+
+msgid "actions_myinfos_description"
+msgstr ""
+
+msgid "actions_myprefs"
+msgstr "my preferences"
+
+msgid "actions_myprefs_description"
+msgstr ""
+
+msgid "actions_schema"
+msgstr "schema"
+
+msgid "actions_schema_description"
+msgstr ""
+
+msgid "actions_select"
+msgstr "select"
+
+msgid "actions_select_description"
+msgstr ""
+
+msgid "actions_sendemail"
+msgstr "send email"
+
+msgid "actions_sendemail_description"
+msgstr ""
+
+msgid "actions_siteconfig"
+msgstr "site configuration"
+
+msgid "actions_siteconfig_description"
+msgstr ""
+
+msgid "actions_view"
+msgstr "view"
+
+msgid "actions_view_description"
+msgstr ""
+
+msgid "actions_workflow"
+msgstr "see workflow"
+
+msgid "actions_workflow_description"
+msgstr ""
+
+msgid "activate"
+msgstr ""
+
+msgid "activated"
+msgstr ""
+
+msgid "add"
+msgstr ""
+
+msgid "add EEType add_permission RQLExpression subject"
+msgstr "rql expression for the add permission"
+
+msgid "add EEType delete_permission RQLExpression subject"
+msgstr "rql expression for the delete permission"
+
+msgid "add EEType read_permission RQLExpression subject"
+msgstr "rql expression for the read permission"
+
+msgid "add EEType update_permission RQLExpression subject"
+msgstr "rql expression for the update permission"
+
+msgid "add EFRDef constrained_by EConstraint subject"
+msgstr "constraint"
+
+msgid "add EFRDef relation_type ERType object"
+msgstr "attribute definition"
+
+msgid "add ENFRDef constrained_by EConstraint subject"
+msgstr "constraint"
+
+msgid "add ENFRDef relation_type ERType object"
+msgstr "relation definition"
+
+msgid "add EProperty for_user EUser object"
+msgstr "property"
+
+msgid "add ERType add_permission RQLExpression subject"
+msgstr "rql expression for the add permission"
+
+msgid "add ERType delete_permission RQLExpression subject"
+msgstr "rql expression for the delete permission"
+
+msgid "add ERType read_permission RQLExpression subject"
+msgstr "rql expression for the read permission"
+
+msgid "add EUser in_group EGroup object"
+msgstr "user"
+
+msgid "add EUser use_email EmailAddress subject"
+msgstr "email address"
+
+msgid "add State allowed_transition Transition object"
+msgstr "incoming state"
+
+msgid "add State allowed_transition Transition subject"
+msgstr "allowed transition"
+
+msgid "add State state_of EEType object"
+msgstr "state"
+
+msgid "add Transition condition RQLExpression subject"
+msgstr "condition"
+
+msgid "add Transition destination_state State object"
+msgstr "incoming transition"
+
+msgid "add Transition destination_state State subject"
+msgstr "destination state"
+
+msgid "add Transition transition_of EEType object"
+msgstr "transition"
+
+msgid "add a Bookmark"
+msgstr "add a bookmark"
+
+msgid "add a Card"
+msgstr "add a card"
+
+msgid "add a EConstraint"
+msgstr "add a constraint"
+
+msgid "add a EConstraintType"
+msgstr "add a constraint type"
+
+msgid "add a EEType"
+msgstr "add an entity type"
+
+msgid "add a EFRDef"
+msgstr "add an attribute"
+
+msgid "add a EGroup"
+msgstr "add a group"
+
+msgid "add a ENFRDef"
+msgstr "add a relation"
+
+msgid "add a EPermission"
+msgstr "add a permission"
+
+msgid "add a EProperty"
+msgstr "add a property"
+
+msgid "add a ERType"
+msgstr "add a relation type"
+
+msgid "add a EUser"
+msgstr "add a user"
+
+msgid "add a EmailAddress"
+msgstr "add an email address"
+
+msgid "add a RQLExpression"
+msgstr "add a rql expression"
+
+msgid "add a State"
+msgstr "add a state"
+
+msgid "add a TrInfo"
+msgstr "add a transition information"
+
+msgid "add a Transition"
+msgstr "add a transition"
+
+msgid "add a new permission"
+msgstr ""
+
+msgid "add relation"
+msgstr ""
+
+msgid "add_perm"
+msgstr "add permission"
+
+# subject and object forms for each relation type
+# (no object form for final relation types)
+msgid "add_permission"
+msgstr "can be added by"
+
+msgid "add_permission_object"
+msgstr "has permission to add"
+
+#, python-format
+msgid "added %(etype)s #%(eid)s (%(title)s)"
+msgstr ""
+
+#, python-format
+msgid ""
+"added relation %(rtype)s from %(frometype)s #%(fromeid)s to %(toetype)s #%"
+"(toeid)s"
+msgstr ""
+
+msgid "address"
+msgstr ""
+
+msgid "alias"
+msgstr ""
+
+msgid "allowed transition from this state"
+msgstr ""
+
+msgid "allowed transitions from this state"
+msgstr ""
+
+msgid "allowed_transition"
+msgstr "allowed transition"
+
+msgid "allowed_transition_object"
+msgstr "incoming states"
+
+msgid "am/pm calendar (month)"
+msgstr ""
+
+msgid "am/pm calendar (semester)"
+msgstr ""
+
+msgid "am/pm calendar (week)"
+msgstr ""
+
+msgid "am/pm calendar (year)"
+msgstr ""
+
+msgid "an abstract for this card"
+msgstr ""
+
+msgid "an electronic mail address associated to a short alias"
+msgstr ""
+
+msgid "an error occured"
+msgstr ""
+
+msgid "an error occured while processing your request"
+msgstr ""
+
+msgid "an error occured, the request cannot be fulfilled"
+msgstr ""
+
+msgid "and linked"
+msgstr ""
+
+msgid "anonymous"
+msgstr ""
+
+msgid "application entities"
+msgstr ""
+
+msgid "application schema"
+msgstr ""
+
+msgid "april"
+msgstr ""
+
+#, python-format
+msgid "at least one relation %s is required on %s(%s)"
+msgstr ""
+
+msgid "attribute"
+msgstr ""
+
+msgid "august"
+msgstr ""
+
+msgid "authentication failure"
+msgstr ""
+
+msgid "automatic"
+msgstr ""
+
+msgid "bad value"
+msgstr ""
+
+msgid "base url"
+msgstr ""
+
+msgid "bookmark has been removed"
+msgstr ""
+
+msgid "bookmark this page"
+msgstr ""
+
+msgid "bookmarked_by"
+msgstr "bookmarked by"
+
+msgid "bookmarked_by_object"
+msgstr "has bookmarks"
+
+msgid "bookmarks"
+msgstr ""
+
+msgid "boxes"
+msgstr ""
+
+msgid "boxes_bookmarks_box"
+msgstr "bookmarks box"
+
+msgid "boxes_bookmarks_box_description"
+msgstr "box listing the user's bookmarks"
+
+msgid "boxes_edit_box"
+msgstr "actions box"
+
+msgid "boxes_edit_box_description"
+msgstr "box listing the applicable actions on the displayed data"
+
+msgid "boxes_filter_box"
+msgstr "filter"
+
+msgid "boxes_filter_box_description"
+msgstr "box providing the ability to filter within current search results"
+
+msgid "boxes_possible_views_box"
+msgstr "possible views box"
+
+msgid "boxes_possible_views_box_description"
+msgstr "box listing the possible views for the displayed data"
+
+msgid "boxes_rss"
+msgstr "rss box"
+
+msgid "boxes_rss_description"
+msgstr "RSS icon to get displayed data as an RSS feed"
+
+msgid "boxes_search_box"
+msgstr "search box"
+
+msgid "boxes_search_box_description"
+msgstr "search box"
+
+msgid "boxes_startup_views_box"
+msgstr "startup views box"
+
+msgid "boxes_startup_views_box_description"
+msgstr "box listing the possible start pages"
+
+msgid "bug report sent"
+msgstr ""
+
+msgid "button_apply"
+msgstr "apply"
+
+msgid "button_cancel"
+msgstr "cancel"
+
+msgid "button_delete"
+msgstr "delete"
+
+msgid "button_ok"
+msgstr "validate"
+
+msgid "button_reset"
+msgstr "reset"
+
+msgid "by"
+msgstr ""
+
+msgid "by relation"
+msgstr ""
+
+msgid "calendar"
+msgstr ""
+
+msgid "calendar (month)"
+msgstr ""
+
+msgid "calendar (semester)"
+msgstr ""
+
+msgid "calendar (week)"
+msgstr ""
+
+msgid "calendar (year)"
+msgstr ""
+
+#, python-format
+msgid "can't change the %s attribute"
+msgstr ""
+
+#, python-format
+msgid "can't connect to source %s, some data may be missing"
+msgstr ""
+
+#, python-format
+msgid "can't display data, unexpected error: %s"
+msgstr ""
+
+#, python-format
+msgid ""
+"can't set inlined=%(inlined)s, %(stype)s %(rtype)s %(otype)s has cardinality="
+"%(card)s"
+msgstr ""
+
+msgid "cancel select"
+msgstr ""
+
+msgid "cancel this insert"
+msgstr ""
+
+msgid "canonical"
+msgstr ""
+
+msgid "cardinality"
+msgstr ""
+
+#, python-format
+msgid "changed state of %(etype)s #%(eid)s (%(title)s)"
+msgstr ""
+
+msgid "changes applied"
+msgstr ""
+
+msgid "click on the box to cancel the deletion"
+msgstr ""
+
+msgid "comment"
+msgstr ""
+
+msgid "comment:"
+msgstr ""
+
+msgid "comment_format"
+msgstr "format"
+
+msgid "components"
+msgstr ""
+
+msgid "components_appliname"
+msgstr "application title"
+
+msgid "components_appliname_description"
+msgstr "display the application title in the page's header"
+
+msgid "components_applmessages"
+msgstr "application messages"
+
+msgid "components_applmessages_description"
+msgstr "display the application messages"
+
+msgid "components_breadcrumbs"
+msgstr "breadcrumbs"
+
+msgid "components_breadcrumbs_description"
+msgstr "breadcrumbs bar that displays a path locating the page in the site"
+
+msgid "components_etypenavigation"
+msgstr "filtering by type"
+
+msgid "components_etypenavigation_description"
+msgstr "allows filtering search results by entity type"
+
+msgid "components_help"
+msgstr "help button"
+
+msgid "components_help_description"
+msgstr "the help button on the top right-hand corner"
+
+msgid "components_loggeduserlink"
+msgstr "user link"
+
+msgid "components_loggeduserlink_description"
+msgstr ""
+"for anonymous users, this is a link pointing to authentication form, for "
+"logged in users, this is a link that makes a box appear and listing some "
+"possible user actions"
+
+msgid "components_logo"
+msgstr "icon"
+
+msgid "components_logo_description"
+msgstr "the application's icon displayed in the page's header"
+
+msgid "components_navigation"
+msgstr "page navigation"
+
+msgid "components_navigation_description"
+msgstr "pagination component for large resultsets"
+
+msgid "components_rqlinput"
+msgstr "rql input box"
+
+msgid "components_rqlinput_description"
+msgstr "the rql box in the page's header"
+
+msgid "composite"
+msgstr ""
+
+msgid "condition"
+msgstr ""
+
+msgid "condition:"
+msgstr ""
+
+msgid "condition_object"
+msgstr "condition of"
+
+msgid "confirm password"
+msgstr ""
+
+msgid "constrained_by"
+msgstr "constrained by"
+
+msgid "constrained_by_object"
+msgstr "constraints"
+
+msgid "constraint factory"
+msgstr ""
+
+msgid "constraints"
+msgstr ""
+
+msgid "constraints applying on this relation"
+msgstr ""
+
+msgid "content"
+msgstr ""
+
+msgid "content_format"
+msgstr "content format"
+
+msgid "contentnavigation"
+msgstr "contextual components"
+
+msgid "contentnavigation_breadcrumbs"
+msgstr "breadcrumb"
+
+msgid "contentnavigation_breadcrumbs_description"
+msgstr "breadcrumbs bar that displays a path locating the page in the site"
+
+msgid "contentnavigation_prevnext"
+msgstr "previous / next entity"
+
+msgid "contentnavigation_prevnext_description"
+msgstr ""
+"display link to go from one entity to another on entities implementing the "
+"\"previous/next\" interface."
+
+msgid "contentnavigation_seealso"
+msgstr "see also"
+
+msgid "contentnavigation_seealso_description"
+msgstr ""
+"section containing entities related by the \"see also\" relation on entities "
+"supporting it."
+
+msgid "contentnavigation_wfhistory"
+msgstr "workflow history"
+
+msgid "contentnavigation_wfhistory_description"
+msgstr "show the workflow's history."
+
+msgid "context"
+msgstr ""
+
+msgid "context where this box should be displayed"
+msgstr ""
+
+msgid "context where this component should be displayed"
+msgstr ""
+
+msgid "control subject entity's relations order"
+msgstr ""
+
+msgid "copy"
+msgstr ""
+
+msgid "copy edition"
+msgstr ""
+
+msgid ""
+"core relation giving to a group the permission to add an entity or relation "
+"type"
+msgstr ""
+
+msgid ""
+"core relation giving to a group the permission to delete an entity or "
+"relation type"
+msgstr ""
+
+msgid ""
+"core relation giving to a group the permission to read an entity or relation "
+"type"
+msgstr ""
+
+msgid "core relation giving to a group the permission to update an entity type"
+msgstr ""
+
+msgid "core relation indicating a user's groups"
+msgstr ""
+
+msgid ""
+"core relation indicating owners of an entity. This relation implicitly put "
+"the owner into the owners group for the entity"
+msgstr ""
+
+msgid "core relation indicating the original creator of an entity"
+msgstr ""
+
+msgid "core relation indicating the type of an entity"
+msgstr ""
+
+msgid "cost"
+msgstr ""
+
+msgid "could not connect to the SMTP server"
+msgstr ""
+
+msgid "create an index for quick search on this attribute"
+msgstr ""
+
+msgid "create an index page"
+msgstr ""
+
+msgid "created on"
+msgstr ""
+
+msgid "created_by"
+msgstr "created by"
+
+msgid "created_by_object"
+msgstr "has created"
+
+msgid "creating EConstraint (EFRDef %(linkto)s constrained_by EConstraint)"
+msgstr "creating constraint for attribute %(linkto)s"
+
+msgid "creating EConstraint (ENFRDef %(linkto)s constrained_by EConstraint)"
+msgstr "creating constraint for relation %(linkto)s"
+
+msgid "creating EFRDef (EFRDef relation_type ERType %(linkto)s)"
+msgstr "creating attribute %(linkto)s"
+
+msgid "creating ENFRDef (ENFRDef relation_type ERType %(linkto)s)"
+msgstr "creating relation %(linkto)s"
+
+msgid "creating EProperty (EProperty for_user EUser %(linkto)s)"
+msgstr "creating property for user %(linkto)s"
+
+msgid "creating EUser (EUser in_group EGroup %(linkto)s)"
+msgstr "creating a new user in group %(linkto)s"
+
+msgid "creating EmailAddress (EUser %(linkto)s use_email EmailAddress)"
+msgstr "creating email address for user %(linkto)s"
+
+msgid "creating RQLExpression (EEType %(linkto)s add_permission RQLExpression)"
+msgstr "creating rql expression for add permission on %(linkto)s"
+
+msgid ""
+"creating RQLExpression (EEType %(linkto)s delete_permission RQLExpression)"
+msgstr "creating rql expression for delete permission on %(linkto)s"
+
+msgid ""
+"creating RQLExpression (EEType %(linkto)s read_permission RQLExpression)"
+msgstr "creating rql expression for read permission on %(linkto)s"
+
+msgid ""
+"creating RQLExpression (EEType %(linkto)s update_permission RQLExpression)"
+msgstr "creating rql expression for update permission on %(linkto)s"
+
+msgid "creating RQLExpression (ERType %(linkto)s add_permission RQLExpression)"
+msgstr "creating rql expression for add permission on relations %(linkto)s"
+
+msgid ""
+"creating RQLExpression (ERType %(linkto)s delete_permission RQLExpression)"
+msgstr "creating rql expression for delete permission on relations %(linkto)s"
+
+msgid ""
+"creating RQLExpression (ERType %(linkto)s read_permission RQLExpression)"
+msgstr "creating rql expression for read permission on relations %(linkto)s"
+
+msgid "creating RQLExpression (Transition %(linkto)s condition RQLExpression)"
+msgstr "creating rql expression for transition %(linkto)s"
+
+msgid "creating State (State allowed_transition Transition %(linkto)s)"
+msgstr "creating a state able to trigger transition %(linkto)s"
+
+msgid "creating State (State state_of EEType %(linkto)s)"
+msgstr "creating state for the %(linkto)s entity type"
+
+msgid "creating State (Transition %(linkto)s destination_state State)"
+msgstr "creating destination state for transition %(linkto)s"
+
+msgid "creating Transition (State %(linkto)s allowed_transition Transition)"
+msgstr "creating triggerable transition for state %(linkto)s"
+
+msgid "creating Transition (Transition destination_state State %(linkto)s)"
+msgstr "creating transition leading to state %(linkto)s"
+
+msgid "creating Transition (Transition transition_of EEType %(linkto)s)"
+msgstr "creating transition for the %(linkto)s entity type"
+
+msgid "creation"
+msgstr ""
+
+msgid "creation time of an entity"
+msgstr ""
+
+msgid "creation_date"
+msgstr "creation date"
+
+msgid "cstrtype"
+msgstr "constraint's type"
+
+msgid "cstrtype_object"
+msgstr "used by"
+
+msgid "csv entities export"
+msgstr ""
+
+msgid "csv export"
+msgstr ""
+
+msgid "data directory url"
+msgstr ""
+
+msgid "date"
+msgstr ""
+
+msgid "deactivate"
+msgstr ""
+
+msgid "deactivated"
+msgstr ""
+
+msgid "december"
+msgstr ""
+
+msgid "default"
+msgstr ""
+
+msgid "default text format for rich text fields."
+msgstr ""
+
+msgid "defaultval"
+msgstr "default value"
+
+msgid "define a CubicWeb user"
+msgstr ""
+
+msgid "define a CubicWeb users group"
+msgstr ""
+
+msgid ""
+"define a final relation: link a final relation type from a non final entity "
+"to a final entity type. used to build the application schema"
+msgstr ""
+
+msgid ""
+"define a non final relation: link a non final relation type from a non final "
+"entity to a non final entity type. used to build the application schema"
+msgstr ""
+
+msgid "define a relation type, used to build the application schema"
+msgstr ""
+
+msgid "define a rql expression used to define permissions"
+msgstr ""
+
+msgid "define a schema constraint"
+msgstr ""
+
+msgid "define a schema constraint type"
+msgstr ""
+
+msgid "define an entity type, used to build the application schema"
+msgstr ""
+
+msgid ""
+"defines what's the property is applied for. You must select this first to be "
+"able to set value"
+msgstr ""
+
+msgid "delete"
+msgstr ""
+
+msgid "delete this bookmark"
+msgstr ""
+
+msgid "delete this permission"
+msgstr ""
+
+msgid "delete this relation"
+msgstr ""
+
+msgid "delete_perm"
+msgstr "delete permission"
+
+msgid "delete_permission"
+msgstr "can be deleted by"
+
+msgid "delete_permission_object"
+msgstr "has permission to delete"
+
+#, python-format
+msgid "deleted %(etype)s #%(eid)s (%(title)s)"
+msgstr ""
+
+#, python-format
+msgid ""
+"deleted relation %(rtype)s from %(frometype)s #%(fromeid)s to %(toetype)s #%"
+"(toeid)s"
+msgstr ""
+
+msgid "depends on the constraint type"
+msgstr ""
+
+msgid "description"
+msgstr ""
+
+msgid "description_format"
+msgstr "format"
+
+msgid "destination state for this transition"
+msgstr ""
+
+msgid "destination state of a transition"
+msgstr ""
+
+msgid "destination_state"
+msgstr "destination state"
+
+msgid "destination_state_object"
+msgstr "destination of"
+
+msgid "detach attached file"
+msgstr ""
+
+msgid "detailed schema view"
+msgstr ""
+
+msgid "display order of the action"
+msgstr ""
+
+msgid "display order of the box"
+msgstr ""
+
+msgid "display order of the component"
+msgstr ""
+
+msgid "display the action or not"
+msgstr ""
+
+msgid "display the box or not"
+msgstr ""
+
+msgid "display the component or not"
+msgstr ""
+
+msgid ""
+"distinct label to distinguate between other permission entity of the same "
+"name"
+msgstr ""
+
+msgid "download"
+msgstr ""
+
+msgid "edit bookmarks"
+msgstr ""
+
+msgid "edit the index page"
+msgstr ""
+
+msgid "edition"
+msgstr ""
+
+msgid "eid"
+msgstr ""
+
+msgid "element copied"
+msgstr ""
+
+msgid "element created"
+msgstr ""
+
+msgid "element edited"
+msgstr ""
+
+msgid "email address to use for notification"
+msgstr ""
+
+msgid "emails successfully sent"
+msgstr ""
+
+msgid "embed"
+msgstr ""
+
+msgid "embedding this url is forbidden"
+msgstr ""
+
+msgid "entities deleted"
+msgstr ""
+
+msgid "entity deleted"
+msgstr ""
+
+msgid ""
+"entity type that may be used to construct some advanced security "
+"configuration"
+msgstr ""
+
+msgid "entity types which may use this state"
+msgstr ""
+
+msgid "entity types which may use this transition"
+msgstr ""
+
+msgid "error while embedding page"
+msgstr ""
+
+#, python-format
+msgid "error while handling __method: %s"
+msgstr "error while handling method %s"
+
+msgid "error while publishing ReST text"
+msgstr ""
+
+#, python-format
+msgid "error while querying source %s, some data may be missing"
+msgstr ""
+
+msgid "eta_date"
+msgstr ""
+
+msgid "expected:"
+msgstr ""
+
+msgid "expression"
+msgstr ""
+
+msgid "exprtype"
+msgstr "expression's type"
+
+msgid "external page"
+msgstr ""
+
+msgid "february"
+msgstr ""
+
+msgid "filter"
+msgstr ""
+
+msgid "final"
+msgstr ""
+
+msgid "firstname"
+msgstr ""
+
+msgid "follow"
+msgstr ""
+
+msgid "for_user"
+msgstr "for user"
+
+msgid "for_user_object"
+msgstr "use properties"
+
+msgid "friday"
+msgstr ""
+
+msgid "from"
+msgstr ""
+
+msgid "from_entity"
+msgstr "from entity"
+
+msgid "from_entity_object"
+msgstr "subject relation"
+
+msgid "from_state"
+msgstr "from state"
+
+msgid "from_state_object"
+msgstr "transitions from this state"
+
+msgid "full text or RQL query"
+msgstr ""
+
+msgid "fulltext_container"
+msgstr ""
+
+msgid "fulltextindexed"
+msgstr "fulltext indexed"
+
+msgid "generic plot"
+msgstr ""
+
+msgid "go back to the index page"
+msgstr ""
+
+msgid "granted to groups"
+msgstr ""
+
+msgid "graphical representation of the application'schema"
+msgstr ""
+
+#, python-format
+msgid "graphical schema for %s"
+msgstr ""
+
+#, python-format
+msgid "graphical workflow for %s"
+msgstr ""
+
+msgid "group in which a user should be to be allowed to pass this transition"
+msgstr ""
+
+msgid "groups"
+msgstr ""
+
+msgid "groups allowed to add entities/relations of this type"
+msgstr ""
+
+msgid "groups allowed to delete entities/relations of this type"
+msgstr ""
+
+msgid "groups allowed to read entities/relations of this type"
+msgstr ""
+
+msgid "groups allowed to update entities of this type"
+msgstr ""
+
+msgid "groups grant permissions to the user"
+msgstr ""
+
+msgid "groups to which the permission is granted"
+msgstr ""
+
+msgid "groups:"
+msgstr ""
+
+msgid "guests"
+msgstr ""
+
+msgid "hCalendar"
+msgstr ""
+
+msgid "has_text"
+msgstr "has text"
+
+msgid "help"
+msgstr ""
+
+msgid "hide filter form"
+msgstr ""
+
+msgid "hide meta-data"
+msgstr ""
+
+msgid "home"
+msgstr ""
+
+msgid ""
+"how to format date and time in the ui (\"man strftime\" for format "
+"description)"
+msgstr ""
+
+msgid "how to format date in the ui (\"man strftime\" for format description)"
+msgstr ""
+
+msgid "how to format float numbers in the ui"
+msgstr ""
+
+msgid "how to format time in the ui (\"man strftime\" for format description)"
+msgstr ""
+
+msgid "html class of the component"
+msgstr ""
+
+msgid "htmlclass"
+msgstr ""
+
+msgid "i18n_login_popup"
+msgstr "login"
+
+msgid "i18nprevnext_next"
+msgstr "next"
+
+msgid "i18nprevnext_previous"
+msgstr "previous"
+
+msgid "i18nprevnext_up"
+msgstr "up"
+
+msgid "iCalendar"
+msgstr ""
+
+msgid "id of main template used to render pages"
+msgstr ""
+
+msgid "identical_to"
+msgstr "identical to"
+
+msgid "identity"
+msgstr ""
+
+msgid "identity_object"
+msgstr "identity"
+
+msgid ""
+"if full text content of subject/object entity should be added to other side "
+"entity (the container)."
+msgstr ""
+
+msgid "image"
+msgstr ""
+
+msgid "in memory entity schema"
+msgstr ""
+
+msgid "in memory relation schema"
+msgstr ""
+
+msgid "in_group"
+msgstr "in group"
+
+msgid "in_group_object"
+msgstr "contains"
+
+msgid "in_state"
+msgstr "in state"
+
+msgid "in_state_object"
+msgstr "state of"
+
+msgid "incontext"
+msgstr ""
+
+#, python-format
+msgid "incorrect value (%r) for type \"%s\""
+msgstr ""
+
+msgid "index"
+msgstr ""
+
+msgid "index this attribute's value in the plain text index"
+msgstr ""
+
+msgid "indexed"
+msgstr ""
+
+msgid "indicate the current state of an entity"
+msgstr ""
+
+msgid ""
+"indicate which state should be used by default when an entity using states "
+"is created"
+msgstr ""
+
+#, python-format
+msgid "initial estimation %s"
+msgstr ""
+
+msgid "initial state for entities of this type"
+msgstr ""
+
+msgid "initial_state"
+msgstr "initial state"
+
+msgid "initial_state_object"
+msgstr "initial state of"
+
+msgid "inlined"
+msgstr ""
+
+msgid "internationalizable"
+msgstr ""
+
+#, python-format
+msgid "invalid action %r"
+msgstr ""
+
+msgid "invalid date"
+msgstr ""
+
+msgid "is"
+msgstr ""
+
+msgid "is it an application entity type or not ?"
+msgstr ""
+
+msgid "is it an application relation type or not ?"
+msgstr ""
+
+msgid ""
+"is the subject/object entity of the relation composed of the other ? This "
+"implies that when the composite is deleted, composants are also deleted."
+msgstr ""
+
+msgid "is this attribute's value translatable"
+msgstr ""
+
+msgid "is this relation equivalent in both direction ?"
+msgstr ""
+
+msgid ""
+"is this relation physically inlined? you should know what you're doing if "
+"you are changing this!"
+msgstr ""
+
+msgid "is_object"
+msgstr "has instances"
+
+msgid "january"
+msgstr ""
+
+msgid "july"
+msgstr ""
+
+msgid "june"
+msgstr ""
+
+msgid "label"
+msgstr ""
+
+msgid "language of the user interface"
+msgstr ""
+
+msgid "last connection date"
+msgstr ""
+
+msgid "last_login_time"
+msgstr "last login time"
+
+msgid "latest modification time of an entity"
+msgstr ""
+
+msgid "latest update on"
+msgstr ""
+
+msgid "left"
+msgstr ""
+
+msgid ""
+"link a property to the user which want this property customization. Unless "
+"you're a site manager, this relation will be handled automatically."
+msgstr ""
+
+msgid "link a relation definition to its object entity type"
+msgstr ""
+
+msgid "link a relation definition to its relation type"
+msgstr ""
+
+msgid "link a relation definition to its subject entity type"
+msgstr ""
+
+msgid "link a state to one or more entity type"
+msgstr ""
+
+msgid "link a transition information to its object"
+msgstr ""
+
+msgid "link a transition to one or more entity type"
+msgstr ""
+
+msgid "link to each item in"
+msgstr ""
+
+msgid "list"
+msgstr ""
+
+msgid "log in"
+msgstr ""
+
+msgid "login"
+msgstr ""
+
+msgid "login_action"
+msgstr "log in"
+
+msgid "logout"
+msgstr ""
+
+#, python-format
+msgid "loop in %s relation (%s)"
+msgstr ""
+
+msgid "main informations"
+msgstr ""
+
+msgid "mainvars"
+msgstr ""
+
+msgid "manage"
+msgstr ""
+
+msgid "manage bookmarks"
+msgstr ""
+
+msgid "manage security"
+msgstr ""
+
+msgid "managers"
+msgstr ""
+
+msgid "march"
+msgstr ""
+
+msgid "maximum number of characters in short description"
+msgstr ""
+
+msgid "maximum number of entities to display in related combo box"
+msgstr ""
+
+msgid "maximum number of objects displayed by page of results"
+msgstr ""
+
+msgid "maximum number of related entities to display in the primary view"
+msgstr ""
+
+msgid "may"
+msgstr ""
+
+msgid "meta"
+msgstr "meta relation"
+
+msgid "milestone"
+msgstr ""
+
+#, python-format
+msgid "missing parameters for entity %s"
+msgstr ""
+
+msgid "modification_date"
+msgstr "modification date"
+
+msgid "modify"
+msgstr ""
+
+msgid "monday"
+msgstr ""
+
+msgid "more actions"
+msgstr ""
+
+msgid "multiple edit"
+msgstr ""
+
+msgid "name"
+msgstr ""
+
+msgid ""
+"name of the main variables which should be used in the selection if "
+"necessary (comma separated)"
+msgstr ""
+
+msgid "name or identifier of the permission"
+msgstr ""
+
+msgid "navbottom"
+msgstr "page bottom"
+
+msgid "navcontentbottom"
+msgstr "page bottom, within main content"
+
+msgid "navcontenttop"
+msgstr "page top, within main content"
+
+msgid "navigation"
+msgstr ""
+
+msgid "navtop"
+msgstr "page top"
+
+msgid "new"
+msgstr ""
+
+msgid "next_results"
+msgstr "next results"
+
+msgid "no"
+msgstr ""
+
+msgid "no associated epermissions"
+msgstr ""
+
+msgid "no related project"
+msgstr ""
+
+msgid "no selected entities"
+msgstr ""
+
+#, python-format
+msgid "no such entity type %s"
+msgstr ""
+
+msgid "no version information"
+msgstr ""
+
+msgid "not authorized"
+msgstr ""
+
+msgid "not specified"
+msgstr ""
+
+msgid "not the initial state for this entity"
+msgstr ""
+
+msgid "nothing to edit"
+msgstr ""
+
+msgid "november"
+msgstr ""
+
+msgid "object"
+msgstr ""
+
+msgid "october"
+msgstr ""
+
+msgid "one month"
+msgstr ""
+
+msgid "one week"
+msgstr ""
+
+msgid "oneline"
+msgstr "one-line"
+
+msgid "only select queries are authorized"
+msgstr ""
+
+msgid "order"
+msgstr ""
+
+msgid "ordernum"
+msgstr "order"
+
+msgid "owned by"
+msgstr ""
+
+msgid "owned_by"
+msgstr "owned by"
+
+msgid "owned_by_object"
+msgstr "owns"
+
+msgid "owners"
+msgstr ""
+
+msgid "ownership"
+msgstr ""
+
+msgid "ownerships have been changed"
+msgstr ""
+
+msgid "pageid-not-found"
+msgstr ""
+"some necessary data seem expired, please reload the page and try again."
+
+msgid "password"
+msgstr ""
+
+msgid "password and confirmation don't match"
+msgstr ""
+
+msgid "path"
+msgstr ""
+
+msgid "permission"
+msgstr ""
+
+msgid "permissions for this entity"
+msgstr ""
+
+msgid "personnal informations"
+msgstr ""
+
+msgid "pick existing bookmarks"
+msgstr ""
+
+msgid "pkey"
+msgstr "key"
+
+msgid "please correct errors below"
+msgstr ""
+
+msgid "please correct the following errors:"
+msgstr ""
+
+msgid "possible views"
+msgstr ""
+
+msgid "preferences"
+msgstr ""
+
+msgid "previous_results"
+msgstr "previous results"
+
+msgid "primary"
+msgstr ""
+
+msgid "primary_email"
+msgstr "primary email"
+
+msgid "primary_email_object"
+msgstr "primary email of"
+
+msgid "progress"
+msgstr ""
+
+msgid "progress bar"
+msgstr ""
+
+msgid "project"
+msgstr ""
+
+msgid "read"
+msgstr ""
+
+msgid "read_perm"
+msgstr ""
+
+msgid "read_permission"
+msgstr "can be read by"
+
+msgid "read_permission_object"
+msgstr "has permission to read"
+
+#, python-format
+msgid "relation %(relname)s of %(ent)s"
+msgstr ""
+
+msgid "relation_type"
+msgstr "relation type"
+
+msgid "relation_type_object"
+msgstr "relation definitions"
+
+msgid "relations deleted"
+msgstr ""
+
+msgid "relative url of the bookmarked page"
+msgstr ""
+
+msgid "remove this Bookmark"
+msgstr "remove this bookmark"
+
+msgid "remove this Card"
+msgstr "remove this card"
+
+msgid "remove this EConstraint"
+msgstr "remove this constraint"
+
+msgid "remove this EConstraintType"
+msgstr "remove this constraint type"
+
+msgid "remove this EEType"
+msgstr "remove this entity type"
+
+msgid "remove this EFRDef"
+msgstr "remove this attribute"
+
+msgid "remove this EGroup"
+msgstr "remove this group"
+
+msgid "remove this ENFRDef"
+msgstr "remove this relation"
+
+msgid "remove this EPermission"
+msgstr "remove this permission"
+
+msgid "remove this EProperty"
+msgstr "remove this property"
+
+msgid "remove this ERType"
+msgstr "remove this relation type"
+
+msgid "remove this EUser"
+msgstr "remove this user"
+
+msgid "remove this EmailAddress"
+msgstr "remove this email address"
+
+msgid "remove this RQLExpression"
+msgstr "remove this RQL expression"
+
+msgid "remove this State"
+msgstr "remove this state"
+
+msgid "remove this TrInfo"
+msgstr "remove this transition information"
+
+msgid "remove this Transition"
+msgstr "remove this transition"
+
+msgid "require_group"
+msgstr "require the group"
+
+msgid "require_group_object"
+msgstr "required by"
+
+msgid "required attribute"
+msgstr ""
+
+msgid "required field"
+msgstr ""
+
+msgid ""
+"restriction part of a rql query. For entity rql expression, X and U are "
+"predefined respectivly to the current object and to the request user. For "
+"relation rql expression, S, O and U are predefined respectivly to the "
+"current relation'subject, object and to the request user. "
+msgstr ""
+
+msgid "revert changes"
+msgstr ""
+
+msgid "right"
+msgstr ""
+
+msgid "rql expression allowing to add entities/relations of this type"
+msgstr ""
+
+msgid "rql expression allowing to delete entities/relations of this type"
+msgstr ""
+
+msgid "rql expression allowing to read entities/relations of this type"
+msgstr ""
+
+msgid "rql expression allowing to update entities of this type"
+msgstr ""
+
+msgid "rql expressions"
+msgstr ""
+
+msgid "rss"
+msgstr ""
+
+msgid "sample format"
+msgstr ""
+
+msgid "saturday"
+msgstr ""
+
+msgid "schema entities"
+msgstr ""
+
+msgid "schema's permissions definitions"
+msgstr ""
+
+msgid "search"
+msgstr ""
+
+msgid "searching for"
+msgstr ""
+
+msgid "secondary"
+msgstr ""
+
+msgid "security"
+msgstr ""
+
+msgid "see them all"
+msgstr ""
+
+msgid "select"
+msgstr ""
+
+msgid "select a"
+msgstr ""
+
+msgid "select a relation"
+msgstr ""
+
+msgid "select this entity"
+msgstr ""
+
+msgid "semantic description of this attribute"
+msgstr ""
+
+msgid "semantic description of this entity type"
+msgstr ""
+
+msgid "semantic description of this relation"
+msgstr ""
+
+msgid "semantic description of this relation type"
+msgstr ""
+
+msgid "semantic description of this state"
+msgstr ""
+
+msgid "semantic description of this transition"
+msgstr ""
+
+msgid "send email"
+msgstr ""
+
+msgid "september"
+msgstr ""
+
+msgid "server debug information"
+msgstr ""
+
+msgid "server information"
+msgstr ""
+
+msgid ""
+"should html fields being edited using fckeditor (a HTML WYSIWYG editor).  "
+"You should also select text/html as default text format to actually get "
+"fckeditor."
+msgstr ""
+
+#, python-format
+msgid "show %s results"
+msgstr ""
+
+msgid "show filter form"
+msgstr ""
+
+msgid "show meta-data"
+msgstr ""
+
+msgid "site configuration"
+msgstr ""
+
+msgid "site documentation"
+msgstr ""
+
+msgid "site schema"
+msgstr ""
+
+msgid "site title"
+msgstr ""
+
+msgid "site-wide property can't be set for user"
+msgstr ""
+
+msgid "sorry, the server is unable to handle this query"
+msgstr ""
+
+msgid "startup views"
+msgstr ""
+
+msgid "state"
+msgstr ""
+
+msgid "state_of"
+msgstr "state of"
+
+msgid "state_of_object"
+msgstr "use states"
+
+msgid "status change"
+msgstr ""
+
+msgid "status changed"
+msgstr ""
+
+#, python-format
+msgid "status will change from %s to %s"
+msgstr ""
+
+msgid "subject"
+msgstr ""
+
+msgid "subject/object cardinality"
+msgstr ""
+
+msgid "sunday"
+msgstr ""
+
+msgid "surname"
+msgstr ""
+
+msgid "symetric"
+msgstr ""
+
+msgid "synopsis"
+msgstr ""
+
+msgid "system entities"
+msgstr ""
+
+msgid "table"
+msgstr ""
+
+msgid "task progression"
+msgstr ""
+
+msgid "text"
+msgstr ""
+
+msgid "text/cubicweb-page-template"
+msgstr "cubicweb page template"
+
+msgid "text/html"
+msgstr "html"
+
+msgid "text/plain"
+msgstr "plain text"
+
+msgid "text/rest"
+msgstr "ReST text"
+
+msgid "the prefered email"
+msgstr ""
+
+#, python-format
+msgid "the value \"%s\" is already used, use another one"
+msgstr ""
+
+msgid "this action is not reversible!"
+msgstr ""
+
+msgid "this entity is currently owned by"
+msgstr ""
+
+msgid "this resource does not exist"
+msgstr ""
+
+msgid "thursday"
+msgstr ""
+
+msgid "timestamp of the latest source synchronization."
+msgstr ""
+
+msgid "timetable"
+msgstr ""
+
+msgid "title"
+msgstr ""
+
+msgid "to"
+msgstr ""
+
+msgid "to associate with"
+msgstr ""
+
+msgid "to_entity"
+msgstr "to entity"
+
+msgid "to_entity_object"
+msgstr "object relations"
+
+msgid "to_state"
+msgstr "to state"
+
+msgid "to_state_object"
+msgstr "transitions to this state"
+
+msgid "todo_by"
+msgstr "to do by"
+
+msgid "transition is not allowed"
+msgstr ""
+
+msgid "transition_of"
+msgstr "transition of"
+
+msgid "transition_of_object"
+msgstr "use transitions"
+
+msgid "tuesday"
+msgstr ""
+
+msgid "type"
+msgstr ""
+
+msgid "ui"
+msgstr ""
+
+msgid "unauthorized value"
+msgstr ""
+
+msgid "unique identifier used to connect to the application"
+msgstr ""
+
+msgid "unknown property key"
+msgstr ""
+
+msgid "upassword"
+msgstr "password"
+
+msgid "update"
+msgstr ""
+
+msgid "update_perm"
+msgstr "update permission"
+
+msgid "update_permission"
+msgstr "can be updated by"
+
+msgid "update_permission_object"
+msgstr "has permission to update"
+
+#, python-format
+msgid "updated %(etype)s #%(eid)s (%(title)s)"
+msgstr ""
+
+msgid "use template languages"
+msgstr ""
+
+msgid ""
+"use to define a transition from one or multiple states to a destination "
+"states in workflow's definitions."
+msgstr ""
+
+msgid "use_email"
+msgstr "use email"
+
+msgid "use_email_object"
+msgstr "used by"
+
+msgid "use_template_format"
+msgstr "use template format"
+
+msgid ""
+"used for cubicweb configuration. Once a property has been created you can't "
+"change the key."
+msgstr ""
+
+msgid ""
+"used to associate simple states to an entity type and/or to define workflows"
+msgstr ""
+
+msgid "used to grant a permission to a group"
+msgstr ""
+
+#, python-format
+msgid ""
+"user %s has made the following change(s):\n"
+"\n"
+msgstr ""
+
+msgid ""
+"user for which this property is applying. If this relation is not set, the "
+"property is considered as a global property"
+msgstr ""
+
+msgid "user interface encoding"
+msgstr ""
+
+msgid "user preferences"
+msgstr ""
+
+msgid "users"
+msgstr ""
+
+msgid "users using this bookmark"
+msgstr ""
+
+msgid "validate modifications on selected items"
+msgstr ""
+
+msgid "validating..."
+msgstr ""
+
+msgid "value"
+msgstr ""
+
+msgid "value associated to this key is not editable manually"
+msgstr ""
+
+msgid "vcard"
+msgstr ""
+
+msgid "view"
+msgstr ""
+
+msgid "view all"
+msgstr ""
+
+msgid "view detail for this entity"
+msgstr ""
+
+msgid "view workflow"
+msgstr ""
+
+msgid "views"
+msgstr ""
+
+msgid "visible"
+msgstr ""
+
+msgid "wednesday"
+msgstr ""
+
+msgid "week"
+msgstr ""
+
+#, python-format
+msgid "welcome %s !"
+msgstr ""
+
+msgid "wf_info_for"
+msgstr "record for"
+
+msgid "wf_info_for_object"
+msgstr "workflow history"
+
+msgid ""
+"when multiple addresses are equivalent (such as python-projects@logilab.org "
+"and python-projects@lists.logilab.org), set this to true on one of them "
+"which is the preferred form."
+msgstr ""
+
+msgid "wikiid"
+msgstr "wiki identifier"
+
+#, python-format
+msgid "workflow for %s"
+msgstr ""
+
+msgid "xbel"
+msgstr ""
+
+msgid "xml"
+msgstr ""
+
+msgid "yes"
+msgstr ""
+
+msgid "you have been logged out"
+msgstr ""
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/i18n/entities.pot	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,26 @@
+msgid "__msg state changed"
+msgstr ""
+
+msgid "managers"
+msgstr ""
+
+msgid "users"
+msgstr ""
+
+msgid "guests"
+msgstr ""
+
+msgid "owners"
+msgstr ""
+
+msgid "read_perm"
+msgstr ""
+
+msgid "add_perm"
+msgstr ""
+
+msgid "update_perm"
+msgstr ""
+
+msgid "delete_perm"
+msgstr ""
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/i18n/fr.po	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,2793 @@
+# cubicweb i18n catalog
+# Copyright 2003-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# Logilab <contact@logilab.fr>
+msgid ""
+msgstr ""
+"Project-Id-Version: cubicweb 2.46.0\n"
+"PO-Revision-Date: 2008-03-28 18:14+0100\n"
+"Last-Translator: Logilab Team <contact@logilab.fr>\n"
+"Language-Team: fr <contact@logilab.fr>\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+
+#, python-format
+msgid ""
+"\n"
+"%(user)s changed status from <%(previous_state)s> to <%(current_state)s> for "
+"entity\n"
+"'%(title)s'\n"
+"\n"
+"%(comment)s\n"
+"\n"
+"url: %(url)s\n"
+msgstr ""
+"\n"
+"%(user)s a changé l'état de <%(previous_state)s> vers <%(current_state)s> "
+"pour l'entité\n"
+"'%(title)s'\n"
+"\n"
+"%(comment)s\n"
+"\n"
+"url: %(url)s\n"
+
+#, python-format
+msgid "  from state %(fromstate)s to state %(tostate)s\n"
+msgstr "  de l'état %(fromstate)s vers l'état %(tostate)s\n"
+
+#, python-format
+msgid "%(firstname)s %(surname)s"
+msgstr "%(firstname)s %(surname)s"
+
+#, python-format
+msgid "%(subject)s %(etype)s #%(eid)s (%(login)s)"
+msgstr "%(subject)s %(etype)s #%(eid)s (%(login)s)"
+
+#, python-format
+msgid "%d months"
+msgstr "%d mois"
+
+#, python-format
+msgid "%d weeks"
+msgstr "%d semaines"
+
+#, python-format
+msgid "%d years"
+msgstr "%d années"
+
+#, python-format
+msgid "%s constraint failed for value %r"
+msgstr "la contrainte %s n'est pas respectée par la valeur %r"
+
+#, python-format
+msgid "%s days"
+msgstr "%s jours"
+
+#, python-format
+msgid "%s error report"
+msgstr "%s rapport d'erreur"
+
+#, python-format
+msgid "%s hours"
+msgstr "%s heures"
+
+#, python-format
+msgid "%s minutes"
+msgstr "%s minutes"
+
+#, python-format
+msgid "%s not estimated"
+msgstr "%s non estimé(s)"
+
+#, python-format
+msgid "%s results matching query"
+msgstr "%s résultats pour la requête"
+
+#, python-format
+msgid "%s seconds"
+msgstr "%s secondes"
+
+#, python-format
+msgid "%s software version of the database"
+msgstr "version logicielle de la base pour %s"
+
+#, python-format
+msgid "%s, or without time: %s"
+msgstr "%s, ou bien sans préciser d'heure: %s"
+
+msgid "**"
+msgstr "0..n 0..n"
+
+msgid "*+"
+msgstr "0..n 1..n"
+
+msgid "*1"
+msgstr "0..n 1"
+
+msgid "*?"
+msgstr "0..n 0..1"
+
+msgid "+*"
+msgstr "1..n 0..n"
+
+msgid "++"
+msgstr "1..n 1..n"
+
+msgid "+1"
+msgstr "1..n 1"
+
+msgid "+?"
+msgstr "1..n 0..1"
+
+msgid "1*"
+msgstr "1 0..n"
+
+msgid "1+"
+msgstr "1 1..n"
+
+msgid "11"
+msgstr "1 1"
+
+msgid "1?"
+msgstr "1 0..1"
+
+msgid "?*"
+msgstr "0..1 0..n"
+
+msgid "?+"
+msgstr "0..1 1..n"
+
+msgid "?1"
+msgstr "0..1 1"
+
+msgid "??"
+msgstr "0..1 0..1"
+
+msgid "AND"
+msgstr "ET"
+
+msgid "Any"
+msgstr "N'importe"
+
+msgid "Application"
+msgstr "Application"
+
+msgid "Bookmark"
+msgstr "Signet"
+
+msgid "Bookmark_plural"
+msgstr "Signets"
+
+msgid "Boolean"
+msgstr "Booléen"
+
+msgid "Boolean_plural"
+msgstr "Booléen"
+
+msgid "Browse by category"
+msgstr "Naviguer par catégorie"
+
+msgid "Bytes"
+msgstr "Données binaires"
+
+msgid "Bytes_plural"
+msgstr "Données binaires"
+
+msgid "Card"
+msgstr "Fiche"
+
+msgid "Card_plural"
+msgstr "Fiches"
+
+msgid "Date"
+msgstr "Date"
+
+msgid "Date_plural"
+msgstr "Dates"
+
+msgid "Datetime"
+msgstr "Date et heure"
+
+msgid "Datetime_plural"
+msgstr "Date et heure"
+
+#, python-format
+msgid "Debug level set to %s"
+msgstr "Niveau de debug mis à %s"
+
+msgid "Decimal"
+msgstr "Nombre décimal"
+
+msgid "Decimal_plural"
+msgstr "Nombres décimaux"
+
+msgid "Do you want to delete the following element(s) ?"
+msgstr "Voulez-vous supprimer le(s) élément(s) suivant(s) ?"
+
+msgid "EConstraint"
+msgstr "Contrainte"
+
+msgid "EConstraintType"
+msgstr "Type de contrainte"
+
+msgid "EConstraintType_plural"
+msgstr "Types de contrainte"
+
+msgid "EConstraint_plural"
+msgstr "Contraintes"
+
+msgid "EEType"
+msgstr "Type d'entité"
+
+msgid "EEType_plural"
+msgstr "Types d'entité"
+
+msgid "EFRDef"
+msgstr "Attribut"
+
+msgid "EFRDef_plural"
+msgstr "Attributs"
+
+msgid "EGroup"
+msgstr "Groupe"
+
+msgid "EGroup_plural"
+msgstr "Groupes"
+
+msgid "ENFRDef"
+msgstr "Relation"
+
+msgid "ENFRDef_plural"
+msgstr "Relations"
+
+msgid "EPermission"
+msgstr "Permission"
+
+msgid "EPermission_plural"
+msgstr "Permissions"
+
+msgid "EProperty"
+msgstr "Propriété"
+
+msgid "EProperty_plural"
+msgstr "Propriétés"
+
+msgid "ERType"
+msgstr "Type de relation"
+
+msgid "ERType_plural"
+msgstr "Types de relation"
+
+msgid "EUser"
+msgstr "Utilisateur"
+
+msgid "EUser_plural"
+msgstr "Utilisateurs"
+
+msgid "Email body: "
+msgstr "Contenu du courriel : "
+
+msgid "EmailAddress"
+msgstr "Adresse électronique"
+
+msgid "EmailAddress_plural"
+msgstr "Adresses électroniques"
+
+msgid "Entities"
+msgstr "entités"
+
+msgid "Environment"
+msgstr "Environnement"
+
+msgid "Float"
+msgstr "Nombre flottant"
+
+msgid "Float_plural"
+msgstr "Nombres flottants"
+
+msgid "From: "
+msgstr "De : "
+
+msgid "Int"
+msgstr "Nombre entier"
+
+msgid "Int_plural"
+msgstr "Nombres entiers"
+
+msgid "Interval"
+msgstr "Durée"
+
+msgid "Interval_plural"
+msgstr "Durées"
+
+msgid "New Bookmark"
+msgstr "Nouveau signet"
+
+msgid "New Card"
+msgstr "Nouvelle fiche"
+
+msgid "New EConstraint"
+msgstr "Nouvelle contrainte"
+
+msgid "New EConstraintType"
+msgstr "Nouveau type de contrainte"
+
+msgid "New EEType"
+msgstr "Nouveau type d'entité"
+
+msgid "New EFRDef"
+msgstr "Nouvelle définition de relation finale"
+
+msgid "New EGroup"
+msgstr "Nouveau groupe"
+
+msgid "New ENFRDef"
+msgstr "Nouvelle définition de relation non finale"
+
+msgid "New EPermission"
+msgstr "Nouvelle permission"
+
+msgid "New EProperty"
+msgstr "Nouvelle propriété"
+
+msgid "New ERType"
+msgstr "Nouveau type de relation"
+
+msgid "New EUser"
+msgstr "Nouvel utilisateur"
+
+msgid "New EmailAddress"
+msgstr "Nouvelle adresse électronique"
+
+msgid "New RQLExpression"
+msgstr "Nouvelle expression rql"
+
+msgid "New State"
+msgstr "Nouvel état"
+
+msgid "New TrInfo"
+msgstr "Nouvelle information de transition"
+
+msgid "New Transition"
+msgstr "Nouvelle transition"
+
+msgid "No query has been executed"
+msgstr "Aucune requête n'a été exécutée"
+
+msgid "No result matching query"
+msgstr "aucun résultat"
+
+msgid "OR"
+msgstr "OU"
+
+msgid "Password"
+msgstr "Mot de passe"
+
+msgid "Password_plural"
+msgstr "Mots de passe"
+
+msgid "Please note that this is only a shallow copy"
+msgstr "Attention, cela n'effectue qu'une copie de surface"
+
+msgid "Problem occured"
+msgstr "Une erreur est survenue"
+
+msgid "RQLExpression"
+msgstr "Expression RQL"
+
+msgid "RQLExpression_plural"
+msgstr "Expressions RQL"
+
+msgid "Recipients: "
+msgstr "Destinataires : "
+
+msgid "Relations"
+msgstr "Relations"
+
+msgid "Request"
+msgstr "Requête"
+
+#, python-format
+msgid "Schema %s"
+msgstr "Schéma %s"
+
+msgid "Search for"
+msgstr "Rechercher"
+
+msgid "Server"
+msgstr "Serveur"
+
+msgid "Startup views"
+msgstr "Vues de départ"
+
+msgid "State"
+msgstr "État"
+
+msgid "State_plural"
+msgstr "États"
+
+msgid "String"
+msgstr "Chaîne de caractères"
+
+msgid "String_plural"
+msgstr "Chaînes de caractères"
+
+msgid "Subject: "
+msgstr "Sujet : "
+
+msgid "Submit bug report"
+msgstr "Soumettre un rapport de bug"
+
+msgid "Submit bug report by mail"
+msgstr "Soumettre ce rapport par email"
+
+msgid "The repository holds the following entities"
+msgstr "Le dépôt contient les entités suivantes"
+
+#, python-format
+msgid "The view %s can not be applied to this query"
+msgstr "La vue %s ne peut être appliquée à cette requête"
+
+#, python-format
+msgid "The view %s could not be found"
+msgstr "La vue %s est introuvable"
+
+#, python-format
+msgid "This %s"
+msgstr "Ce %s"
+
+msgid "This Bookmark"
+msgstr "Ce signet"
+
+msgid "This Card"
+msgstr "Cette fiche"
+
+msgid "This EConstraint"
+msgstr "Cette contrainte"
+
+msgid "This EConstraintType"
+msgstr "Ce type de contrainte"
+
+msgid "This EEType"
+msgstr "Ce type d'entité"
+
+msgid "This EFRDef"
+msgstr "Cette définition de relation finale"
+
+msgid "This EGroup"
+msgstr "Ce groupe"
+
+msgid "This ENFRDef"
+msgstr "Cette définition de relation non finale"
+
+msgid "This EPermission"
+msgstr "Cette permission"
+
+msgid "This EProperty"
+msgstr "Cette propriété"
+
+msgid "This ERType"
+msgstr "Ce type de relation"
+
+msgid "This EUser"
+msgstr "Cet utilisateur"
+
+msgid "This EmailAddress"
+msgstr "Cette adresse électronique"
+
+msgid "This RQLExpression"
+msgstr "Cette expression RQL"
+
+msgid "This State"
+msgstr "Cet état"
+
+msgid "This TrInfo"
+msgstr "Cette information de transition"
+
+msgid "This Transition"
+msgstr "Cette transition"
+
+msgid "Time"
+msgstr "Heure"
+
+msgid "Time_plural"
+msgstr "Heures"
+
+msgid "TrInfo"
+msgstr "Information transition"
+
+msgid "TrInfo_plural"
+msgstr "Information transitions"
+
+msgid "Transition"
+msgstr "Transition"
+
+msgid "Transition_plural"
+msgstr "Transitions"
+
+#, python-format
+msgid "Unable to find anything named \"%s\" in the schema !"
+msgstr "Rien de nommé \"%s\" dans le schéma"
+
+msgid "Used by:"
+msgstr "Utilisé par :"
+
+msgid "What's new?"
+msgstr "Nouveautés"
+
+msgid "Workflow history"
+msgstr "Historique des changements d'état"
+
+msgid "You are not connected to an application !"
+msgstr "Vous n'êtes pas connecté à une application"
+
+#, python-format
+msgid "You are now connected to %s"
+msgstr "Vous êtes connecté à l'application %s"
+
+msgid "You can use any of the following substitutions in your text"
+msgstr ""
+"Vous pouvez utiliser n'importe quelle substitution parmi la liste suivante "
+"dans le contenu de votre courriel."
+
+msgid "You have no access to this view or it's not applyable to current data"
+msgstr "Vous n'avez pas accès à cette vue ou elle ne s'applique pas aux données"
+
+#, python-format
+msgid "[%s supervision] changes summary"
+msgstr "[%s supervision] description des changements"
+
+msgid "__msg state changed"
+msgstr "l'état a été changé"
+
+msgid ""
+"a RQL expression which should return some results, else the transition won't "
+"be available. This query may use X and U variables that will respectivly "
+"represents the current entity and the current user"
+msgstr ""
+"une expression RQL devant retourner des résultats pour que la transition "
+"puisse être passée. Cette expression peut utiliser les variables X et U qui "
+"représente respectivement l'entité à laquelle on veut appliquer la "
+"transition et l'utilisateur courant."
+
+msgid ""
+"a card is a textual content used as documentation, reference, procedure "
+"reminder"
+msgstr ""
+"une fiche est un texte utilisé comme documentation, référence, rappel de "
+"procédure..."
+
+msgid "about this site"
+msgstr "à propos de ce site"
+
+msgid "access type"
+msgstr "type d'accès"
+
+msgid "account state"
+msgstr "état du compte"
+
+msgid "action(s) on this selection"
+msgstr "action(s) sur cette sélection"
+
+msgid "actions"
+msgstr "actions"
+
+msgid "actions_addentity"
+msgstr "ajouter une entité de ce type"
+
+msgid "actions_addentity_description"
+msgstr ""
+
+msgid "actions_cancel"
+msgstr "annuler la sélection"
+
+msgid "actions_cancel_description"
+msgstr ""
+
+msgid "actions_copy"
+msgstr "copier"
+
+msgid "actions_copy_description"
+msgstr ""
+
+msgid "actions_delete"
+msgstr "supprimer"
+
+msgid "actions_delete_description"
+msgstr ""
+
+msgid "actions_edit"
+msgstr "modifier"
+
+msgid "actions_edit_description"
+msgstr ""
+
+msgid "actions_embed"
+msgstr "embarquer"
+
+msgid "actions_embed_description"
+msgstr ""
+
+msgid "actions_follow"
+msgstr "suivre"
+
+msgid "actions_follow_description"
+msgstr ""
+
+msgid "actions_logout"
+msgstr "se déconnecter"
+
+msgid "actions_logout_description"
+msgstr ""
+
+msgid "actions_manage"
+msgstr "gestion du site"
+
+msgid "actions_manage_description"
+msgstr ""
+
+msgid "actions_muledit"
+msgstr "édition multiple"
+
+msgid "actions_muledit_description"
+msgstr ""
+
+msgid "actions_myinfos"
+msgstr "informations personnelles"
+
+msgid "actions_myinfos_description"
+msgstr ""
+
+msgid "actions_myprefs"
+msgstr "préférences utilisateur"
+
+msgid "actions_myprefs_description"
+msgstr ""
+
+msgid "actions_schema"
+msgstr "voir le schéma"
+
+msgid "actions_schema_description"
+msgstr ""
+
+msgid "actions_select"
+msgstr "sélectionner"
+
+msgid "actions_select_description"
+msgstr ""
+
+msgid "actions_sendemail"
+msgstr "envoyer un email"
+
+msgid "actions_sendemail_description"
+msgstr ""
+
+msgid "actions_siteconfig"
+msgstr "configuration du site"
+
+msgid "actions_siteconfig_description"
+msgstr ""
+
+msgid "actions_view"
+msgstr "voir"
+
+msgid "actions_view_description"
+msgstr ""
+
+msgid "actions_workflow"
+msgstr "voir le workflow"
+
+msgid "actions_workflow_description"
+msgstr ""
+
+msgid "activate"
+msgstr "activer"
+
+msgid "activated"
+msgstr "activé"
+
+msgid "add"
+msgstr "ajouter"
+
+msgid "add EEType add_permission RQLExpression subject"
+msgstr "définir une expression RQL d'ajout"
+
+msgid "add EEType delete_permission RQLExpression subject"
+msgstr "définir une expression RQL de suppression"
+
+msgid "add EEType read_permission RQLExpression subject"
+msgstr "définir une expression RQL de lecture"
+
+msgid "add EEType update_permission RQLExpression subject"
+msgstr "définir une expression RQL de mise à jour"
+
+msgid "add EFRDef constrained_by EConstraint subject"
+msgstr "contrainte"
+
+msgid "add EFRDef relation_type ERType object"
+msgstr "définition d'attribut"
+
+msgid "add ENFRDef constrained_by EConstraint subject"
+msgstr "contrainte"
+
+msgid "add ENFRDef relation_type ERType object"
+msgstr "définition de relation"
+
+msgid "add EProperty for_user EUser object"
+msgstr "propriété"
+
+msgid "add ERType add_permission RQLExpression subject"
+msgstr "expression RQL d'ajout"
+
+msgid "add ERType delete_permission RQLExpression subject"
+msgstr "expression RQL de suppression"
+
+msgid "add ERType read_permission RQLExpression subject"
+msgstr "expression RQL de lecture"
+
+msgid "add EUser in_group EGroup object"
+msgstr "utilisateur"
+
+msgid "add EUser use_email EmailAddress subject"
+msgstr "ajouter une adresse email"
+
+msgid "add State allowed_transition Transition object"
+msgstr "ajouter un état en entrée"
+
+msgid "add State allowed_transition Transition subject"
+msgstr "ajouter une transition en sortie"
+
+msgid "add State state_of EEType object"
+msgstr "ajouter un état"
+
+msgid "add Transition condition RQLExpression subject"
+msgstr "ajouter une condition"
+
+msgid "add Transition destination_state State object"
+msgstr "ajouter une transition en entrée"
+
+msgid "add Transition destination_state State subject"
+msgstr "ajouter l'état de sortie"
+
+msgid "add Transition transition_of EEType object"
+msgstr "ajouter une transition"
+
+msgid "add a Bookmark"
+msgstr "ajouter un signet"
+
+msgid "add a Card"
+msgstr "ajouter une fiche"
+
+msgid "add a EConstraint"
+msgstr "ajouter une contrainte"
+
+msgid "add a EConstraintType"
+msgstr "ajouter un type de contrainte"
+
+msgid "add a EEType"
+msgstr "ajouter un type d'entité"
+
+msgid "add a EFRDef"
+msgstr "ajouter un type de relation"
+
+msgid "add a EGroup"
+msgstr "ajouter un groupe d'utilisateurs"
+
+msgid "add a ENFRDef"
+msgstr "ajouter une relation"
+
+msgid "add a EPermission"
+msgstr "ajouter une permission"
+
+msgid "add a EProperty"
+msgstr "ajouter une propriété"
+
+msgid "add a ERType"
+msgstr "ajouter un type de relation"
+
+msgid "add a EUser"
+msgstr "ajouter un utilisateur"
+
+msgid "add a EmailAddress"
+msgstr "ajouter une adresse email"
+
+msgid "add a RQLExpression"
+msgstr "ajouter une expression rql"
+
+msgid "add a State"
+msgstr "ajouter un état"
+
+msgid "add a TrInfo"
+msgstr "ajouter une information de transition"
+
+msgid "add a Transition"
+msgstr "ajouter une transition"
+
+msgid "add a new permission"
+msgstr "ajouter une permission"
+
+msgid "add relation"
+msgstr "ajouter une relation"
+
+msgid "add_perm"
+msgstr "ajout"
+
+# subject and object forms for each relation type
+# (no object form for final relation types)
+msgid "add_permission"
+msgstr "permission d'ajouter"
+
+msgid "add_permission_object"
+msgstr "a la permission d'ajouter"
+
+#, python-format
+msgid "added %(etype)s #%(eid)s (%(title)s)"
+msgstr "ajout de l'entité %(etype)s #%(eid)s (%(title)s)"
+
+#, python-format
+msgid ""
+"added relation %(rtype)s from %(frometype)s #%(fromeid)s to %(toetype)s #%"
+"(toeid)s"
+msgstr ""
+"ajout de la relation %(rtype)s de %(frometype)s #%(fromeid)s vers %(toetype)"
+"s #%(toeid)s"
+
+msgid "address"
+msgstr "adresse"
+
+msgid "alias"
+msgstr "alias"
+
+msgid "allowed transition from this state"
+msgstr "transition autorisée depuis cet état"
+
+msgid "allowed transitions from this state"
+msgstr "transitions autorisées depuis cet état"
+
+msgid "allowed_transition"
+msgstr "transition autorisée"
+
+msgid "allowed_transition_object"
+msgstr "états en entrée"
+
+msgid "am/pm calendar (month)"
+msgstr "calendrier am/pm (mois)"
+
+msgid "am/pm calendar (semester)"
+msgstr "calendrier am/pm (semestre)"
+
+msgid "am/pm calendar (week)"
+msgstr "calendrier am/pm (semaine)"
+
+msgid "am/pm calendar (year)"
+msgstr "calendrier am/pm (année)"
+
+msgid "an abstract for this card"
+msgstr "un résumé pour cette fiche"
+
+msgid "an electronic mail address associated to a short alias"
+msgstr "une adresse électronique associée à un alias"
+
+msgid "an error occured"
+msgstr "une erreur est survenue"
+
+msgid "an error occured while processing your request"
+msgstr "une erreur est survenue pendant le traitement de votre requête"
+
+msgid "an error occured, the request cannot be fulfilled"
+msgstr "une erreur est survenue, la requête ne peut être complétée"
+
+msgid "and linked"
+msgstr "et lié"
+
+msgid "anonymous"
+msgstr "anonyme"
+
+msgid "application entities"
+msgstr "entités applicatives"
+
+msgid "application schema"
+msgstr "Schéma de l'application"
+
+msgid "april"
+msgstr "avril"
+
+#, python-format
+msgid "at least one relation %s is required on %s(%s)"
+msgstr "au moins une relation %s est nécessaire sur %s(%s)"
+
+msgid "attribute"
+msgstr "attribut"
+
+msgid "august"
+msgstr "août"
+
+msgid "authentication failure"
+msgstr "Identifiant ou mot de passe incorrect"
+
+msgid "automatic"
+msgstr "automatique"
+
+msgid "bad value"
+msgstr "mauvaise valeur"
+
+msgid "base url"
+msgstr "url de base"
+
+msgid "bookmark has been removed"
+msgstr "le signet a été retiré"
+
+msgid "bookmark this page"
+msgstr "poser un signet ici"
+
+msgid "bookmarked_by"
+msgstr "utilisé par"
+
+msgid "bookmarked_by_object"
+msgstr "a pour signets"
+
+msgid "bookmarks"
+msgstr "signets"
+
+msgid "boxes"
+msgstr "boîtes"
+
+msgid "boxes_bookmarks_box"
+msgstr "boîte signets"
+
+msgid "boxes_bookmarks_box_description"
+msgstr "boîte contenant les signets de l'utilisateur"
+
+msgid "boxes_edit_box"
+msgstr "boîte d'actions"
+
+msgid "boxes_edit_box_description"
+msgstr ""
+"boîte affichant les différentes actions possibles sur les données affichées"
+
+msgid "boxes_filter_box"
+msgstr "filtrer"
+
+msgid "boxes_filter_box_description"
+msgstr "boîte permettant de filtrer parmi les résultats d'une recherche"
+
+msgid "boxes_possible_views_box"
+msgstr "boîte des vues possibles"
+
+msgid "boxes_possible_views_box_description"
+msgstr "boîte affichant les vues possibles pour les données courantes"
+
+msgid "boxes_rss"
+msgstr "icône RSS"
+
+msgid "boxes_rss_description"
+msgstr "l'icône RSS permettant de récupérer la vue RSS des données affichées"
+
+msgid "boxes_search_box"
+msgstr "boîte de recherche"
+
+msgid "boxes_search_box_description"
+msgstr "boîte avec un champ de recherche simple"
+
+msgid "boxes_startup_views_box"
+msgstr "boîte des vues de départs"
+
+msgid "boxes_startup_views_box_description"
+msgstr "boîte affichant les vues de départs de l'application"
+
+msgid "bug report sent"
+msgstr "rapport d'erreur envoyé"
+
+msgid "button_apply"
+msgstr "appliquer"
+
+msgid "button_cancel"
+msgstr "annuler"
+
+msgid "button_delete"
+msgstr "supprimer"
+
+msgid "button_ok"
+msgstr "valider"
+
+msgid "button_reset"
+msgstr "annuler les changements"
+
+msgid "by"
+msgstr "par"
+
+msgid "by relation"
+msgstr "via la relation"
+
+msgid "calendar"
+msgstr "afficher un calendrier"
+
+msgid "calendar (month)"
+msgstr "calendrier (mensuel)"
+
+msgid "calendar (semester)"
+msgstr "calendrier (semestriel)"
+
+msgid "calendar (week)"
+msgstr "calendrier (hebdo)"
+
+msgid "calendar (year)"
+msgstr "calendrier (annuel)"
+
+#, python-format
+msgid "can't change the %s attribute"
+msgstr "ne peut changer l'attribut %s"
+
+#, python-format
+msgid "can't connect to source %s, some data may be missing"
+msgstr "ne peut se connecter à la source %s, des données peuvent manquer"
+
+#, python-format
+msgid "can't display data, unexpected error: %s"
+msgstr "impossible d'afficher les données à cause de l'erreur suivante: %s"
+
+#, python-format
+msgid ""
+"can't set inlined=%(inlined)s, %(stype)s %(rtype)s %(otype)s has cardinality="
+"%(card)s"
+msgstr ""
+"ne peut mettre 'inlined' = %(inlined)s, %(stype)s %(rtype)s %(otype)s a pour "
+"cardinalité %(card)s"
+
+msgid "cancel select"
+msgstr "annuler la sélection"
+
+msgid "cancel this insert"
+msgstr "annuler cette insertion"
+
+msgid "canonical"
+msgstr "canonique"
+
+msgid "cardinality"
+msgstr "cardinalité"
+
+#, python-format
+msgid "changed state of %(etype)s #%(eid)s (%(title)s)"
+msgstr "changement de l'état de %(etype)s #%(eid)s (%(title)s)"
+
+msgid "changes applied"
+msgstr "changements appliqués"
+
+msgid "click on the box to cancel the deletion"
+msgstr "cliquer dans la zone d'édition pour annuler la suppression"
+
+msgid "comment"
+msgstr "commentaire"
+
+msgid "comment:"
+msgstr "commentaire :"
+
+msgid "comment_format"
+msgstr "format"
+
+msgid "components"
+msgstr "composants"
+
+msgid "components_appliname"
+msgstr "titre de l'application"
+
+msgid "components_appliname_description"
+msgstr "affiche le titre de l'application dans l'en-tête de page"
+
+msgid "components_applmessages"
+msgstr "messages applicatifs"
+
+msgid "components_applmessages_description"
+msgstr "affiche les messages applicatifs"
+
+msgid "components_breadcrumbs"
+msgstr "fil d'ariane"
+
+msgid "components_breadcrumbs_description"
+msgstr ""
+"affiche un chemin permettant de localiser la page courante dans le site"
+
+msgid "components_etypenavigation"
+msgstr "filtrage par type"
+
+msgid "components_etypenavigation_description"
+msgstr "permet de filtrer par type d'entité les résultats d'une recherche"
+
+msgid "components_help"
+msgstr "bouton aide"
+
+msgid "components_help_description"
+msgstr "le bouton d'aide, dans l'en-tête de page"
+
+msgid "components_loggeduserlink"
+msgstr "lien utilisateur"
+
+msgid "components_loggeduserlink_description"
+msgstr ""
+"affiche un lien vers le formulaire d'authentification pour les utilisateurs "
+"anonymes, sinon une boite contenant notamment des liens propres à "
+"l'utilisateur connecté"
+
+msgid "components_logo"
+msgstr "logo"
+
+msgid "components_logo_description"
+msgstr "le logo de l'application, dans l'en-tête de page"
+
+msgid "components_navigation"
+msgstr "navigation par page"
+
+msgid "components_navigation_description"
+msgstr ""
+"composant permettant de présenter sur plusieurs pages les requêtes renvoyant "
+"plus d'un certain nombre de résultats"
+
+msgid "components_rqlinput"
+msgstr "barre rql"
+
+msgid "components_rqlinput_description"
+msgstr "la barre de requête rql, dans l'en-tête de page"
+
+msgid "composite"
+msgstr "composite"
+
+msgid "condition"
+msgstr "condition"
+
+msgid "condition:"
+msgstr "condition :"
+
+msgid "condition_object"
+msgstr "condition de"
+
+msgid "confirm password"
+msgstr "confirmer le mot de passe"
+
+msgid "constrained_by"
+msgstr "contraint par"
+
+msgid "constrained_by_object"
+msgstr "contrainte de"
+
+msgid "constraint factory"
+msgstr "fabrique de contraintes"
+
+msgid "constraints"
+msgstr "contraintes"
+
+msgid "constraints applying on this relation"
+msgstr "contraintes s'appliquant à cette relation"
+
+msgid "content"
+msgstr "contenu"
+
+msgid "content_format"
+msgstr "format"
+
+msgid "contentnavigation"
+msgstr "composants contextuels"
+
+msgid "contentnavigation_breadcrumbs"
+msgstr "fil d'ariane"
+
+msgid "contentnavigation_breadcrumbs_description"
+msgstr ""
+"affiche un chemin permettant de localiser la page courante dans le site"
+
+msgid "contentnavigation_prevnext"
+msgstr "élément précédent / suivant"
+
+msgid "contentnavigation_prevnext_description"
+msgstr ""
+"affiche des liens permettant de passer d'une entité à une autre sur les "
+"entités implémentant l'interface \"précédent/suivant\"."
+
+msgid "contentnavigation_seealso"
+msgstr "voir aussi"
+
+msgid "contentnavigation_seealso_description"
+msgstr ""
+"section affichant les entités liées par la relation \"voir aussi\" si "
+"l'entité supporte cette relation."
+
+msgid "contentnavigation_wfhistory"
+msgstr "historique du workflow."
+
+msgid "contentnavigation_wfhistory_description"
+msgstr ""
+"section affichant l'historique du workflow pour les entités ayant un "
+"workflow."
+
+msgid "context"
+msgstr "contexte"
+
+msgid "context where this box should be displayed"
+msgstr "contexte dans lequel la boite devrait être affichée"
+
+msgid "context where this component should be displayed"
+msgstr "contexte où ce composant doit être affiché"
+
+msgid "control subject entity's relations order"
+msgstr "contrôle l'ordre des relations de l'entité sujet"
+
+msgid "copy"
+msgstr "copier"
+
+msgid "copy edition"
+msgstr "édition d'une copie"
+
+msgid ""
+"core relation giving to a group the permission to add an entity or relation "
+"type"
+msgstr ""
+"relation système donnant à un groupe la permission d'ajouter une entité ou "
+"une relation"
+
+msgid ""
+"core relation giving to a group the permission to delete an entity or "
+"relation type"
+msgstr ""
+"relation système donnant à un groupe la permission de supprimer une entité ou "
+"une relation"
+
+msgid ""
+"core relation giving to a group the permission to read an entity or relation "
+"type"
+msgstr ""
+"relation système donnant à un groupe la permission de lire une entité ou une "
+"relation"
+
+msgid "core relation giving to a group the permission to update an entity type"
+msgstr ""
+"relation système donnant à un groupe la permission de mettre à jour une "
+"entité"
+
+msgid "core relation indicating a user's groups"
+msgstr ""
+"relation système indiquant les groupes auxquels appartient l'utilisateur"
+
+msgid ""
+"core relation indicating owners of an entity. This relation implicitly put "
+"the owner into the owners group for the entity"
+msgstr ""
+"relation système indiquant le(s) propriétaire(s) d'une entité. Cette "
+"relation place implicitement les utilisateurs liés dans le groupe des "
+"propriétaires pour cette entité"
+
+msgid "core relation indicating the original creator of an entity"
+msgstr "relation système indiquant le créateur d'une entité."
+
+msgid "core relation indicating the type of an entity"
+msgstr "relation système indiquant le type de l'entité"
+
+msgid "cost"
+msgstr "coût"
+
+msgid "could not connect to the SMTP server"
+msgstr "impossible de se connecter au serveur SMTP"
+
+msgid "create an index for quick search on this attribute"
+msgstr "créer un index pour accélérer les recherches sur cet attribut"
+
+msgid "create an index page"
+msgstr "créer une page d'accueil"
+
+msgid "created on"
+msgstr "créé le"
+
+msgid "created_by"
+msgstr "créé par"
+
+msgid "created_by_object"
+msgstr "a créé"
+
+msgid "creating EConstraint (EFRDef %(linkto)s constrained_by EConstraint)"
+msgstr "création contrainte pour l'attribut %(linkto)s"
+
+msgid "creating EConstraint (ENFRDef %(linkto)s constrained_by EConstraint)"
+msgstr "création contrainte pour la relation %(linkto)s"
+
+msgid "creating EFRDef (EFRDef relation_type ERType %(linkto)s)"
+msgstr "création attribut %(linkto)s"
+
+msgid "creating ENFRDef (ENFRDef relation_type ERType %(linkto)s)"
+msgstr "création relation %(linkto)s"
+
+msgid "creating EProperty (EProperty for_user EUser %(linkto)s)"
+msgstr "création d'une propriété pour l'utilisateur %(linkto)s"
+
+msgid "creating EUser (EUser in_group EGroup %(linkto)s)"
+msgstr "création d'un utilisateur à rajouter au groupe %(linkto)s"
+
+msgid "creating EmailAddress (EUser %(linkto)s use_email EmailAddress)"
+msgstr "création d'une adresse électronique pour l'utilisateur %(linkto)s"
+
+msgid "creating RQLExpression (EEType %(linkto)s add_permission RQLExpression)"
+msgstr "création d'une expression RQL pour la permission d'ajout de %(linkto)s"
+
+msgid ""
+"creating RQLExpression (EEType %(linkto)s delete_permission RQLExpression)"
+msgstr ""
+"création d'une expression RQL pour la permission de suppression de %(linkto)s"
+
+msgid ""
+"creating RQLExpression (EEType %(linkto)s read_permission RQLExpression)"
+msgstr "création d'une expression RQL pour la permission de lire %(linkto)s"
+
+msgid ""
+"creating RQLExpression (EEType %(linkto)s update_permission RQLExpression)"
+msgstr ""
+"création d'une expression RQL pour la permission de mise à jour de %(linkto)s"
+
+msgid "creating RQLExpression (ERType %(linkto)s add_permission RQLExpression)"
+msgstr ""
+"création d'une expression RQL pour la permission d'ajout des relations %"
+"(linkto)s"
+
+msgid ""
+"creating RQLExpression (ERType %(linkto)s delete_permission RQLExpression)"
+msgstr ""
+"création d'une expression RQL pour la permission de suppression des "
+"relations %(linkto)s"
+
+msgid ""
+"creating RQLExpression (ERType %(linkto)s read_permission RQLExpression)"
+msgstr ""
+"création d'une expression RQL pour la permission de lire les relations %"
+"(linkto)s"
+
+msgid "creating RQLExpression (Transition %(linkto)s condition RQLExpression)"
+msgstr "création d'une expression RQL pour la transition %(linkto)s"
+
+msgid "creating State (State allowed_transition Transition %(linkto)s)"
+msgstr "création d'un état pouvant aller vers la transition %(linkto)s"
+
+msgid "creating State (State state_of EEType %(linkto)s)"
+msgstr "création d'un état pour le type %(linkto)s"
+
+msgid "creating State (Transition %(linkto)s destination_state State)"
+msgstr "création d'un état destination de la transition %(linkto)s"
+
+msgid "creating Transition (State %(linkto)s allowed_transition Transition)"
+msgstr "création d'une transition autorisée depuis l'état %(linkto)s"
+
+msgid "creating Transition (Transition destination_state State %(linkto)s)"
+msgstr "création d'une transition vers l'état %(linkto)s"
+
+msgid "creating Transition (Transition transition_of EEType %(linkto)s)"
+msgstr "création d'une transition pour le type %(linkto)s"
+
+msgid "creation"
+msgstr "création"
+
+msgid "creation time of an entity"
+msgstr "date de création d'une entité"
+
+msgid "creation_date"
+msgstr "date de création"
+
+msgid "cstrtype"
+msgstr "type de contrainte"
+
+msgid "cstrtype_object"
+msgstr "utilisé par"
+
+msgid "csv entities export"
+msgstr "export d'entités en CSV"
+
+msgid "csv export"
+msgstr "export CSV"
+
+msgid "data directory url"
+msgstr "url du répertoire de données"
+
+msgid "date"
+msgstr "date"
+
+msgid "deactivate"
+msgstr "désactiver"
+
+msgid "deactivated"
+msgstr "désactivé"
+
+msgid "december"
+msgstr "décembre"
+
+msgid "default"
+msgstr "valeur par défaut"
+
+msgid "default text format for rich text fields."
+msgstr "format de texte par défaut pour les champs textes"
+
+msgid "defaultval"
+msgstr "valeur par défaut"
+
+msgid "define a CubicWeb user"
+msgstr "définit un utilisateur CubicWeb"
+
+msgid "define a CubicWeb users group"
+msgstr "définit un groupe d'utilisateurs CubicWeb"
+
+msgid ""
+"define a final relation: link a final relation type from a non final entity "
+"to a final entity type. used to build the application schema"
+msgstr ""
+"définit une relation finale : lie un type de relation finale depuis une "
+"entité non finale vers un type d'entité final. Utilisé pour construire le "
+"schéma de l'application"
+
+msgid ""
+"define a non final relation: link a non final relation type from a non final "
+"entity to a non final entity type. used to build the application schema"
+msgstr ""
+"définit une relation 'attribut', utilisé pour construire le schéma de "
+"l'application"
+
+msgid "define a relation type, used to build the application schema"
+msgstr ""
+"définit un type de relation, utilisé pour construire le schéma de "
+"l'application"
+
+msgid "define a rql expression used to define permissions"
+msgstr "RQL expression utilisée pour définir les droits d'accès"
+
+msgid "define a schema constraint"
+msgstr "définit une contrainte de schéma"
+
+msgid "define a schema constraint type"
+msgstr "définit un type de contrainte de schéma"
+
+msgid "define an entity type, used to build the application schema"
+msgstr ""
+"définit un type d'entité, utilisé pour construire le schéma de l'application"
+
+msgid ""
+"defines what's the property is applied for. You must select this first to be "
+"able to set value"
+msgstr ""
+"définit à quoi la propriété est appliquée. Vous devez sélectionner cela "
+"avant de pouvoir fixer une valeur"
+
+msgid "delete"
+msgstr "supprimer"
+
+msgid "delete this bookmark"
+msgstr "supprimer ce signet"
+
+msgid "delete this permission"
+msgstr "supprimer cette permission"
+
+msgid "delete this relation"
+msgstr "supprimer cette relation"
+
+msgid "delete_perm"
+msgstr "suppression"
+
+msgid "delete_permission"
+msgstr "permission de supprimer"
+
+msgid "delete_permission_object"
+msgstr "a la permission de supprimer"
+
+#, python-format
+msgid "deleted %(etype)s #%(eid)s (%(title)s)"
+msgstr "suppression de l'entité %(etype)s #%(eid)s (%(title)s)"
+
+#, python-format
+msgid ""
+"deleted relation %(rtype)s from %(frometype)s #%(fromeid)s to %(toetype)s #%"
+"(toeid)s"
+msgstr ""
+"suppression de la relation %(rtype)s de %(frometype)s #%(fromeid)s vers %"
+"(toetype)s #%(toeid)s"
+
+msgid "depends on the constraint type"
+msgstr "dépend du type de contrainte"
+
+msgid "description"
+msgstr "description"
+
+msgid "description_format"
+msgstr "format"
+
+msgid "destination state for this transition"
+msgstr "états accessibles par cette transition"
+
+msgid "destination state of a transition"
+msgstr "état d'arrivée d'une transition"
+
+msgid "destination_state"
+msgstr "état de destination"
+
+msgid "destination_state_object"
+msgstr "destination de"
+
+msgid "detach attached file"
+msgstr "détacher le fichier existant"
+
+msgid "detailed schema view"
+msgstr "vue détaillée du schéma"
+
+msgid "display order of the action"
+msgstr "ordre d'affichage de l'action"
+
+msgid "display order of the box"
+msgstr "ordre d'affichage de la boîte"
+
+msgid "display order of the component"
+msgstr "ordre d'affichage du composant"
+
+msgid "display the action or not"
+msgstr "afficher l'action ou non"
+
+msgid "display the box or not"
+msgstr "afficher la boîte ou non"
+
+msgid "display the component or not"
+msgstr "afficher le composant ou non"
+
+msgid ""
+"distinct label to distinguate between other permission entity of the same "
+"name"
+msgstr ""
+"libellé permettant de distinguer cette permission des autres ayant le même "
+"nom"
+
+msgid "download"
+msgstr "télécharger"
+
+msgid "edit bookmarks"
+msgstr "éditer les signets"
+
+msgid "edit the index page"
+msgstr "éditer la page d'accueil"
+
+msgid "edition"
+msgstr "édition"
+
+msgid "eid"
+msgstr "eid"
+
+msgid "element copied"
+msgstr "élément copié"
+
+msgid "element created"
+msgstr "élément créé"
+
+msgid "element edited"
+msgstr "élément édité"
+
+msgid "email address to use for notification"
+msgstr "adresse email à utiliser pour la notification"
+
+msgid "emails successfully sent"
+msgstr "courriels envoyés avec succès"
+
+msgid "embed"
+msgstr "embarqué"
+
+msgid "embedding this url is forbidden"
+msgstr "l'inclusion de cette url est interdite"
+
+msgid "entities deleted"
+msgstr "entités supprimées"
+
+msgid "entity deleted"
+msgstr "entité supprimée"
+
+msgid ""
+"entity type that may be used to construct some advanced security "
+"configuration"
+msgstr ""
+"type d'entité à utiliser pour définir une configuration de sécurité avancée"
+
+msgid "entity types which may use this state"
+msgstr "types d'entités pouvant utiliser cet état"
+
+msgid "entity types which may use this transition"
+msgstr "entités qui peuvent utiliser cette transition"
+
+msgid "error while embedding page"
+msgstr "erreur pendant l'inclusion de la page"
+
+#, python-format
+msgid "error while handling __method: %s"
+msgstr "erreur survenue lors du traitement de formulaire (%s)"
+
+msgid "error while publishing ReST text"
+msgstr ""
+"une erreur s'est produite lors de l'interprétation du texte au format ReST"
+
+#, python-format
+msgid "error while querying source %s, some data may be missing"
+msgstr ""
+
+msgid "eta_date"
+msgstr "date de fin"
+
+msgid "expected:"
+msgstr "attendu :"
+
+msgid "expression"
+msgstr "expression"
+
+msgid "exprtype"
+msgstr "type de l'expression"
+
+msgid "external page"
+msgstr "page externe"
+
+msgid "february"
+msgstr "février"
+
+msgid "filter"
+msgstr "filtrer"
+
+msgid "final"
+msgstr "final"
+
+msgid "firstname"
+msgstr "prénom"
+
+msgid "follow"
+msgstr "suivre le lien"
+
+msgid "for_user"
+msgstr "pour l'utilisateur"
+
+msgid "for_user_object"
+msgstr "utilise les propriétés"
+
+msgid "friday"
+msgstr "vendredi"
+
+msgid "from"
+msgstr "de"
+
+msgid "from_entity"
+msgstr "de l'entité"
+
+msgid "from_entity_object"
+msgstr "relation sujet"
+
+msgid "from_state"
+msgstr "de l'état"
+
+msgid "from_state_object"
+msgstr "transitions depuis cet état"
+
+msgid "full text or RQL query"
+msgstr "texte à rechercher ou requête RQL"
+
+msgid "fulltext_container"
+msgstr "conteneur du texte indexé"
+
+msgid "fulltextindexed"
+msgstr "indexation du texte"
+
+msgid "generic plot"
+msgstr ""
+
+msgid "go back to the index page"
+msgstr "retourner sur la page d'accueil"
+
+msgid "granted to groups"
+msgstr "accordée aux groupes"
+
+msgid "graphical representation of the application'schema"
+msgstr "représentation graphique du schéma de l'application"
+
+#, python-format
+msgid "graphical schema for %s"
+msgstr "graphique du schéma pour %s"
+
+#, python-format
+msgid "graphical workflow for %s"
+msgstr "graphique du workflow pour %s"
+
+msgid "group in which a user should be to be allowed to pass this transition"
+msgstr ""
+"groupe dans lequel l'utilisateur doit être pour pouvoir passer la transition"
+
+msgid "groups"
+msgstr "groupes"
+
+msgid "groups allowed to add entities/relations of this type"
+msgstr "groupes autorisés à ajouter des entités/relations de ce type"
+
+msgid "groups allowed to delete entities/relations of this type"
+msgstr "groupes autorisés à supprimer des entités/relations de ce type"
+
+msgid "groups allowed to read entities/relations of this type"
+msgstr "groupes autorisés à lire des entités/relations de ce type"
+
+msgid "groups allowed to update entities of this type"
+msgstr "groupes autorisés à mettre à jour les entités de ce type"
+
+msgid "groups grant permissions to the user"
+msgstr "les groupes donnent des permissions à l'utilisateur"
+
+msgid "groups to which the permission is granted"
+msgstr "groupes auxquels cette permission est donnée"
+
+msgid "groups:"
+msgstr "groupes :"
+
+msgid "guests"
+msgstr "invités"
+
+msgid "hCalendar"
+msgstr "hCalendar"
+
+msgid "has_text"
+msgstr "contient le texte"
+
+msgid "help"
+msgstr "aide"
+
+msgid "hide filter form"
+msgstr "cacher le filtre"
+
+msgid "hide meta-data"
+msgstr "cacher les méta-données"
+
+msgid "home"
+msgstr "maison"
+
+msgid ""
+"how to format date and time in the ui (\"man strftime\" for format "
+"description)"
+msgstr ""
+"comment formater la date dans l'interface (\"man strftime\" pour la "
+"description du format)"
+
+msgid "how to format date in the ui (\"man strftime\" for format description)"
+msgstr ""
+"comment formater la date dans l'interface (\"man strftime\" pour la "
+"description du format)"
+
+msgid "how to format float numbers in the ui"
+msgstr "comment formater les nombres flottants dans l'interface"
+
+msgid "how to format time in the ui (\"man strftime\" for format description)"
+msgstr ""
+"comment formater l'heure dans l'interface (\"man strftime\" pour la "
+"description du format)"
+
+msgid "html class of the component"
+msgstr "classe HTML de ce composant"
+
+msgid "htmlclass"
+msgstr "classe html"
+
+msgid "i18n_login_popup"
+msgstr "s'authentifier"
+
+msgid "i18nprevnext_next"
+msgstr "suivant"
+
+msgid "i18nprevnext_previous"
+msgstr "précédent"
+
+msgid "i18nprevnext_up"
+msgstr "parent"
+
+msgid "iCalendar"
+msgstr "iCalendar"
+
+msgid "id of main template used to render pages"
+msgstr "id du template principal"
+
+msgid "identical_to"
+msgstr "identique à"
+
+msgid "identity"
+msgstr "est identique à"
+
+msgid "identity_object"
+msgstr "est identique à"
+
+msgid ""
+"if full text content of subject/object entity should be added to other side "
+"entity (the container)."
+msgstr ""
+"si le text indexé de l'entité sujet/objet doit être ajouté à l'entité à "
+"l'autre extrémité de la relation (le conteneur)."
+
+msgid "image"
+msgstr "image"
+
+msgid "in memory entity schema"
+msgstr "schéma de l'entité en mémoire"
+
+msgid "in memory relation schema"
+msgstr "schéma de la relation en mémoire"
+
+msgid "in_group"
+msgstr "dans le groupe"
+
+msgid "in_group_object"
+msgstr "membres"
+
+msgid "in_state"
+msgstr "état"
+
+msgid "in_state_object"
+msgstr "état de"
+
+msgid "incontext"
+msgstr "dans le contexte"
+
+#, python-format
+msgid "incorrect value (%(value)s) for type \"%(type)s\""
+msgstr "valeur %(value)s incorrecte pour le type \"%(type)s\""
+
+msgid "index"
+msgstr "index"
+
+msgid "index this attribute's value in the plain text index"
+msgstr "indexer la valeur de cet attribut dans l'index plein texte"
+
+msgid "indexed"
+msgstr "index"
+
+msgid "indicate the current state of an entity"
+msgstr "indique l'état courant d'une entité"
+
+msgid ""
+"indicate which state should be used by default when an entity using states "
+"is created"
+msgstr ""
+"indique quel état devrait être utilisé par défaut lorsqu'une entité est créée"
+
+#, python-format
+msgid "initial estimation %s"
+msgstr "estimation initiale %s"
+
+msgid "initial state for entities of this type"
+msgstr "état initial pour les entités de ce type"
+
+msgid "initial_state"
+msgstr "état initial"
+
+msgid "initial_state_object"
+msgstr "état initial de"
+
+msgid "inlined"
+msgstr "mise en ligne"
+
+msgid "internationalizable"
+msgstr "internationalisable"
+
+#, python-format
+msgid "invalid action %r"
+msgstr "action %r invalide"
+
+msgid "invalid date"
+msgstr "cette date n'est pas valide"
+
+msgid "is"
+msgstr "de type"
+
+msgid "is it an application entity type or not ?"
+msgstr "est-ce une entité applicative ou non ?"
+
+msgid "is it an application relation type or not ?"
+msgstr "est-ce une relation applicative ou non ?"
+
+msgid ""
+"is the subject/object entity of the relation composed of the other ? This "
+"implies that when the composite is deleted, composants are also deleted."
+msgstr ""
+"Est-ce que l'entité sujet/objet de la relation est une agrégation de "
+"l'autre ? Si c'est le cas, détruire le composite détruira ses composants "
+"également"
+
+msgid "is this attribute's value translatable"
+msgstr "est-ce que la valeur de cet attribut est traduisible ?"
+
+msgid "is this relation equivalent in both direction ?"
+msgstr "est-ce que cette relation est équivalente dans les deux sens ?"
+
+msgid ""
+"is this relation physically inlined? you should know what you're doing if "
+"you are changing this!"
+msgstr ""
+"est-ce que cette relation est mise en ligne dans la base de données ? Vous "
+"devez savoir ce que vous faites si vous changez cela !"
+
+msgid "is_object"
+msgstr "a pour instance"
+
+msgid "january"
+msgstr "janvier"
+
+msgid "july"
+msgstr "juillet"
+
+msgid "june"
+msgstr "juin"
+
+msgid "label"
+msgstr "libellé"
+
+msgid "language of the user interface"
+msgstr "langue pour l'interface utilisateur"
+
+msgid "last connection date"
+msgstr "dernière date de connexion"
+
+msgid "last_login_time"
+msgstr "dernière date de connexion"
+
+msgid "latest modification time of an entity"
+msgstr "date de dernière modification d'une entité"
+
+msgid "latest update on"
+msgstr "dernière mise à jour"
+
+msgid "left"
+msgstr "gauche"
+
+msgid ""
+"link a property to the user which want this property customization. Unless "
+"you're a site manager, this relation will be handled automatically."
+msgstr ""
+"lie une propriété à l'utilisateur désirant cette personnalisation. A moins "
+"que vous ne soyez gestionnaire du site, cette relation est gérée "
+"automatiquement."
+
+msgid "link a relation definition to its object entity type"
+msgstr "lie une définition de relation à son type d'entité objet"
+
+msgid "link a relation definition to its relation type"
+msgstr "lie une définition de relation à son type d'entité"
+
+msgid "link a relation definition to its subject entity type"
+msgstr "lie une définition de relation à son type d'entité sujet"
+
+msgid "link a state to one or more entity type"
+msgstr "lier un état à une ou plusieurs entités"
+
+msgid "link a transition information to its object"
+msgstr "lie un enregistrement de transition vers l'objet associé"
+
+msgid "link a transition to one or more entity type"
+msgstr "lie une transition à un ou plusieurs types d'entités"
+
+msgid "link to each item in"
+msgstr "lier vers chaque élément dans"
+
+msgid "list"
+msgstr "liste"
+
+msgid "log in"
+msgstr "s'identifier"
+
+msgid "login"
+msgstr "identifiant"
+
+msgid "login_action"
+msgstr "identifiez-vous"
+
+msgid "logout"
+msgstr "se déconnecter"
+
+#, python-format
+msgid "loop in %s relation (%s)"
+msgstr "boucle dans la relation %s (%s)"
+
+msgid "main informations"
+msgstr "Informations générales"
+
+msgid "mainvars"
+msgstr "variables principales"
+
+msgid "manage"
+msgstr "gestion du site"
+
+msgid "manage bookmarks"
+msgstr "gérer les signets"
+
+msgid "manage security"
+msgstr "gestion de la sécurité"
+
+msgid "managers"
+msgstr "administrateurs"
+
+msgid "march"
+msgstr "mars"
+
+msgid "maximum number of characters in short description"
+msgstr "nombre maximum de caractères dans les descriptions courtes"
+
+msgid "maximum number of entities to display in related combo box"
+msgstr "nombre maximum d'entités à afficher dans les listes déroulantes"
+
+msgid "maximum number of objects displayed by page of results"
+msgstr "nombre maximum d'entités affichées par pages"
+
+msgid "maximum number of related entities to display in the primary view"
+msgstr "nombre maximum d'entités liées à afficher dans la vue primaire"
+
+msgid "may"
+msgstr "mai"
+
+msgid "meta"
+msgstr "méta"
+
+msgid "milestone"
+msgstr "jalon"
+
+#, python-format
+msgid "missing parameters for entity %s"
+msgstr "paramètres manquants pour l'entité %s"
+
+msgid "modification_date"
+msgstr "date de modification"
+
+msgid "modify"
+msgstr "modifier"
+
+msgid "monday"
+msgstr "lundi"
+
+msgid "more actions"
+msgstr "plus d'actions"
+
+msgid "multiple edit"
+msgstr "édition multiple"
+
+msgid "name"
+msgstr "nom"
+
+msgid ""
+"name of the main variables which should be used in the selection if "
+"necessary (comma separated)"
+msgstr ""
+"nom des variables principales qui devraient être utilisées dans la sélection si "
+"nécessaire (les séparer par des virgules)"
+
+msgid "name or identifier of the permission"
+msgstr "nom (identifiant) de la permission"
+
+msgid "navbottom"
+msgstr "bas de page"
+
+msgid "navcontentbottom"
+msgstr "bas de page du contenu principal"
+
+msgid "navcontenttop"
+msgstr "haut de page du contenu principal"
+
+msgid "navigation"
+msgstr "navigation"
+
+msgid "navtop"
+msgstr "haut de page"
+
+msgid "new"
+msgstr "nouveau"
+
+msgid "next_results"
+msgstr "résultats suivants"
+
+msgid "no"
+msgstr "non"
+
+msgid "no associated epermissions"
+msgstr "aucune permission spécifique n'est définie"
+
+msgid "no related project"
+msgstr "pas de projet rattaché"
+
+msgid "no selected entities"
+msgstr "pas d'entité sélectionnée"
+
+#, python-format
+msgid "no such entity type %s"
+msgstr "le type d'entité '%s' n'existe pas"
+
+msgid "no version information"
+msgstr "pas d'information de version"
+
+msgid "not authorized"
+msgstr "non autorisé"
+
+msgid "not specified"
+msgstr "non spécifié"
+
+msgid "not the initial state for this entity"
+msgstr "n'est pas l'état initial pour cette entité"
+
+msgid "nothing to edit"
+msgstr "rien à éditer"
+
+msgid "november"
+msgstr "novembre"
+
+msgid "object"
+msgstr "objet"
+
+msgid "october"
+msgstr "octobre"
+
+msgid "one month"
+msgstr "un mois"
+
+msgid "one week"
+msgstr "une semaine"
+
+msgid "oneline"
+msgstr "une ligne"
+
+msgid "only select queries are authorized"
+msgstr "seules les requêtes de sélections sont autorisées"
+
+msgid "order"
+msgstr "ordre"
+
+msgid "ordernum"
+msgstr "ordre"
+
+msgid "owned by"
+msgstr "appartient à"
+
+msgid "owned_by"
+msgstr "appartient à"
+
+msgid "owned_by_object"
+msgstr "possède"
+
+msgid "owners"
+msgstr "propriétaires"
+
+msgid "ownership"
+msgstr "propriété"
+
+msgid "ownerships have been changed"
+msgstr "les droits de propriété ont été modifiés"
+
+msgid "pageid-not-found"
+msgstr ""
+"des données nécessaires semblent expirées, veuillez recharger la page et "
+"recommencer."
+
+msgid "password"
+msgstr "mot de passe"
+
+msgid "password and confirmation don't match"
+msgstr "le mot de passe et la confirmation sont différents"
+
+msgid "path"
+msgstr "chemin"
+
+msgid "permission"
+msgstr "permission"
+
+msgid "permissions for this entity"
+msgstr "permissions pour cette entité"
+
+msgid "personnal informations"
+msgstr "informations personnelles"
+
+msgid "pick existing bookmarks"
+msgstr "récupérer des signets existants"
+
+msgid "pkey"
+msgstr "clé"
+
+msgid "please correct errors below"
+msgstr "veuillez corriger les erreurs ci-dessous"
+
+msgid "please correct the following errors:"
+msgstr "veuillez corriger les erreurs suivantes :"
+
+msgid "possible views"
+msgstr "vues possibles"
+
+msgid "preferences"
+msgstr "préférences"
+
+msgid "previous_results"
+msgstr "résultats précédents"
+
+msgid "primary"
+msgstr "primaire"
+
+msgid "primary_email"
+msgstr "adresse email principale"
+
+msgid "primary_email_object"
+msgstr "adresse email principale (object)"
+
+msgid "progress"
+msgstr "avancement"
+
+msgid "progress bar"
+msgstr "barre d'avancement"
+
+msgid "project"
+msgstr "projet"
+
+msgid "read"
+msgstr "lecture"
+
+msgid "read_perm"
+msgstr "lecture"
+
+msgid "read_permission"
+msgstr "permission de lire"
+
+msgid "read_permission_object"
+msgstr "a la permission de lire"
+
+#, python-format
+msgid "relation %(relname)s of %(ent)s"
+msgstr "relation %(relname)s de %(ent)s"
+
+msgid "relation_type"
+msgstr "type de relation"
+
+msgid "relation_type_object"
+msgstr "définition"
+
+msgid "relations deleted"
+msgstr "relations supprimées"
+
+msgid "relative url of the bookmarked page"
+msgstr "url relative de la page"
+
+msgid "remove this Bookmark"
+msgstr "supprimer ce signet"
+
+msgid "remove this Card"
+msgstr "supprimer cette fiche"
+
+msgid "remove this EConstraint"
+msgstr "supprimer cette contrainte"
+
+msgid "remove this EConstraintType"
+msgstr "supprimer ce type de contrainte"
+
+msgid "remove this EEType"
+msgstr "supprimer ce type d'entité"
+
+msgid "remove this EFRDef"
+msgstr "supprimer cet attribut"
+
+msgid "remove this EGroup"
+msgstr "supprimer ce groupe"
+
+msgid "remove this ENFRDef"
+msgstr "supprimer cette relation"
+
+msgid "remove this EPermission"
+msgstr "supprimer cette permission"
+
+msgid "remove this EProperty"
+msgstr "supprimer cette propriété"
+
+msgid "remove this ERType"
+msgstr "supprimer cette définition de relation"
+
+msgid "remove this EUser"
+msgstr "supprimer cet utilisateur"
+
+msgid "remove this EmailAddress"
+msgstr "supprimer cette adresse email"
+
+msgid "remove this RQLExpression"
+msgstr "supprimer cette expression rql"
+
+msgid "remove this State"
+msgstr "supprimer cet état"
+
+msgid "remove this TrInfo"
+msgstr "retirer cette information de transition"
+
+msgid "remove this Transition"
+msgstr "supprimer cette transition"
+
+msgid "require_group"
+msgstr "nécessite le groupe"
+
+msgid "require_group_object"
+msgstr "a les droits"
+
+msgid "required attribute"
+msgstr "attribut requis"
+
+msgid "required field"
+msgstr "champ requis"
+
+msgid ""
+"restriction part of a rql query. For entity rql expression, X and U are "
+"predefined respectivly to the current object and to the request user. For "
+"relation rql expression, S, O and U are predefined respectivly to the "
+"current relation'subject, object and to the request user. "
+msgstr ""
+"partie restriction de la requête rql. Pour une expression s'appliquant à une "
+"entité, X et U sont respectivement prédéfinis à l'entité et à l'utilisateur "
+"courant. Pour une expression s'appliquant à une relation, S, O et U sont "
+"respectivement prédéfinis au sujet/objet de la relation et à l'utilisateur "
+"courant."
+
+msgid "revert changes"
+msgstr "annuler les changements"
+
+msgid "right"
+msgstr "droite"
+
+msgid "rql expression allowing to add entities/relations of this type"
+msgstr ""
+"expression RQL donnant le droit d'ajouter des entités/relations de ce type"
+
+msgid "rql expression allowing to delete entities/relations of this type"
+msgstr ""
+"expression RQL donnant le droit de supprimer des entités/relations de ce type"
+
+msgid "rql expression allowing to read entities/relations of this type"
+msgstr ""
+"expression RQL donnant le droit de lire des entités/relations de ce type"
+
+msgid "rql expression allowing to update entities of this type"
+msgstr ""
+"expression RQL donnant le droit de modifier des entités/relations de ce type"
+
+msgid "rql expressions"
+msgstr "conditions rql"
+
+msgid "rss"
+msgstr "RSS"
+
+msgid "sample format"
+msgstr "exemple"
+
+msgid "saturday"
+msgstr "samedi"
+
+msgid "schema entities"
+msgstr "entités définissant le schéma"
+
+msgid "schema's permissions definitions"
+msgstr "permissions définies dans le schéma"
+
+msgid "search"
+msgstr "rechercher"
+
+msgid "searching for"
+msgstr "Recherche de"
+
+msgid "secondary"
+msgstr "secondaire"
+
+msgid "security"
+msgstr "sécurité"
+
+msgid "see them all"
+msgstr "les voir toutes"
+
+msgid "select"
+msgstr "sélectionner"
+
+msgid "select a"
+msgstr "sélectionner un"
+
+msgid "select a relation"
+msgstr "sélectionner une relation"
+
+msgid "select this entity"
+msgstr "sélectionner cette entité"
+
+msgid "semantic description of this attribute"
+msgstr "description sémantique de cet attribut"
+
+msgid "semantic description of this entity type"
+msgstr "description sémantique de ce type d'entité"
+
+msgid "semantic description of this relation"
+msgstr "description sémantique de cette relation"
+
+msgid "semantic description of this relation type"
+msgstr "description sémantique de ce type de relation"
+
+msgid "semantic description of this state"
+msgstr "description sémantique de cet état"
+
+msgid "semantic description of this transition"
+msgstr "description sémantique de cette transition"
+
+msgid "send email"
+msgstr "envoyer un courriel"
+
+msgid "september"
+msgstr "septembre"
+
+msgid "server debug information"
+msgstr "informations de déboguage serveur"
+
+msgid "server information"
+msgstr "informations serveur"
+
+msgid ""
+"should html fields being edited using fckeditor (a HTML WYSIWYG editor).  "
+"You should also select text/html as default text format to actually get "
+"fckeditor."
+msgstr ""
+"indique si les champs HTML doivent être édités avec fckeditor (un\n"
+"éditeur HTML WYSIWYG). Il est également conseillé de choisir text/html\n"
+"comme format de texte par défaut pour pouvoir utiliser fckeditor."
+
+#, python-format
+msgid "show %s results"
+msgstr "montrer %s résultats"
+
+msgid "show filter form"
+msgstr "afficher le filtre"
+
+msgid "show meta-data"
+msgstr "afficher les méta-données"
+
+msgid "site configuration"
+msgstr "configuration du site"
+
+msgid "site documentation"
+msgstr "documentation du site"
+
+msgid "site schema"
+msgstr "schéma du site"
+
+msgid "site title"
+msgstr "titre du site"
+
+msgid "site-wide property can't be set for user"
+msgstr "une propriété spécifique au site ne peut être propre à un utilisateur"
+
+msgid "sorry, the server is unable to handle this query"
+msgstr "désolé, le serveur ne peut traiter cette requête"
+
+msgid "startup views"
+msgstr "vues de départ"
+
+msgid "state"
+msgstr "état"
+
+msgid "state_of"
+msgstr "état de"
+
+msgid "state_of_object"
+msgstr "a pour état"
+
+msgid "status change"
+msgstr "changer l'état"
+
+msgid "status changed"
+msgstr "changement d'état"
+
+#, python-format
+msgid "status will change from %s to %s"
+msgstr "l'état va passer de %s à %s"
+
+msgid "subject"
+msgstr "sujet"
+
+msgid "subject/object cardinality"
+msgstr "cardinalité sujet/objet"
+
+msgid "sunday"
+msgstr "dimanche"
+
+msgid "surname"
+msgstr "nom"
+
+msgid "symetric"
+msgstr "symétrique"
+
+msgid "synopsis"
+msgstr "synopsis"
+
+msgid "system entities"
+msgstr "entités systèmes"
+
+msgid "table"
+msgstr "table"
+
+msgid "task progression"
+msgstr "avancement de la tâche"
+
+msgid "text"
+msgstr "texte"
+
+msgid "text/cubicweb-page-template"
+msgstr "contenu dynamique"
+
+msgid "text/html"
+msgstr "html"
+
+msgid "text/plain"
+msgstr "texte pur"
+
+msgid "text/rest"
+msgstr "ReST"
+
+msgid "the prefered email"
+msgstr "l'adresse électronique principale"
+
+#, python-format
+msgid "the value \"%s\" is already used, use another one"
+msgstr "la valeur \"%s\" est déjà utilisée, veuillez utiliser une autre valeur"
+
+msgid "this action is not reversible!"
+msgstr ""
+"Attention ! Cette opération va détruire les données de façon irréversible."
+
+msgid "this entity is currently owned by"
+msgstr "cette entité appartient à"
+
+msgid "this resource does not exist"
+msgstr "cette ressource est introuvable"
+
+msgid "thursday"
+msgstr "jeudi"
+
+msgid "timestamp of the latest source synchronization."
+msgstr "date de la dernière synchronisation avec la source."
+
+msgid "timetable"
+msgstr "emploi du temps"
+
+msgid "title"
+msgstr "titre"
+
+msgid "to"
+msgstr "à"
+
+msgid "to associate with"
+msgstr "pour associer à"
+
+msgid "to_entity"
+msgstr "vers l'entité"
+
+msgid "to_entity_object"
+msgstr "relation objet"
+
+msgid "to_state"
+msgstr "vers l'état"
+
+msgid "to_state_object"
+msgstr "transitions vers cet état"
+
+msgid "todo_by"
+msgstr "à faire par"
+
+msgid "transition is not allowed"
+msgstr "transition non permise"
+
+msgid "transition_of"
+msgstr "transition de"
+
+msgid "transition_of_object"
+msgstr "a pour transition"
+
+msgid "tuesday"
+msgstr "mardi"
+
+msgid "type"
+msgstr "type"
+
+msgid "ui"
+msgstr "propriétés génériques de l'interface"
+
+msgid "unauthorized value"
+msgstr "valeur non autorisée"
+
+msgid "unique identifier used to connect to the application"
+msgstr "identifiant unique utilisé pour se connecter à l'application"
+
+msgid "unknown property key"
+msgstr "clé de propriété inconnue"
+
+msgid "upassword"
+msgstr "mot de passe"
+
+msgid "update"
+msgstr "modification"
+
+msgid "update_perm"
+msgstr "modification"
+
+msgid "update_permission"
+msgstr "permission de modification"
+
+msgid "update_permission_object"
+msgstr "a la permission de modifier"
+
+#, python-format
+msgid "updated %(etype)s #%(eid)s (%(title)s)"
+msgstr "modification de l'entité %(etype)s #%(eid)s (%(title)s)"
+
+msgid "use template languages"
+msgstr "utiliser les langages de template"
+
+msgid ""
+"use to define a transition from one or multiple states to a destination "
+"states in workflow's definitions."
+msgstr ""
+"utiliser dans une définition de processus pour ajouter une transition depuis "
+"un ou plusieurs états vers un état de destination."
+
+msgid "use_email"
+msgstr "adresse électronique"
+
+msgid "use_email_object"
+msgstr "adresse utilisée par"
+
+msgid "use_template_format"
+msgstr "utilisation du format 'cubicweb template'"
+
+msgid ""
+"used for cubicweb configuration. Once a property has been created you can't "
+"change the key."
+msgstr ""
+"utilisé pour la configuration de l'application. Une fois qu'une propriété a "
+"été créée, vous ne pouvez plus changer la clé associée"
+
+msgid ""
+"used to associate simple states to an entity type and/or to define workflows"
+msgstr "associe les états à un type d'entité pour définir un workflow"
+
+msgid "used to grant a permission to a group"
+msgstr "utilisé pour donner une permission à un groupe"
+
+#, python-format
+msgid ""
+"user %s has made the following change(s):\n"
+"\n"
+msgstr ""
+"l'utilisateur %s a effectué le(s) changement(s) suivant(s):\n"
+"\n"
+
+msgid ""
+"user for which this property is applying. If this relation is not set, the "
+"property is considered as a global property"
+msgstr ""
+"utilisateur à qui s'applique cette propriété. Si cette relation n'est pas "
+"spécifiée la propriété est considérée comme globale."
+
+msgid "user interface encoding"
+msgstr "encodage utilisé dans l'interface utilisateur"
+
+msgid "user preferences"
+msgstr "préférences utilisateur"
+
+msgid "users"
+msgstr "utilisateurs"
+
+msgid "users using this bookmark"
+msgstr "utilisateurs utilisant ce signet"
+
+msgid "validate modifications on selected items"
+msgstr "valider les modifications apportées aux éléments sélectionnés"
+
+msgid "validating..."
+msgstr "validation en cours ..."
+
+msgid "value"
+msgstr "valeur"
+
+msgid "value associated to this key is not editable manually"
+msgstr "la valeur associée à cette clé n'est pas éditable manuellement"
+
+msgid "vcard"
+msgstr "vcard"
+
+msgid "view"
+msgstr "voir"
+
+msgid "view all"
+msgstr "voir tous"
+
+msgid "view detail for this entity"
+msgstr "voir les détails de cette entité"
+
+msgid "view workflow"
+msgstr "voir les états possibles"
+
+msgid "views"
+msgstr "vues"
+
+msgid "visible"
+msgstr "visible"
+
+msgid "wednesday"
+msgstr "mercredi"
+
+msgid "week"
+msgstr "sem."
+
+#, python-format
+msgid "welcome %s !"
+msgstr "bienvenue %s !"
+
+msgid "wf_info_for"
+msgstr "historique de"
+
+msgid "wf_info_for_object"
+msgstr "historique des transitions"
+
+msgid ""
+"when multiple addresses are equivalent (such as python-projects@logilab.org "
+"and python-projects@lists.logilab.org), set this to true on one of them "
+"which is the preferred form."
+msgstr ""
+"quand plusieurs adresses sont équivalentes (comme python-projects@logilab."
+"org et python-projects@lists.logilab.org), mettez cette propriété à vrai sur "
+"l'une d'entre elles qui sera la forme canonique"
+
+msgid "wikiid"
+msgstr "identifiant wiki"
+
+#, python-format
+msgid "workflow for %s"
+msgstr "workflow pour %s"
+
+msgid "xbel"
+msgstr "xbel"
+
+msgid "xml"
+msgstr "xml"
+
+msgid "yes"
+msgstr "oui"
+
+msgid "you have been logged out"
+msgstr "vous avez été déconnecté"
+
+#~ msgid "%s constraint failed"
+#~ msgstr "La contrainte %s n'est pas satisfaite"
+
+#~ msgid "Loading"
+#~ msgstr "chargement"
+
+#~ msgid "Problem occured while setting new value"
+#~ msgstr "Un problème est survenu lors de la mise à jour"
+
+#~ msgid "and"
+#~ msgstr "et"
+
+#~ msgid "cancel edition"
+#~ msgstr "annuler l'édition"
+
+#~ msgid ""
+#~ "default language (look at the i18n directory of the application to see "
+#~ "available languages)"
+#~ msgstr ""
+#~ "langue par défaut (regarder le répertoire i18n de l'application pour voir "
+#~ "les langues disponibles)"
+
+#~ msgid "footer"
+#~ msgstr "pied de page"
+
+#~ msgid "header"
+#~ msgstr "en-tête de page"
+
+#~ msgid "iCal"
+#~ msgstr "iCal"
+
+#~ msgid "incorrect value for type \"%s\""
+#~ msgstr "valeur incorrecte pour le type \"%s\""
+
+#~ msgid "linked"
+#~ msgstr "lié"
+
+#~ msgid ""
+#~ "maximum number of related entities to display in in the restriction view"
+#~ msgstr ""
+#~ "nombre maximum d'entités liées à afficher dans la vue de restriction"
+
+#~ msgid "see also"
+#~ msgstr "voir aussi"
+
+#~ msgid "workflow history"
+#~ msgstr "historique du workflow"
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/interfaces.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,234 @@
+"""Standard interfaces that entity classes may implement
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+
+__docformat__ = "restructuredtext en"
+
+from logilab.common.interface import Interface
+
+class IEmailable(Interface):
+    """interface for emailable entities"""
+    
+    def get_email(self):
+        """return email address"""
+
+    @classmethod
+    def allowed_massmail_keys(cls):
+        """returns a set of allowed email substitution keys
+
+        The default is to return the entity's attribute list but an
+        entity class might override this method to allow extra keys.
+        For instance, the Person class might want to return a `companyname`
+        key.
+        """
+
+    def as_email_context(self):
+        """returns the dictionary as used by the sendmail controller to
+        build email bodies.
+        
+        NOTE: the dictionary keys should match the list returned by the
+        `allowed_massmail_keys` method.
+        """
+
+
+class IWorkflowable(Interface):
+    """interface for entities dealing with a specific workflow"""
+
+    @property
+    def state(self):
+        """return current state"""
+
+    def change_state(self, stateeid, trcomment=None, trcommentformat=None):
+        """change the entity's state according to a state defined in given
+        parameters
+        """
+    
+    def can_pass_transition(self, trname):
+        """return true if the current user can pass the transition with the
+        given name
+        """
+    
+    def latest_trinfo(self):
+        """return the latest transition information for this entity
+        """
+
+class IProgress(Interface):
+    """something that has a cost, a state and a progression
+
+    Take a look at cubicweb.common.mixins.ProgressMixIn for some
+    default implementations
+    """
+
+    @property
+    def cost(self):
+        """the total cost"""
+
+    @property
+    def done(self):
+        """what is already done"""
+
+    @property
+    def todo(self):
+        """what remains to be done"""
+    
+    def progress_info(self):
+        """returns a dictionary describing progress/estimated cost of the
+        version.
+
+        mandatory keys are ('estimated', 'done', 'todo')
+        optional keys are ('notestimated', 'notestimatedcorrected',
+                           'estimatedcorrected')
+        'notestimated' and 'notestimatedcorrected' should default to 0
+        'estimatedcorrected' should default to 'estimated'
+       """
+
+    def finished(self):
+        """returns True if status is finished"""
+
+    def in_progress(self):
+        """returns True if status is not finished"""
+
+    def progress(self):
+        """returns the % progress of the task item"""
+        
+    
+class IMileStone(IProgress):
+    """represents an ITask's item"""
+    
+    parent_type = None # specify main task's type
+    
+    def get_main_task(self):
+        """returns the main ITask entity"""
+
+    def initial_prevision_date(self):
+        """returns the initial expected end of the milestone"""
+        
+    def eta_date(self):
+        """returns expected date of completion based on what remains
+        to be done
+        """
+
+    def completion_date(self):
+        """returns date on which the subtask has been completed"""
+
+    def contractors(self):
+        """returns the list of persons supposed to work on this task"""
+
+
+class ITree(Interface):
+
+    def parent(self):
+        """returns the parent entity"""
+
+    def children(self):
+        """returns the item's children"""
+
+    def __iter__(self):
+        """iterates over the item's children"""
+        
+    def is_leaf(self):
+        """returns true if this node has no child"""
+
+    def is_root(self):
+        """returns true if this node has no parent"""
+
+    def root(self):
+        """return the root object"""
+
+
+## web specific interfaces ####################################################
+
+
+class IPrevNext(Interface):
+    """interface for entities which can be linked to a previous and/or next
+    entity
+    """
+    
+    def next_entity(self):
+        """return the 'next' entity"""
+    def previous_entity(self):
+        """return the 'previous' entity"""
+
+
+class IBreadCrumbs(Interface):
+    """interface for entities which can be "located" on some path"""
+    
+    def breadcrumbs(self, view, recurs=False):
+        """return a list containing some:
+        
+        * tuple (url, label)
+        * entity
+        * simple label string
+
+        defining path from a root to the current view
+
+        the main view is given as argument so breadcrumbs may vary according
+        to displayed view (may be None). When recursing on a parent entity,
+        the `recurs` argument should be set to True.
+        """
+
+
+class IDownloadable(Interface):
+    """interface for downloadable entities"""
+    
+    def download_url(self): # XXX not really part of this interface
+        """return an url to download entity's content"""
+    def download_content_type(self):
+        """return MIME type of the downloadable content"""
+    def download_encoding(self):
+        """return encoding of the downloadable content"""
+    def download_file_name(self):
+        """return file name of the downloadable content"""
+    def download_data(self):
+        """return actual data of the downloadable content"""
+
+
+class IEmbedable(Interface):
+    """interface for embedable entities"""
+    
+    def embeded_url(self):
+        """embed action interface"""
+    
+class ICalendarable(Interface):
+    """interface for items that do have a begin date 'start' and an end
+date 'stop'"""    
+    
+class ICalendarViews(Interface):
+    """calendar views interface"""
+    def matching_dates(self, begin, end):
+        """
+        :param begin: day considered as begin of the range (`DateTime`)
+        :param end: day considered as end of the range (`DateTime`)
+        
+        :return:
+          a list of dates (`DateTime`) in the range [`begin`, `end`] on which
+          this entity apply
+        """
+        
+class ITimetableViews(Interface):
+    """timetable views interface"""
+    def timetable_date(self):
+        """XXX explain
+        
+        :return: date (`DateTime`)
+        """
+
+class IGeocodable(Interface):
+    """interface required by geocoding views such as gmap-view"""
+
+    @property
+    def latitude(self):
+        """returns the latitude of the entity"""
+
+    @property
+    def longitude(self):
+        """returns the longitude of the entity"""
+
+    def marker_icon(self):
+        """returns the icon that should be used as the marker
+        (returns None for default)
+        """
+        
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/man/cubicweb-ctl.1	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,27 @@
+.TH cubicweb\-ctl 1 "2008-01-14" cubicweb\-ctl
+.SH NAME
+.B cubicweb\-ctl
+\- cubicweb control utility
+
+
+.SH SYNOPSIS
+.B  cubicweb-ctl <command>
+[
+.I OPTIONS
+] [
+.I <command arguments>
+]
+
+.SH DESCRIPTION
+.PP
+CubicWeb utility to create and control instances, and more generally do all
+cubicweb's related tasks you can have to do using the command line. Available
+commands will depend on which parts of the framework are installed.
+.PP
+Type
+.B "cubicweb\-ctl <command> \fB\-\-help\fR"
+for more information about a specific command.
+
+.SH COPYRIGHT 
+Copyright (c) 2003-2008 LOGILAB S.A. (Paris, FRANCE).
+http://www.logilab.fr/ -- mailto:contact@logilab.fr
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/md5crypt.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,116 @@
+#########################################################
+# md5crypt.py
+#
+# 0423.2000 by michal wallace http://www.sabren.com/
+# based on perl's Crypt::PasswdMD5 by Luis Munoz (lem@cantv.net)
+# based on /usr/src/libcrypt/crypt.c from FreeBSD 2.2.5-RELEASE
+#
+# MANY THANKS TO
+#
+#  Carey Evans - http://home.clear.net.nz/pages/c.evans/
+#  Dennis Marti - http://users.starpower.net/marti1/
+#
+#  For the patches that got this thing working!
+#
+# modification by logilab:
+# * remove usage of the string module
+# * don't include the magic string in the output string
+#   for true crypt.crypt compatibility
+#########################################################
+"""md5crypt.py - Provides interoperable MD5-based crypt() function
+
+SYNOPSIS
+
+        import md5crypt
+
+        cryptedpassword = md5crypt.md5crypt(password, salt);
+
+DESCRIPTION
+
+unix_md5_crypt() provides a crypt()-compatible interface to the
+rather new MD5-based crypt() function found in modern operating systems.
+It's based on the implementation found on FreeBSD 2.2.[56]-RELEASE and
+contains the following license in it:
+
+ "THE BEER-WARE LICENSE" (Revision 42):
+ <phk@login.dknet.dk> wrote this file.  As long as you retain this notice you
+ can do whatever you want with this stuff. If we meet some day, and you think
+ this stuff is worth it, you can buy me a beer in return.   Poul-Henning Kamp
+"""
+
+MAGIC = '$1$'                        # Magic string
+ITOA64 = "./0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
+
+import md5
+
+def to64 (v, n):
+    ret = ''
+    while (n - 1 >= 0):
+        n = n - 1
+        ret = ret + ITOA64[v & 0x3f]
+        v = v >> 6
+    return ret
+
+
+def crypt(pw, salt, magic=None):
+    if magic==None:
+        magic = MAGIC
+    # Take care of the magic string if present
+    if salt[:len(magic)] == magic:
+        salt = salt[len(magic):]
+    # salt can have up to 8 characters:
+    salt = salt.split('$', 1)[0]
+    salt = salt[:8]
+    ctx = pw + magic + salt
+    final = md5.md5(pw + salt + pw).digest()
+    for pl in range(len(pw),0,-16):
+        if pl > 16:
+            ctx = ctx + final[:16]
+        else:
+            ctx = ctx + final[:pl]
+    # Now the 'weird' xform (??)
+    i = len(pw)
+    while i:
+        if i & 1:
+            ctx = ctx + chr(0)  #if ($i & 1) { $ctx->add(pack("C", 0)); }
+        else:
+            ctx = ctx + pw[0]
+        i = i >> 1
+    final = md5.md5(ctx).digest()
+    # The following is supposed to make
+    # things run slower. 
+    # my question: WTF???
+    for i in range(1000):
+        ctx1 = ''
+        if i & 1:
+            ctx1 = ctx1 + pw
+        else:
+            ctx1 = ctx1 + final[:16]
+        if i % 3:
+            ctx1 = ctx1 + salt
+        if i % 7:
+            ctx1 = ctx1 + pw
+        if i & 1:
+            ctx1 = ctx1 + final[:16]
+        else:
+            ctx1 = ctx1 + pw
+        final = md5.md5(ctx1).digest()
+    # Final xform
+    passwd = ''
+    passwd = passwd + to64((int(ord(final[0])) << 16)
+                           |(int(ord(final[6])) << 8)
+                           |(int(ord(final[12]))),4)
+    passwd = passwd + to64((int(ord(final[1])) << 16)
+                           |(int(ord(final[7])) << 8)
+                           |(int(ord(final[13]))), 4)
+    passwd = passwd + to64((int(ord(final[2])) << 16)
+                           |(int(ord(final[8])) << 8)
+                           |(int(ord(final[14]))), 4)
+    passwd = passwd + to64((int(ord(final[3])) << 16)
+                           |(int(ord(final[9])) << 8)
+                           |(int(ord(final[15]))), 4)
+    passwd = passwd + to64((int(ord(final[4])) << 16)
+                           |(int(ord(final[10])) << 8)
+                           |(int(ord(final[5]))), 4)
+    passwd = passwd + to64((int(ord(final[11]))), 2)
+    return salt + '$' + passwd
Binary file misc/cwdesklets/gfx/bg.png has changed
Binary file misc/cwdesklets/gfx/border-left.png has changed
Binary file misc/cwdesklets/gfx/logo_cw.png has changed
Binary file misc/cwdesklets/gfx/rss.png has changed
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/cwdesklets/rql_query.display	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,22 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<display window-flags="sticky, below" width="185" height="200">
+
+	<sensor id="r" module="rqlsensor"/>
+        <sensor id="FS" module="FontSelector,1,Sans 10  black"/>
+	
+	<!-- left border -->
+	<group x="0" width="5" height="200" bg-uri="gfx/border-left.png"/>
+	
+	<image x="10" y="0" uri="gfx/logo_cw.png"/>
+
+        <array id="results" x="10" y="30"
+               watch="layout=r:layout, length=r:length">
+	  <label id="lbls1" color="black"
+		 on-enter="r:enter-line"
+		 on-leave="r:leave-line"  
+		 on-click="r:click-line"
+  		 watch="value=r:result, font=FS:font0, color=r:resultbg"/>
+        </array>
+
+</display>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/cwdesklets/rqlsensor/__init__.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,103 @@
+import webbrowser
+reload(webbrowser)
+
+from sensor.Sensor import Sensor
+from utils import datatypes, i18n
+
+from cubicweb.dbapi import connect
+
+_ = str
+
+class RQLSensor(Sensor):
+
+    def __init__(self, *args):
+        global _; _ = i18n.Translator("rql-desklet")
+        Sensor.__init__(self)
+        # define configuration
+        self._set_config_type("appid", datatypes.TYPE_STRING, "")
+        self._set_config_type("user", datatypes.TYPE_STRING, "")
+        self._set_config_type("passwd", datatypes.TYPE_SECRET_STRING, "")
+        self._set_config_type("rql", datatypes.TYPE_STRING, "")
+        self._set_config_type("url", datatypes.TYPE_STRING, "")
+        self._set_config_type("delay", datatypes.TYPE_STRING, "600")
+        # default timer
+        self._add_timer(20, self.__update)
+
+    def get_configurator(self):
+        configurator = self._new_configurator()
+        configurator.set_name(_("RQL"))
+        configurator.add_title(_("CubicWeb source settings"))
+        configurator.add_entry(_("ID",), "appid", _("The application id of this source"))
+        configurator.add_entry(_("User",), "user", _("The user to connect to this source"))
+        configurator.add_entry(_("Password",), "passwd", _("The user's password to connect to this source"))
+        configurator.add_entry(_("URL",), "url", _("The url of the web interface for this source"))
+        configurator.add_entry(_("RQL",), "rql", _("The rql query"))
+        configurator.add_entry(_("Update interval",), "delay", _("Delay in seconds between updates"))
+        return configurator
+
+
+    def call_action(self, action, path, args=[]):
+        index = path[-1]
+        output = self._new_output()
+#        import sys
+#        print >>sys.stderr, action, path, args
+        if action=="enter-line":
+            # change background
+            output.set('resultbg[%s]' % index, 'yellow')
+        elif action=="leave-line":
+            # change background
+            output.set('resultbg[%s]' % index, 'black')
+        elif action=="click-line":
+            # open url
+            output.set('resultbg[%s]' % index, 'black')
+            webbrowser.open(self._urls[index])
+        self._send_output(output)
+        
+    def __get_connection(self):
+        try:
+            return self._v_cnx
+        except AttributeError:
+            appid, user, passwd = self._get_config("appid"), self._get_config("user"), self._get_config("passwd")
+            cnx = connect(database=appid, user=user, password=passwd)
+            self._v_cnx = cnx
+            return cnx
+
+    def __run_query(self, output):
+        base = self._get_config('url')
+        rql = self._get_config('rql')
+        cnx = self.__get_connection()
+        cursor = cnx.cursor()
+        try:
+            rset = cursor.execute(rql)
+        except:
+            del self._v_cnx
+            raise
+        self._urls = []
+        output.set('layout', 'vertical, 14')        
+        output.set('length', rset.rowcount)        
+        i = 0
+        for line in rset:
+            output.set('result[%s]' % i, ', '.join([str(v) for v in line[1:]]))
+            output.set('resultbg[%s]' % i, 'black')
+            try:
+                self._urls.append(base % 'Any X WHERE X eid %s' % line[0])
+            except:
+                self._urls.append('')
+            i += 1
+    
+    def __update(self):
+        output = self._new_output()
+        try:
+            self.__run_query(output)
+        except Exception, ex:
+            import traceback
+            traceback.print_exc()
+            output.set('layout', 'vertical, 10')        
+            output.set('length', 1)        
+            output.set('result[0]', str(ex))
+        self._send_output(output)
+        self._add_timer(int(self._get_config('delay'))*1000, self.__update)
+
+        
+def new_sensor(args):
+    return RQLSensor(*args)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/cwdesklets/web_query.display	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,39 @@
+<?xml version="1.0" encoding="ISO-8859-1" ?>
+
+<display window-flags="sticky, below" bg-color="#FFFFFF00">
+    <meta author="Sylvain Thénault" name="web_query" version="1.0" category="net/www">
+    
+    <!-- Sensors -->
+    <sensor id="w" module="webquery"/>
+
+    <!-- gfx -->
+    <image id="top_left_corner" uri="gfx/top_left_corner.png"/>
+    <image id="top_right_corner" relative-to="desklet, x" anchor="sw" uri="gfx/top_right_corner.png"/>
+    <image id="bottom_left_corner" relative-to="desklet, y" anchor="ne" uri="gfx/bottom_left_corner.png"/>
+    <image id="bottom_right_corner" relative-to="desklet, xy" anchor="nw" uri="gfx/bottom_right_corner.png"/>
+    <group relative-to="top_left_corner, x" height="10" bg-uri="gfx/top_edge.png" watch="width=s:border_width"/>
+    <group relative-to="bottom_left_corner, x" height="20" bg-uri="gfx/bottom_edge.png" watch="width=s:border_width"/>
+    <group relative-to="top_left_corner, y" width="14" bg-uri="gfx/left_edge.png" watch="height=s:border_height"/>
+    <group relative-to="top_right_corner, y" width="15" bg-uri="gfx/right_edge.png" watch="height=s:border_height"/>
+
+    <!-- Main -->
+    <group id="desklet" relative-to="top_left_corner, xy" bg-color="#ffffff11" on-click="s:show_or_hide_message_window" on-enter="s:show_message_window" on-leave="s:hide_message_window" on-menu="s:on_menu">
+        <group id="left_corner_spacer1" width="5" height="5"/>
+        <group id="header" relative-to="left_corner_spacer1, xy">
+            <label id="channel" watch="value=s:channel, font=f:font2, color=f:color2"/>
+            <label id="topic" relative-to="channel, x" x="15" watch="value=s:topic, font=f:font2, color=f:color2"/>
+        </group>
+        <group id="right_corner_spacer1" relative-to="header, x" width="5" height="5"/>
+        
+        <group id="horizontal_divider_1" relative-to="header, y" height="1" x="30" y="5" bg-color="#ffffff11" watch="width=s:horizontal_divider_width"/>
+        <group id="horizontal_divider_2" relative-to="horizontal_divider_1, y" height="2" bg-color="#ffffff33" watch="width=s:horizontal_divider_width"/>
+        <group id="horizontal_divider_3" relative-to="horizontal_divider_2, y" height="1" bg-color="#ffffff11" watch="width=s:horizontal_divider_width"/>
+        <group id="horizontal_divider_4" relative-to="horizontal_divider_3, y" height="1" bg-color="#00000033" watch="width=s:horizontal_divider_width"/>
+        
+        <label id="clients" relative-to="header, y" y="16" watch="value=s:clients, font=f:font1, color=f:color1" on-scroll="s:scroll_clients"/>
+        <label id="msg" relative-to="clients, x" x="15" watch="value=s:msg, font=f:font0, color=f:color0" on-scroll="s:scroll_history"/>
+        <group id="left_corner_spacer2" relative-to="clients, y" width="5" height="5"/>
+        <group id="right_corner_spacer2" relative-to="msg, xy" width="5" height="5"/>
+    </group>
+
+</display>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/cwfs/A_FAIRE	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,14 @@
+TACHES
+======
+
+-- écrire objet stocke/manipule les données
+
+-- extraire tests de chaîne de caractère
+
+* utiliser sqlite 
+
+* écrire fonction prend chemin en argument et renvoie contenu
+
+* extraire tests (chaîne de caractère) de spec 
+
+* utiliser yams pour schéma
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/cwfs/cwfs-spec.txt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,180 @@
+==========================
+ Specification cubicwebfs
+==========================
+
+Remarque: cubicwebfs c'est le siamois de yamsfs
+en fait c'est un yamsfs avec une interrogation
+de base RQL
+
+Modèle
+-------
+
+Description du modèle;
+::
+  societe
+  nom
+  ville
+
+  affaire
+  ref
+
+  document
+  annee 
+  mois 
+  jour
+  type {RAP,CLI,OFR,FCT}
+  fichier
+
+document concerne affaire
+affaire concerne societe
+
+Contenu de la base exemple
+---------------------------
+
+societe  | nom     | ville    |
+         | CETIAD  | Dijon    |
+         | EDF R&D | Clamart  |
+         | Logilab | Paris    |
+
+affaire  | ref    | concerne  |
+         | CTIA01 | CETIAD    |
+         | EDFR01 | EDF R&D   |
+         | EDFR02 | EDF R&D   |
+
+document | annee | mois | jour | type | concerne | fichier                                     |
+         | 2004  | 09   | 06   | PRE  | CTIA01   | depodoc/2004/09/CTIA01-040906-PRE-1-01.pdf  |  
+         | 2005  | 02   | 01   | CLI  | EDFR01   | depodoc/2005/02/EDFR01-050201-CLI-1-01.pdf  |  
+         | 2005  | 03   | 22   | OFR  | EDFR01   | depodoc/2005/02/EDFR01-050322-OFR-1-01.pdf  |  
+
+
+Exemples de chemins/recherches
+-------------------------------
+
+Cherche documents de mars 2005;
+::
+  /document/annee/2005/mois/03/
+
+
+Dont le contenu successif serait;
+
+Test::
+
+  $ ls /document
+  annee/	mois/		jour/		type/
+  affaire/	concerne/	CTIA01-040906-PRE-1-01.pdf
+  EDFR01-050201-CLI-1-01.pdf	EDFR01-050322-OFR-1-01.pdf	
+
+  $ ls /document/annee/
+  2004/		2005/
+
+  $ ls /document/annee/2005/
+  mois/		jour/		type/		affaire/
+  concerne/	EDFR01-050201-CLI-1-01.pdf	EDFR01-050322-OFR-1-01.pdf
+
+  $ ls /document/annee/2005/mois/
+  02/		03/
+
+  $ ls /document/annee/2005/mois/03/
+  jour/		type/		affaire/	concerne/
+  EDFR01-050322-OFR-1-01.pdf
+
+
+Question: est-ce que fichier/ ne va pas nous manquer ?
+
+
+Cherche documents relatifs à CTIA01; 
+::
+  /affaire/ref/CTIA01/document/
+
+Dont le contenu des répertoires successifs serait:
+
+Test::
+
+  $ ls /affaire/
+  ref/		societe/	concerne/	document/
+  concerne_par/	CTIA01		EDFR01		EDFR02
+
+  $ ls /affaire/ref/
+  CTIA01/	EDFR01/		EDFR02/
+
+  $ ls /affaire/ref/CTIA01/
+  societe/	concerne/	document/	concerne_par/
+  
+  $ ls /affaire/ref/CTIA01/document/
+  annee/	mois/		jour/		type/
+  CTIA01-040906-PRE-1-01.pdf
+
+
+Cherche documents des affaires qui concernent CETIAD;
+::
+  /societe/nom/CETIAD/affaire/document/
+
+Dont le contenu des répertoires successifs serait;
+
+Test::
+
+  $ ls /societe/
+  nom/		ville/		affaire/	concerne_par/
+  CETIAD	EDF R&D		Logilab
+
+  $ ls /societe/nom/
+  CETIAD	EDF R&D		Logilab
+
+  $ ls /societe/nom/CETIAD/
+  ville/	affaire/	concerne_par/	CETIAD		Logilab
+
+  $ ls /societe/nom/CETIAD/affaire/
+  ref/		societe/	concerne/	document/
+  concerne_par/	CTIA01		
+
+  $ ls /societe/nom/CETIAD/affaire/document/
+  annee/	mois/		jour/		type/
+  affaire/	concerne/	CTIA01-040906-PRE-1-01.pdf
+
+
+En particulier, pour la recherche ci-dessus on ne peut pas écrire;
+::
+  /document/affaire/concerne/societe/CETIAD/
+
+La logique est que si on est dans un répertoire document, il faut
+qu'il contienne des documents.
+
+Cherche documents de 2002 qui concernent des affaires 
+qui concernent CETIAD;
+::
+  /societe/CETIAD/affaire/document/annee/2002/
+
+Question: est-ce que les relations doivent être des composants
+du chemin ?
+Question : si les relations ne font pas partie du chemin, il faudrait
+pouvoir faire des recherches en utilisant des relations anonymes (ce
+qui est impossible en RQL par exemple);
+::
+  /document/affaire/... s'il existe plusieurs relations entre
+  les entités document et affaire, on ne peut pas s'en sortir
+
+Question: que va-t-il se passer pour des chemins du type;
+::
+  /affaire/CTIA*/document/
+
+Nicolas: à mon avis on a rien à faire, car c'est le shell qui
+s'en occupe. De la même façon, le système de fichier n'a pas
+à se préoccuper de ~/ et les programmes ne reçoivent pas le "qqch*"
+en argument, mais directement la liste.
+
+Attention: si jamais l'arborescence est sans fond, les
+commandes récursives vont prendre du temps...
+
+Attention: dans un premier temps, un système de fichiers en
+lecture seule est satisfaisant. on verra ensuite pour l'édition.
+pour l'édition, on peut s'inspirer du external editor de zope
+et avoir un format d'échange XML entre le serveur et l'éditeur.
+
+Le cas suivant est débile, faut-il l'interdire ?
+::
+  /document/affaire/societe/concerne_par/affaire/concerne_par/document
+
+
+NB: manque détail d'un cas comme /document/annee/2005/concerne/affaire/
+
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/cwfs/cwfs.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,156 @@
+class Schema :
+
+    def __init__(self, schema) :
+        self._schema = schema
+
+    def get_attrs(self, entity) :
+        return self._schema[entity][0]
+
+    def get_relations(self, entity) :
+        return self._schema[entity][1]
+
+    def get_attr_index(self, entity, attr) :
+        return list(self._schema[entity][0]).index(attr)
+
+SCHEMA = Schema({'societe': ( ('nom','ville'),
+                              [('concerne_par','affaire'),
+                               ] ),
+                 'affaire': ( ('ref',),
+                              [('concerne','societe'),
+                               ('concerne_par', 'document')
+                               ] ),
+                 'document':( ('fichier', 'annee','mois','jour','type'),
+                              [('concerne','affaire'),
+                               ] ),
+                 })
+
+    
+
+DATA = { 'societe': [ ('CETIAD', 'Dijon'),
+                      ('EDF_R&D', 'Clamart'),
+                      ('Logilab', 'Paris'),
+                      ],
+         'affaire': [ ('CTIA01', 'CETIAD'),
+                      ('EDFR01', 'EDF_R&D'),
+                      ('EDFR02', 'EDF_R&D'),
+                      ],
+         'document':[ ('CTIA01-040906-PRE-1-01.pdf','2004','09','06','PRE','CTIA01'),
+                      ('EDFR01-050201-CLI-1-01.pdf','2005','02','01','CLI','EDFR01'),
+                      ('EDFR01-050322-OFR-1-01.pdf','2005','03','22','OFR','EDFR01'),
+                      ],
+         }
+
+def get_data(entity, where=[]) :
+    for value in DATA[entity] :
+        for index, val in where :
+            if value[index] != val :
+                break
+        else :
+            yield value
+
+class PathParser :
+
+    def __init__(self, schema, path) :
+        self.schema = schema
+        self.path = path
+        self._components = iter([comp for comp in self.path.split('/') if comp])
+        self._entity = None
+        self._attr = None
+        self._rel = None
+        self._restrictions = []
+        
+    def parse(self) :
+        self._entity = self._components.next()
+        try:
+            self.process_entity()
+        except StopIteration :
+            pass
+
+    def process_entity(self) :
+        _next = self._components.next()
+        if _next in self.schema.get_attrs(self._entity) :
+            self._attr = _next
+            _next = self._components.next()
+            self._restrictions.append( (self._entity, self._attr, _next) )
+            self._attr = None
+            self._rel = None
+            self.process_entity()
+
+    def get_list(self) :
+        if self._rel :
+            return
+        elif self._attr :
+            where = []
+            for e,a,v in self._restrictions :
+                i = self.schema.get_attr_index(e, a)
+                where.append( (i,v) )
+            i = self.schema.get_attr_index(self._entity, self._attr)
+            for values in get_data(self._entity,where) :
+                yield values[i]+'/'
+        else :
+            attr_restrict = [a for e,a,v in self._restrictions]
+            for attr in self.schema.get_attrs(self._entity) :
+                if attr not in attr_restrict :
+                    yield attr+'/'
+            for data in DATA[self._entity]:
+                yield data[0]
+            for nom, entity in self.schema.get_relations(self._entity) :
+                yield nom+'/'
+                yield entity+'/'
+    
+def ls(path) :
+    p = PathParser(SCHEMA,path)
+    p.parse()
+    return list(p.get_list())
+
+
+class SytPathParser :
+
+    def __init__(self, schema, path) :
+        self.schema = schema
+        self.path = path
+        self._components = iter([comp for comp in self.path.split('/') if comp])
+        self._e_type = None
+        self._restrictions = []
+        self._alphabet = list('ABCDEFGHIJKLMNOPQRSTUVWXYZ')
+        
+    def parse(self):
+        self._var = self._alphabet.pop(0)
+        self._e_type = self._components.next()
+        e_type = self._e_type.capitalize()
+        self._restrictions.append('%s is %s' % (self._var, e_type))
+        try:
+            self.process_entity()
+        except StopIteration :
+            pass
+        return 'Any %s WHERE %s' % (self._var, ', '.join(self._restrictions))
+    
+    def process_entity(self) :
+        _next = self._components.next()
+        if _next in self.schema.get_attrs(self._e_type) :
+            attr = _next
+            try:
+                _next = self._components.next()
+                self._restrictions.append('%s %s %s' % (self._var, attr, _next))
+            except StopIteration:
+                a_var = self._alphabet.pop(0)
+                self._restrictions.append('%s %s %s' % (self._var, attr, a_var) )
+                self._var = a_var
+                raise
+        elif _next in [r for r,e in self.schema.get_relations(self._e_type)]:
+            rel = _next
+            r_var = self._alphabet.pop(0)
+            self._restrictions.append('%s %s %s' % (self._var, rel, r_var))
+            self._var = r_var
+            try:
+                _next = self._components.next()
+                self._restrictions.append('%s is %s' % (r_var, _next.capitalize()))
+            except StopIteration:
+                raise
+        self.process_entity()            
+
+        
+def to_rql(path) :
+    p = SytPathParser(SCHEMA,path)
+    return p.parse()
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/cwfs/cwfs_test.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,46 @@
+from logilab.common.testlib import TestCase, unittest_main
+
+import cubicwebfs
+import sre
+
+def spec_parser(filename) :
+    """
+    extract tests from specification
+    """
+    sections = []
+    buffer = ""
+    in_section = False
+    for line in file(filename) :
+        if line.startswith('Test::'):
+            in_section = True
+            buffer = ""
+        elif in_section :
+            if line.startswith("  ") or not line.strip() :
+                buffer += line.lstrip()
+            else :
+                sections.append(buffer)
+                in_section = False
+    tests = []
+    for section in sections :
+        subsections = [t for t in section.strip().split('$ ls') if t]
+        for subsection in subsections :
+            path, results = subsection.splitlines()[0], subsection.splitlines()[1:]
+            path = path.strip()
+            items = set([i for i in sre.split('[\t\n]', '\n'.join(results)) if i])
+            tests.append((path, items))
+    return tests
+
+tests = spec_parser("cubicwebfs-spec.txt")
+
+class monTC(TestCase) :
+    pass
+
+for index, (path, results) in enumerate(tests) :
+    def f(self, p=path, r=results) :
+        res = set(cubicwebfs.ls(p))
+        self.assertEqual(r, res) #, 'en trop %s\nmanque %s' % (r-results,results-r))
+    f.__doc__ = "%s %s"%(index,path)
+    setattr(monTC,'test_%s'%index,f)
+
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/cwzope/cwzope.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,31 @@
+from AccessControl import getSecurityManager 
+
+from cubicweb.dbapi import connect, Connection, Cursor
+from cubicweb.common.utils import ResultSet, ResultSetIterator, ResultSetRow, Entity
+
+Connection.__allow_access_to_unprotected_subobjects__ = 1
+Cursor.__allow_access_to_unprotected_subobjects__ = 1
+ResultSet.__allow_access_to_unprotected_subobjects__ = 1
+ResultSetIterator.__allow_access_to_unprotected_subobjects__ = 1
+ResultSetRow.__allow_access_to_unprotected_subobjects__ = 1
+Entity.__allow_access_to_unprotected_subobjects__ = 1
+
+CNX_CACHE = {}
+
+def get_connection(context, user=None, password=None,
+                   host=None, database=None, group='cubicweb'):
+    """get a connection on an cubicweb server"""
+    request = context.REQUEST
+    zope_user = getSecurityManager().getUser()
+    if user is None:
+        user = zope_user.getId()
+    key = (user, host, database)
+    try:
+        return CNX_CACHE[key]
+    except KeyError:
+        if password is None:
+            password = zope_user._getPassword()
+        cnx = connect(user, password, host, database, group)
+        CNX_CACHE[key] = cnx
+        return cnx
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/migration/2.37.1_Any.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,2 @@
+if 'Keyword' in schema:
+    synchronize_schema('Keyword')
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/migration/2.39.0_Any.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,9 @@
+try:
+    # missing on some old databases
+    sql('CREATE INDEX entities_extid_idx ON entities(extid)')
+except:
+    pass # already exists
+checkpoint() 
+sql('CREATE INDEX entities_type_idx ON entities(type)')
+checkpoint()
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/migration/2.42.0_Any.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,2 @@
+synchronize_rschema('created_by')
+synchronize_rschema('owned_by')
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/migration/2.42.1_Any.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,18 @@
+if confirm('remove deprecated database constraints?'):
+    execute = session.system_sql
+    session.set_pool()
+    dbhelper = session.pool.source('system').dbhelper
+    cu = session.pool['system']
+    for table in dbhelper.list_tables(cu):
+        if table.endswith('_relation'):
+            try:
+                execute('ALTER TABLE %s DROP CONSTRAINT %s_fkey1' % (table, table))
+                execute('ALTER TABLE %s DROP CONSTRAINT %s_fkey2' % (table, table))
+            except:
+                continue
+    checkpoint()
+
+if 'inline_view' in schema:
+    # inline_view attribute should have been deleted for a while now....
+    drop_attribute('ENFRDef', 'inline_view')
+    
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/migration/2.43.0_Any.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+synchronize_permissions('EmailAddress')
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/migration/2.44.0_Any.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,14 @@
+change_relation_props('EFRDef', 'cardinality', 'String', internationalizable=True)
+change_relation_props('ENFRDef', 'cardinality', 'String', internationalizable=True)
+
+drop_relation_definition('EPermission', 'require_state', 'State')
+
+if confirm('cleanup require_permission relation'):
+    try:
+        newrschema = newschema.rschema('require_permission')
+    except KeyError:
+        newrschema = None
+    for rsubj, robj in schema.rschema('require_permission').rdefs():
+        if newrschema is None or not newrschema.has_rdef(rsubj, robj):
+            print 'removing', rsubj, 'require_permission', robj
+            drop_relation_definition(rsubj, 'require_permission', robj, ask_confirm=False)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/migration/2.45.0_Any.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,18 @@
+# following functions have been renamed, but keep old definition for bw compat
+sql('''CREATE AGGREGATE group_concat (
+  basetype = anyelement,
+  sfunc = array_append,
+  stype = anyarray,
+  finalfunc = comma_join,
+  initcond = '{}'
+)''')
+
+sql('''CREATE FUNCTION text_limit_size (fulltext text, maxsize integer) RETURNS text AS $$
+BEGIN
+    RETURN limit_size(fulltext, 'text/plain', maxsize);
+END
+$$ LANGUAGE plpgsql;
+''')
+
+
+synchronize_rschema('bookmarked_by')
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/migration/2.46.0_Any.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,9 @@
+
+
+rql('SET X value "navtop" WHERE X pkey ~= "contentnavigation.%.context", X value "header"')
+rql('SET X value "navcontenttop" WHERE X pkey ~= "contentnavigation%.context", X value "incontext"')
+rql('SET X value "navcontentbottom" WHERE X pkey ~= "contentnavigation%.context", X value "footer"')
+checkpoint()
+
+if 'require_permission' in schema:
+    synchronize_rschema('require_permission')
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/migration/2.47.0_Any.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,4 @@
+synchronize_permissions('primary_email')
+synchronize_rschema('wf_info_for')
+synchronize_rschema('use_email')
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/migration/2.48.8_Any.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,2 @@
+for etype in ('ERType', 'EFRDef', 'ENFRDef', 'EConstraint', 'EConstraintType'):
+    synchronize_permissions(etype)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/migration/2.49.3_Any.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+add_entity_type('Decimal')
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/migration/2.50.0_Any.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+add_relation_type('specializes')
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/migration/3.0.0_Any.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,9 @@
+from cubicweb import CW_MIGRATION_MAP
+
+for pk, in rql('Any K WHERE X is EProperty, X pkey IN (%s), X pkey K'
+              % ','.join("'system.version.%s'" % cube for cube in CW_MIGRATION_MAP)):
+    cube = pk.split('.')[-1]
+    newk = pk.replace(cube, CW_MIGRATION_MAP[cube])
+    rql('SET X pkey %(newk)s WHERE X pkey %(oldk)s',
+        {'oldk': pk, 'newk': newk})
+    print 'renamed', pk, 'to', newk
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/migration/bootstrapmigration_repository.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,43 @@
+"""always executed before all others in server migration
+
+it should only include low level schema changes
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+
+if applcubicwebversion < (2, 47, 0) and cubicwebversion >= (2, 47, 0):
+    from cubicweb.server import schemaserial
+    schemaserial.HAS_FULLTEXT_CONTAINER = False
+    cnx.set_shared_data('do-not-insert-is_instance_of', True)
+    add_attribute('ERType', 'fulltext_container')
+    schemaserial.HAS_FULLTEXT_CONTAINER = True
+
+
+ 
+if applcubicwebversion < (2, 50, 0) and cubicwebversion >= (2, 50, 0):
+    cnx.set_shared_data('do-not-insert-is_instance_of', True)
+    add_relation_type('is_instance_of')
+    # fill the relation using an efficient sql query instead of using rql
+    sql('INSERT INTO is_instance_of_relation '
+	'  SELECT * from is_relation')
+    checkpoint()
+    cnx.set_shared_data('do-not-insert-is_instance_of', False)
+
+if applcubicwebversion < (2, 42, 0) and cubicwebversion >= (2, 42, 0):
+    sql('ALTER TABLE entities ADD COLUMN mtime TIMESTAMP')
+    sql('UPDATE entities SET mtime=CURRENT_TIMESTAMP')
+    sql('CREATE INDEX entities_mtime_idx ON entities(mtime)')
+    sql('''CREATE TABLE deleted_entities (
+  eid INTEGER PRIMARY KEY NOT NULL,
+  type VARCHAR(64) NOT NULL,
+  source VARCHAR(64) NOT NULL,
+  dtime TIMESTAMP NOT NULL,
+  extid VARCHAR(256)
+)''')
+    sql('CREATE INDEX deleted_entities_type_idx ON deleted_entities(type)')
+    sql('CREATE INDEX deleted_entities_dtime_idx ON deleted_entities(dtime)')
+    sql('CREATE INDEX deleted_entities_extid_idx ON deleted_entities(extid)')
+    checkpoint()
+   
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/migration/postcreate.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,40 @@
+"""cubicweb post creation script, set user's workflow"""
+
+activatedeid = add_state(_('activated'), 'EUser', initial=True)
+deactivatedeid = add_state(_('deactivated'), 'EUser')
+add_transition(_('deactivate'), 'EUser',
+               (activatedeid,), deactivatedeid,
+               requiredgroups=('managers',))
+add_transition(_('activate'), 'EUser',
+               (deactivatedeid,), activatedeid,
+               requiredgroups=('managers',))
+
+# need this since we already have at least one user in the database (the default admin)
+rql('SET X in_state S WHERE X is EUser, S eid %s' % activatedeid)
+
+# create anonymous user if all-in-one config and anonymous user has been specified
+if hasattr(config, 'anonymous_user'):
+    anonlogin, anonpwd = config.anonymous_user()
+    if anonlogin:
+        rql('INSERT EUser X: X login %(login)s, X upassword %(pwd)s,'
+            'X in_state S, X in_group G WHERE G name "guests", S name "activated"',
+            {'login': unicode(anonlogin), 'pwd': anonpwd})
+
+cfg = config.persistent_options_configuration()
+if interactive_mode:
+    cfg.input_config(inputlevel=0)
+
+for section, options in cfg.options_by_section():
+    for optname, optdict, value in options:
+        key = '%s.%s' % (section, optname)
+        default = cfg.option_default(optname, optdict)
+        # only record values differing from default
+        if value != default:
+            rql('INSERT EProperty X: X pkey %(k)s, X value %(v)s', {'k': key, 'v': value})
+
+# add PERM_USE_TEMPLATE_FORMAT permission
+from cubicweb.schema import PERM_USE_TEMPLATE_FORMAT
+eid = add_entity('EPermission', name=PERM_USE_TEMPLATE_FORMAT,
+                 label=_('use template languages'))
+rql('SET X require_group G WHERE G name "managers", X eid %(x)s',
+    {'x': eid}, 'x')    
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/pylintrc	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,16 @@
+[MESSAGES CONTROL]
+disable-msg = C0301
+
+[VARIABLES]
+# Enable / disable this checker
+enable-variables = yes
+additional-builtins = _, display_name
+
+[BASIC]
+required-attributes=
+attr-rgx = (w|[a-z_][a-z0-9_]{2,30})
+variable-rgx = (w|[a-z_][a-z0-9_]{2,30})$
+argument-rgx = (w|[a-z_][a-z0-9_]{2,30})$
+
+[TYPECHECK]
+generated-members=debug,info,notice,warning,error,critical,exception
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/rset.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,540 @@
+"""The `ResultSet` class which is returned as result of a rql query
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from logilab.common.decorators import cached, clear_cache, copy_cache
+
+from rql import nodes
+
+from cubicweb import NotAnEntity
+    
+
+class ResultSet(object):
+    """a result set wraps a RQL query result. This object implements a partial
+    list protocol to allow direct use as a list of result rows.
+
+    :type rowcount: int
+    :ivar rowcount: number of rows in the result
+
+    :type rows: list
+    :ivar rows: list of rows of result
+
+    :type description: list
+    :ivar description:
+      result's description, using the same structure as the result itself
+
+    :type rql: str or unicode
+    :ivar rql: the original RQL query string
+    """
+    def __init__(self, results, rql, args=None, description=(), cachekey=None,
+                 rqlst=None):
+        self.rows = results
+        self.rowcount = results and len(results) or 0
+        # original query and arguments
+        self.rql = rql
+        self.args = args
+        self.cachekey = cachekey
+        # entity types for each cell (same shape as rows)
+        # maybe discarded if specified when the query has been executed
+        self.description = description
+        # parsed syntax tree
+        if rqlst is not None:
+            rqlst.schema = None # reset schema in case of pyro transfer
+        self._rqlst = rqlst
+        # set to (limit, offset) when a result set is limited using the
+        # .limit method
+        self.limited = None
+        # set by the cursor which returned this resultset
+        self.vreg = None
+        self.req = None
+   
+    def __str__(self):
+        if not self.rows:
+            return '<empty resultset %s>' % self.rql
+        return '<resultset %s (%s rows)>' % (self.rql, len(self.rows))
+    
+    def __repr__(self):
+        if not self.rows:
+            return '<empty resultset for %s>' % self.rql
+        if not self.description:
+            return '<resultset %s: %s>' % (self.rql, '\n'.join(str(r) for r in self.rows))
+        return '<resultset %s: %s>' % (self.rql,
+                                       '\n'.join('%s (%s)' % (r, d)
+                                                 for r, d in zip(self.rows, self.description)))
+
+    @cached
+    def possible_actions(self):
+        return self.vreg.possible_vobjects('actions', self.req, self)
+    
+    def __len__(self):
+        """returns the result set's size"""
+        return self.rowcount
+
+    def __nonzero__(self):
+        return self.rowcount
+    
+    def __getitem__(self, i):
+        """returns the ith element of the result set"""
+        return self.rows[i] #ResultSetRow(self.rows[i])
+    
+    def __getslice__(self, i, j):
+        """returns slice [i:j] of the result set"""
+        return self.rows[i:j]
+        
+    def __iter__(self):
+        """Returns an iterator over rows"""
+        return iter(self.rows)
+
+    def __add__(self, rset):
+        # XXX buggy implementation (.rql and .args attributes at least much
+        # probably differ)
+        # at least rql could be fixed now that we have union and sub-queries
+        # but I tend to think that since we have that, we should not need this
+        # method anymore (syt)
+        rset = ResultSet(self.rows+rset.rows, self.rql, self.args,
+                         self.description +rset.description)
+        return self.req.decorate_rset(rset)
+
+    def _prepare_copy(self, rows, descr):
+        rset = ResultSet(rows, self.rql, self.args, descr)
+        return self.req.decorate_rset(rset)
+
+    def transformed_rset(self, transformcb):
+        """transform the result set using the given callback
+
+        :type transformcb: callable(row, desc)
+        :param transformcb:
+          a callable which should take a row and its type description as
+          parameters, and return the transformed row and type description.
+          
+
+        :type col: int
+        :param col: the column index
+
+        :rtype: `ResultSet`
+        """
+        rows, descr = [], []
+        rset = self._prepare_copy(rows, descr)
+        for row, desc in zip(self.rows, self.description):
+            nrow, ndesc = transformcb(row, desc)
+            if ndesc: # transformcb returns None for ndesc to skip that row
+                rows.append(nrow)
+                descr.append(ndesc)
+        rset.rowcount = len(rows)
+        return rset
+
+    def filtered_rset(self, filtercb, col=0):
+        """filter the result set according to a given filtercb
+
+        :type filtercb: callable(entity)
+        :param filtercb:
+          a callable which should take an entity as argument and return
+          False if it should be skipped, else True
+
+        :type col: int
+        :param col: the column index
+
+        :rtype: `ResultSet`
+        """
+        rows, descr = [], []
+        rset = self._prepare_copy(rows, descr)
+        for i in xrange(len(self)):
+            if not filtercb(self.get_entity(i, col)):
+                continue
+            rows.append(self.rows[i])
+            descr.append(self.description[i])
+        rset.rowcount = len(rows)
+        return rset
+
+
+    def sorted_rset(self, keyfunc, reverse=False, col=0):
+        """sorts the result set according to a given keyfunc
+
+        :type keyfunc: callable(entity)
+        :param keyfunc:
+          a callable which should take an entity as argument and return
+          the value used to compare and sort
+
+        :type reverse: bool
+        :param reverse: if the result should be reversed
+
+        :type col: int
+        :param col: the column index. if col = -1, the whole row is used
+
+        :rtype: `ResultSet`
+        """
+        rows, descr = [], []
+        rset = self._prepare_copy(rows, descr)
+        if col >= 0:
+            entities = sorted(enumerate(self.entities(col)),
+                              key=lambda (i, e): keyfunc(e), reverse=reverse)
+        else:
+            entities = sorted(enumerate(self),
+                              key=lambda (i, e): keyfunc(e), reverse=reverse)
+
+        for index, entity in entities:
+            rows.append(self.rows[index])
+            descr.append(self.description[index])
+        rset.rowcount = len(rows)
+        return rset
+
+    def split_rset(self, keyfunc=None, col=0, return_dict=False):
+        """Splits the result set in multiple result set according to a given key
+    
+        :type keyfunc: callable(entity or FinalType)
+        :param keyfunc:
+          a callable which should take a value of the rset as argument and
+          return the value used to group values. If not defined, the raw value
+          of the specified column is used.
+
+        :type col: int
+        :param col: the column index. if col = -1, the whole row is used
+
+        :type return_dict: Boolean
+        :param return_dict: If true, the function return a mapping
+            (key -> rset) instead of a list of rset
+
+        :rtype: List of `ResultSet` or mapping of  `ResultSet`
+
+        """
+        result = []
+        mapping = {}
+        for idx, line in enumerate(self):
+            if col >= 0:
+                try:
+                    key = self.get_entity(idx,col)
+                except NotAnEntity:
+                    key = line[col]
+            else:
+                key = line
+            if keyfunc is not None:
+                key = keyfunc(key)
+
+            if key not in mapping:
+                rows, descr = [], []
+                rset = self._prepare_copy(rows, descr)
+                mapping[key] = rset
+                result.append(rset)
+            else:
+                rset = mapping[key]
+            rset.rows.append(self.rows[idx])
+            rset.description.append(self.description[idx])
+
+
+        for rset in result:
+            rset.rowcount = len(rset.rows)
+        if return_dict:
+            return mapping
+        else:
+            return result
+
+    def limit(self, limit, offset=0, inplace=False):
+        """limit the result set to the given number of rows optionally starting
+        from an index different than 0
+
+        :type limit: int
+        :param limit: the maximum number of results
+
+        :type offset: int
+        :param offset: the offset index
+        
+        :type inplace: bool
+        :param inplace:
+          if true, the result set is modified in place, else a new result set
+          is returned and the original is left unmodified
+
+        :rtype: `ResultSet`
+        """
+        stop = limit+offset
+        rows = self.rows[offset:stop]
+        descr = self.description[offset:stop]
+        if inplace:
+            rset = self
+            rset.rows, rset.description = rows, descr
+            rset.rowcount = len(rows)
+            clear_cache(rset, 'description_struct')
+            if offset:
+                clear_cache(rset, 'get_entity')
+            # we also have to fix/remove from the request entity cache entities
+            # which get a wrong rset reference by this limit call
+            for entity in self.req.cached_entities():
+                if entity.rset is self:
+                    if offset <= entity.row < stop:
+                        entity.row = entity.row - offset
+                    else:
+                        self.req.drop_entity_cache(entity.eid)
+        else:
+            rset = self._prepare_copy(rows, descr)
+            if not offset:
+                # can copy built entity caches
+                copy_cache(rset, 'get_entity', self)
+        rset.limited = (limit, offset)
+        return rset
+    
+    def printable_rql(self, encoded=False):
+        """return the result set's origin rql as a string, with arguments
+        substituted
+        """
+        encoding = self.req.encoding
+        rqlstr = self.syntax_tree().as_string(encoding, self.args)
+        # sounds like we get encoded or unicode string due to a bug in as_string
+        if not encoded:
+            if isinstance(rqlstr, unicode):
+                return rqlstr
+            return unicode(rqlstr, encoding)
+        else: 
+            if isinstance(rqlstr, unicode):
+                return rqlstr.encode(encoding)
+            return rqlstr
+       
+    # client helper methods ###################################################
+
+    def entities(self, col=0):
+        """iter on entities with eid in the `col` column of the result set"""
+        for i in xrange(len(self)):
+            # may have None values in case of outer join (or aggregate on eid
+            # hacks)
+            if self.rows[i][col] is not None:
+                yield self.get_entity(i, col)
+
+    @cached
+    def get_entity(self, row, col=None):
+        """special method for query retrieving a single entity, returns a
+        partially initialized Entity instance.
+        
+        WARNING: due to the cache wrapping this function, you should NEVER
+                 give row as a named parameter (i.e. rset.get_entity(req, 0)
+                 is OK but rset.get_entity(row=0, req=req) isn't
+
+        :type row,col: int, int
+        :param row,col:
+          row and col numbers localizing the entity among the result's table
+
+        :return: the partially initialized `Entity` instance
+        """
+        if col is None:
+            from warnings import warn
+            msg = 'col parameter will become mandatory in future version'
+            warn(msg, DeprecationWarning, stacklevel=3)
+            col = 0
+        etype = self.description[row][col]
+        try:
+            eschema = self.vreg.schema.eschema(etype)
+            if eschema.is_final():
+                raise NotAnEntity(etype)
+        except KeyError:
+            raise NotAnEntity(etype)
+        return self._build_entity(row, col)
+
+    def _build_entity(self, row, col, _localcache=None):
+        """internal method to get a single entity, returns a
+        partially initialized Entity instance.
+
+        partially means that only attributes selected in the RQL
+        query will be directly assigned to the entity.
+        
+        :type row,col: int, int
+        :param row,col:
+          row and col numbers localizing the entity among the result's table
+
+        :return: the partially initialized `Entity` instance
+        """
+        req = self.req
+        if req is None:
+            raise AssertionError('dont call get_entity with no req on the result set')
+        rowvalues = self.rows[row]
+        eid = rowvalues[col]
+        assert eid is not None
+        # return cached entity if exists. This also avoids potential recursion
+        # XXX should we consider updating a cached entity with possible
+        #     new attributes found in this resultset ?
+        try:
+            if hasattr(req, 'is_super_session'):
+                # this is a Session object which is not caching entities, so we
+                # have to use a local cache to avoid recursion pb
+                if _localcache is None:
+                    _localcache = {}
+                return _localcache[eid]
+            else:
+                return req.entity_cache(eid)
+        except KeyError:
+            pass
+        # build entity instance
+        etype = self.description[row][col]
+        entity = self.vreg.etype_class(etype)(req, self, row, col)
+        entity.set_eid(eid)
+        # cache entity
+        if _localcache is not None:
+            _localcache[eid] = entity
+        req.set_entity_cache(entity)
+        eschema = entity.e_schema
+        # try to complete the entity if there are some additional columns
+        if len(rowvalues) > 1:
+            rqlst = self.syntax_tree()
+            if rqlst.TYPE == 'select':
+                # UNION query, find the subquery from which this entity has been
+                # found
+                rqlst = rqlst.locate_subquery(col, etype, self.args)
+            # take care, due to outer join support, we may find None
+            # values for non final relation
+            for i, attr, x in attr_desc_iterator(rqlst, col):
+                if x == 'subject':
+                    rschema = eschema.subject_relation(attr)
+                    if rschema.is_final():
+                        entity[attr] = rowvalues[i]
+                        continue
+                    tetype = rschema.objects(etype)[0]
+                    card = rschema.rproperty(etype, tetype, 'cardinality')[0]
+                else:
+                    rschema = eschema.object_relation(attr)
+                    tetype = rschema.subjects(etype)[0]
+                    card = rschema.rproperty(tetype, etype, 'cardinality')[1]
+                # only keep value if it can't be multivalued
+                if card in '1?':
+                    if rowvalues[i] is None:
+                        if x == 'subject':
+                            rql = 'Any Y WHERE X %s Y, X eid %s'
+                        else:
+                            rql = 'Any Y WHERE Y %s X, X eid %s'
+                        rrset = ResultSet([], rql % (attr, entity.eid))
+                        req.decorate_rset(rrset)
+                    else:
+                        rrset = self._build_entity(row, i, _localcache).as_rset()
+                    entity.set_related_cache(attr, x, rrset)
+        return entity
+
+    @cached
+    def syntax_tree(self):
+        """get the syntax tree for the source query. 
+
+        :rtype: rql.stmts.Statement
+        :return: the RQL syntax tree of the originating query
+        """
+        if self._rqlst:
+            rqlst = self._rqlst.copy()
+            # to avoid transport overhead when pyro is used, the schema has been
+            # unset from the syntax tree
+            rqlst.schema = self.vreg.schema
+            self.vreg.rqlhelper.annotate(rqlst)
+        else:
+            rqlst = self.vreg.parse(self.req, self.rql, self.args)
+        return rqlst
+        
+    @cached
+    def column_types(self, col):
+        """return the list of different types in the column with the given col
+        index default to 0 (ie the first column)
+        
+        :type col: int
+        :param col: the index of the desired column
+
+        :rtype: list
+        :return: the different entities type found in the column
+        """
+        return frozenset(struc[-1][col] for struc in self.description_struct())
+
+    @cached
+    def description_struct(self):
+        """return a list describing sequence of results with the same
+        description, e.g. :
+        [[0, 4, ('Bug',)]
+        [[0, 4, ('Bug',), [5, 8, ('Story',)]
+        [[0, 3, ('Project', 'Version',)]]
+        """
+        result = []
+        last = None
+        for i, row in enumerate(self.description):
+            if row != last:
+                if last is not None:
+                    result[-1][1] = i - 1
+                result.append( [i, None, row] )
+                last = row
+        if last is not None:
+            result[-1][1] = i
+        return result
+
+    @cached
+    def related_entity(self, row, col):
+        """try to get the related entity to extract format information if any"""
+        locate_query_col = col
+        rqlst = self.syntax_tree()
+        etype = self.description[row][col]
+        if self.vreg.schema.eschema(etype).is_final():
+            # final type, find a better (ambiguous) one
+            for i in xrange(len(rqlst.children[0].selection)):
+                if i == col:
+                    continue
+                coletype = self.description[row][i]
+                if coletype is None:
+                    continue
+                if not self.vreg.schema.eschema(coletype).is_final():
+                    etype = coletype
+                    locate_query_col = i
+                    if len(self.column_types(i)) > 1:
+                        break
+        # UNION query, find the subquery from which this entity has been
+        # found
+        select = rqlst.locate_subquery(locate_query_col, etype, self.args)
+        try:
+            myvar = select.selection[col].variable
+        except AttributeError:
+            # no .selection attribute is available
+            return None, None
+        rel = myvar.main_relation()
+        if rel is not None:
+            index = rel.children[0].variable.selected_index()
+            if index is not None:
+                return self.get_entity(row, index), rel.r_type
+        return None, None
+
+    @cached
+    def searched_text(self):
+        """returns the searched text in case of full-text search
+
+        :return: searched text or `None` if the query is not
+                 a full-text query
+        """
+        rqlst = self.syntax_tree()
+        for rel in rqlst.iget_nodes(nodes.Relation):
+            if rel.r_type == 'has_text':
+                __, rhs = rel.get_variable_parts()
+                return rhs.eval(self.args)
+        return None
+        
+
+def attr_desc_iterator(rqlst, index=0):
+    """return an iterator on a list of 2-uple (index, attr_relation)
+    localizing attribute relations of the main variable in a result's row
+
+    :type rqlst: rql.stmts.Select
+    :param rqlst: the RQL syntax tree to describe
+
+    :return:
+      a generator on (index, relation, target) describing column being
+      attribute of the main variable
+    """
+    main = rqlst.selection[index]
+    for i, term in enumerate(rqlst.selection):
+        if i == index:
+            continue
+        try:
+            # XXX rewritten const
+            var = term.variable
+        except AttributeError:
+            continue
+        #varname = var.name
+        for ref in var.references():
+            rel = ref.relation()
+            if rel is None or rel.is_types_restriction():
+                continue
+            lhs, rhs = rel.get_variable_parts()
+            if main.is_equivalent(lhs):
+                if rhs.is_equivalent(term):
+                    yield (i, rel.r_type, 'subject')
+            elif main.is_equivalent(rhs):
+                if lhs.is_equivalent(term):
+                    yield (i, rel.r_type, 'object')
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/schema.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,957 @@
+"""classes to define schemas for CubicWeb
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+import warnings
+import re
+from logging import getLogger
+
+from logilab.common.decorators import cached, clear_cache
+from logilab.common.compat import any
+
+from yams import BadSchemaDefinition, buildobjs as ybo
+from yams.schema import Schema, ERSchema, EntitySchema, RelationSchema
+from yams.constraints import BaseConstraint, StaticVocabularyConstraint
+from yams.reader import (CONSTRAINTS, RelationFileReader, PyFileReader,
+                         SchemaLoader)
+
+from rql import parse, nodes, RQLSyntaxError, TypeResolverException
+
+from cubicweb import ETYPE_NAME_MAP, ValidationError, Unauthorized
+
+_ = unicode
+
+BASEGROUPS = ('managers', 'users', 'guests', 'owners')
+
+LOGGER = getLogger('cubicweb.schemaloader')
+
+# schema entities created from serialized schema have an eid rproperty
+ybo.ETYPE_PROPERTIES += ('eid',)
+ybo.RTYPE_PROPERTIES += ('eid',)
+ybo.RDEF_PROPERTIES += ('eid',)
+
def bw_normalize_etype(etype):
    """return the up-to-date name for the given entity type, emitting a
    backward compatibility warning when the name is a deprecated one

    :param etype: an entity type name, possibly an obsolete one
    :return: the renamed type if `etype` is in `ETYPE_NAME_MAP`, else
             `etype` unchanged
    """
    if etype in ETYPE_NAME_MAP:
        # the warnings module is already imported at module level, no need
        # for a redundant local `from warnings import warn`
        msg = '%s has been renamed to %s, please update your code' % (
            etype, ETYPE_NAME_MAP[etype])
        # stacklevel=4 so the warning points at the schema definition site
        warnings.warn(msg, DeprecationWarning, stacklevel=4)
        etype = ETYPE_NAME_MAP[etype]
    return etype
+
# monkey patch yams.builder.RelationDefinition to support a new wildcard type '@'
# corresponding to system entities (ie meta but not schema)
+def _actual_types(self, schema, etype):
+    # two bits of error checking & reporting :
+    if type(etype) not in (str, list, tuple):
+        raise RuntimeError, ('Entity types must not be instances but strings or'
+                             ' list/tuples thereof. Ex. (bad, good) : '
+                             'SubjectRelation(Foo), SubjectRelation("Foo"). '
+                             'Hence, %r is not acceptable.' % etype)
+    # real work :
+    if etype == '**':
+        return self._pow_etypes(schema)
+    if isinstance(etype, (tuple, list)):
+        return etype
+    if '*' in etype or '@' in etype:
+        assert len(etype) in (1, 2)
+        etypes = ()
+        if '*' in etype:
+            etypes += tuple(self._wildcard_etypes(schema))
+        if '@' in etype:
+            etypes += tuple(system_etypes(schema))
+        return etypes
+    return (etype,)
+ybo.RelationDefinition._actual_types = _actual_types
+
def display_name(req, key, form=''):
    """return an internationalized string for `key` (a schema entity or
    relation name) in the given grammatical form

    :param form: one of '', 'plural', 'subject' or 'object'
    """
    assert form in ('', 'plural', 'subject', 'object')
    if form == 'subject':
        form = ''
    if form:
        key = '%s_%s' % (key, form)
    # ensure unicode; lower-cased so missing translations still look decent
    return unicode(req._(key)).lower()
__builtins__['display_name'] = display_name
+
def ERSchema_display_name(self, req, form=''):
    """return an internationalized string for this entity/relation type's
    name in the given form (delegates to the display_name function)
    """
    return display_name(req, self.type, form)
ERSchema.display_name = ERSchema_display_name
+
@cached
def ERSchema_get_groups(self, action):
    """return the groups authorized to perform <action> on entities of
    this type

    :type action: str
    :param action: the name of a permission

    :rtype: tuple
    :return: names of the groups with the given permission
    """
    assert action in self.ACTIONS, action
    # _groups stores group names and rql expressions together; only the
    # string entries are group names
    perms = self._groups.get(action)
    if perms is None:
        return ()
    return frozenset(g for g in perms if isinstance(g, basestring))
ERSchema.get_groups = ERSchema_get_groups
+
def ERSchema_set_groups(self, action, groups):
    """set the groups allowed to perform <action> on entities of this type,
    leaving the rql expressions for the same action untouched

    :type action: str
    :param action: the name of a permission

    :type groups: list or tuple
    :param groups: names of the groups granted to do the given action
    """
    assert action in self.ACTIONS, action
    # invalidate the get_groups cache before rewriting the permission
    clear_cache(self, 'ERSchema_get_groups')
    rqlexprs = self.get_rqlexprs(action)
    self._groups[action] = tuple(groups) + rqlexprs
ERSchema.set_groups = ERSchema_set_groups
+
@cached
def ERSchema_get_rqlexprs(self, action):
    """return the rql expressions representing queries to check the user is
    allowed to perform <action> on entities of this type

    :type action: str
    :param action: the name of a permission

    :rtype: tuple
    :return: the rql expressions with the given permission
    """
    assert action in self.ACTIONS, action
    # rql expressions live in _groups beside group names; the non-string
    # entries are the expressions
    perms = self._groups.get(action)
    if perms is None:
        return ()
    return tuple(g for g in perms if not isinstance(g, basestring))
ERSchema.get_rqlexprs = ERSchema_get_rqlexprs
+
def ERSchema_set_rqlexprs(self, action, rqlexprs):
    """set the rql expressions allowing to perform <action> on entities of
    this type, leaving the groups for the same action untouched

    :type action: str
    :param action: the name of a permission

    :type rqlexprs: list or tuple
    :param rqlexprs: the rql expressions allowing the given action
    """
    assert action in self.ACTIONS, action
    # invalidate the get_rqlexprs cache before rewriting the permission
    clear_cache(self, 'ERSchema_get_rqlexprs')
    groups = tuple(self.get_groups(action))
    self._groups[action] = groups + tuple(rqlexprs)
ERSchema.set_rqlexprs = ERSchema_set_rqlexprs
+
def ERSchema_set_permissions(self, action, permissions):
    """set both the groups and the rql expressions allowing to perform
    <action> on entities of this type

    :type action: str
    :param action: the name of a permission

    :type permissions: tuple
    :param permissions: the groups and rql expressions allowing the given action
    """
    assert action in self.ACTIONS, action
    # both caches are stale since permissions mix groups and expressions
    clear_cache(self, 'ERSchema_get_rqlexprs')
    clear_cache(self, 'ERSchema_get_groups')
    self._groups[action] = tuple(permissions)
ERSchema.set_permissions = ERSchema_set_permissions
+
def ERSchema_has_perm(self, session, action, *args, **kwargs):
    """return True if the action is granted globally or locally"""
    try:
        self.check_perm(session, action, *args, **kwargs)
    except Unauthorized:
        return False
    return True
ERSchema.has_perm = ERSchema_has_perm
+
def ERSchema_has_local_role(self, action):
    """return true if the action *may* be granted locally (eg either rql
    expressions or the owners group are used in security definition)

    XXX this method is only there since we don't know well how to deal with
    'add' action checking. Also find a better name would be nice.
    """
    assert action in self.ACTIONS, action
    # an rql expression always implies a per-entity check
    if self.get_rqlexprs(action):
        return True
    # the 'owners' group on update/delete also requires a local check
    return action in ('update', 'delete') and self.has_group(action, 'owners')
ERSchema.has_local_role = ERSchema_has_local_role
+
+
def system_etypes(schema):
    """yield system entity type names only: meta entity types which are
    neither final nor used to build the schema
    """
    for eschema in schema.entities():
        if eschema.meta and not (eschema.is_final() or eschema.schema_entity()):
            yield eschema.type
+
+# Schema objects definition ###################################################
+
class CubicWebEntitySchema(EntitySchema):
    """a entity has a type, a set of subject and or object relations
    the entity schema defines the possible relations for a given type and some
    constraints on those relations

    extends yams' EntitySchema with cubicweb specifics: an `eid` attribute
    (set when the schema is deserialized from the database), automatic
    management of the `has_text` full-text index attribute, and permission
    checking helpers
    """
    def __init__(self, schema=None, edef=None, eid=None, **kwargs):
        """initialize the entity schema, recording the eid of the entity
        representing this type when built from a serialized schema
        """
        super(CubicWebEntitySchema, self).__init__(schema, edef, **kwargs)
        if eid is None and edef is not None:
            eid = getattr(edef, 'eid', None)
        # eid of the entity representing this type, None for in-memory schemas
        self.eid = eid
        # take care: no _groups attribute when deep-copying
        if getattr(self, '_groups', None):
            # relation rql expressions (RRQLExpression) make no sense on an
            # entity type: reject them early with an explicit error
            for groups in self._groups.itervalues():
                for group_or_rqlexpr in groups:
                    if isinstance(group_or_rqlexpr, RRQLExpression):
                        msg = "can't use RRQLExpression on an entity type, use an ERQLExpression (%s)"
                        raise BadSchemaDefinition(msg % self.type)

    def attribute_definitions(self):
        """return an iterator on attribute definitions

        attribute relations are a subset of subject relations where the
        object's type is a final entity

        an attribute definition is a 2-uple :
        * name of the relation
        * schema of the destination entity type

        the synthetic `has_text` attribute is skipped
        """
        iter = super(CubicWebEntitySchema, self).attribute_definitions()
        for rschema, attrschema in iter:
            if rschema.type == 'has_text':
                continue
            yield rschema, attrschema

    def add_subject_relation(self, rschema):
        """register the relation schema as possible subject relation"""
        super(CubicWebEntitySchema, self).add_subject_relation(rschema)
        # adding a relation may require (or provide) full-text indexing
        self._update_has_text()

    def del_subject_relation(self, rtype):
        # removing a relation may make the has_text attribute useless
        super(CubicWebEntitySchema, self).del_subject_relation(rtype)
        self._update_has_text(False)

    def _update_has_text(self, need_has_text=None):
        # add or remove the synthetic `has_text` attribute, according to
        # whether some attribute is full-text indexed or this entity type
        # is the full-text container for one of its relations
        may_need_has_text, has_has_text = False, False
        for rschema in self.subject_relations():
            if rschema.is_final():
                if rschema == 'has_text':
                    has_has_text = True
                elif self.rproperty(rschema, 'fulltextindexed'):
                    may_need_has_text = True
            elif rschema.fulltext_container:
                if rschema.fulltext_container == 'subject':
                    may_need_has_text = True
                else:
                    # indexed content goes to the other end of the relation
                    # NOTE(review): no `break` here, unlike the loop below --
                    # confirm this asymmetry is intended
                    need_has_text = False
        for rschema in self.object_relations():
            if rschema.fulltext_container:
                if rschema.fulltext_container == 'object':
                    may_need_has_text = True
                else:
                    need_has_text = False
                    break
        if need_has_text is None:
            need_has_text = may_need_has_text
        if need_has_text and not has_has_text:
            rdef = ybo.RelationDefinition(self.type, 'has_text', 'String')
            self.schema.add_relation_def(rdef)
        elif not need_has_text and has_has_text:
            self.schema.del_relation_def(self.type, 'has_text', 'String')

    def schema_entity(self):
        """return True if this entity type is used to build the schema"""
        return self.type in self.schema.schema_entity_types()

    def rich_text_fields(self):
        """return an iterator on (attribute, format attribute) of rich text field

        (the first tuple element containing the text and the second the text format)
        """
        for rschema, _ in self.attribute_definitions():
            if rschema.type.endswith('_format'):
                for constraint in self.constraints(rschema):
                    # FormatConstraint is presumably defined further in this
                    # module -- not visible from here
                    if isinstance(constraint, FormatConstraint):
                        # strip the '_format' suffix to get the text attribute
                        yield self.subject_relation(rschema.type[:-7]), rschema
                        break

    def check_perm(self, session, action, eid=None):
        """check the user bound to `session` may perform `action` on entities
        of this type (on the entity with the given `eid` if specified),
        raising Unauthorized otherwise
        """
        # NB: session may be a server session or a request object
        user = session.user
        # check user is in an allowed group, if so that's enough
        # internal sessions should always stop there
        if user.matching_groups(self.get_groups(action)):
            return
        # if 'owners' in allowed groups, check if the user actually owns this
        # object, if so that's enough
        if eid is not None and 'owners' in self.get_groups(action) and \
               user.owns(eid):
            return
        # else if there is some rql expressions, check them
        if any(rqlexpr.check(session, eid)
               for rqlexpr in self.get_rqlexprs(action)):
            return
        raise Unauthorized(action, str(self))

    def rql_expression(self, expression, mainvars=None, eid=None):
        """rql expression factory"""
        return ERQLExpression(expression, mainvars, eid)
+    
class CubicWebRelationSchema(RelationSchema):
    """cubicweb specific relation schema, carrying the eid of the entity
    representing the relation type and rql expression based permissions
    """
    # register 'eid' as a known relation definition property
    RelationSchema._RPROPERTIES['eid'] = None
    # permissions are validated lazily, on the first update() call
    _perms_checked = False

    def __init__(self, schema=None, rdef=None, eid=None, **kwargs):
        """initialize the relation schema, recording its `inlined` status and
        the eid of the entity representing this type when deserialized
        """
        if rdef is not None:
            # if this relation is inlined
            self.inlined = rdef.inlined
        super(CubicWebRelationSchema, self).__init__(schema, rdef, **kwargs)
        if eid is None and rdef is not None:
            eid = getattr(rdef, 'eid', None)
        self.eid = eid


    def update(self, subjschema, objschema, rdef):
        """add a new relation definition and, on the first call, validate
        that permissions use expression classes suited to this relation kind
        """
        super(CubicWebRelationSchema, self).update(subjschema, objschema, rdef)
        if not self._perms_checked and self._groups:
            for action, groups in self._groups.iteritems():
                for group_or_rqlexpr in groups:
                    if action == 'read' and \
                           isinstance(group_or_rqlexpr, RQLExpression):
                        msg = "can't use rql expression for read permission of "\
                              "a relation type (%s)"
                        raise BadSchemaDefinition(msg % self.type)
                    elif self.final and isinstance(group_or_rqlexpr, RRQLExpression):
                        if self.schema.reading_from_database:
                            # we didn't have final relation earlier, so turn
                            # RRQLExpression into ERQLExpression now
                            rqlexpr = group_or_rqlexpr
                            newrqlexprs = [x for x in self.get_rqlexprs(action) if not x is rqlexpr]
                            newrqlexprs.append(ERQLExpression(rqlexpr.expression,
                                                              rqlexpr.mainvars,
                                                              rqlexpr.eid))
                            self.set_rqlexprs(action, newrqlexprs)
                        else:
                            msg = "can't use RRQLExpression on a final relation "\
                                  "type (eg attribute relation), use an ERQLExpression (%s)"
                            raise BadSchemaDefinition(msg % self.type)
                    elif not self.final and \
                             isinstance(group_or_rqlexpr, ERQLExpression):
                        msg = "can't use ERQLExpression on a relation type, use "\
                              "a RRQLExpression (%s)"
                        raise BadSchemaDefinition(msg % self.type)
            self._perms_checked = True

    def cardinality(self, subjtype, objtype, target):
        """return the cardinality character of this relation definition for
        the given role (`target` is 'subject' or 'object')
        """
        card = self.rproperty(subjtype, objtype, 'cardinality')
        return (target == 'subject' and card[0]) or \
               (target == 'object' and card[1])

    def schema_relation(self):
        """return True if this relation type is used to build the schema"""
        return self.type in ('relation_type', 'from_entity', 'to_entity',
                             'constrained_by', 'cstrtype')

    def physical_mode(self):
        """return an appropriate mode for physical storage of this relation type:
        * 'subjectinline' if every possible subject cardinalities are 1 or ?
        * 'objectinline' if 'subjectinline' mode is not possible but every
          possible object cardinalities are 1 or ?
        * None if neither 'subjectinline' and 'objectinline'
        """
        assert not self.final
        return self.inlined and 'subjectinline' or None

    def check_perm(self, session, action, *args, **kwargs):
        """check the user bound to `session` may perform `action` on this
        relation, raising Unauthorized otherwise
        """
        # NB: session may be a server session or a request object check user is
        # in an allowed group, if so that's enough internal sessions should
        # always stop there
        if session.user.matching_groups(self.get_groups(action)):
            return
        # else if there is some rql expressions, check them
        if any(rqlexpr.check(session, *args, **kwargs)
               for rqlexpr in self.get_rqlexprs(action)):
            return
        raise Unauthorized(action, str(self))

    def rql_expression(self, expression, mainvars=None, eid=None):
        """rql expression factory, returning the expression class suited to
        this relation kind (entity expression for final/attribute relations)
        """
        if self.is_final():
            return ERQLExpression(expression, mainvars, eid)
        return RRQLExpression(expression, mainvars, eid)
+
+    
class CubicWebSchema(Schema):
    """set of entities and relations schema defining the possible data sets
    used in an application


    :type name: str
    :ivar name: name of the schema, usually the application identifier

    :type base: str
    :ivar base: path of the directory where the schema is defined
    """
    # set to True while the schema is being deserialized from the database
    reading_from_database = False
    entity_class = CubicWebEntitySchema
    relation_class = CubicWebRelationSchema

    def __init__(self, *args, **kwargs):
        # index mapping schema entity eids to their schema object / definition
        self._eid_index = {}
        super(CubicWebSchema, self).__init__(*args, **kwargs)
        ybo.register_base_types(self)
        # register cubicweb's builtin relation types: eid, has_text, identity
        rschema = self.add_relation_type(ybo.RelationType('eid', meta=True))
        rschema.final = True
        rschema.set_default_groups()
        rschema = self.add_relation_type(ybo.RelationType('has_text', meta=True))
        rschema.final = True
        rschema.set_default_groups()
        rschema = self.add_relation_type(ybo.RelationType('identity', meta=True))
        rschema.final = False
        rschema.set_default_groups()

    def schema_entity_types(self):
        """return the list of entity types used to build the schema"""
        return frozenset(('EEType', 'ERType', 'EFRDef', 'ENFRDef',
                          'EConstraint', 'EConstraintType', 'RQLExpression',
                          # XXX those are not really "schema" entity types
                          #     but we usually don't want them as @* targets
                          'EProperty', 'EPermission', 'State', 'Transition'))

    def add_entity_type(self, edef):
        """register a new entity type, normalizing its (possibly deprecated)
        name and automatically adding the `eid` and `identity` relations
        """
        edef.name = edef.name.encode()
        edef.name = bw_normalize_etype(edef.name)
        assert re.match(r'[A-Z][A-Za-z0-9]*[a-z]+[0-9]*$', edef.name), repr(edef.name)
        eschema = super(CubicWebSchema, self).add_entity_type(edef)
        if not eschema.is_final():
            # automatically add the eid relation to non final entity types
            rdef = ybo.RelationDefinition(eschema.type, 'eid', 'Int',
                                          cardinality='11', uid=True)
            self.add_relation_def(rdef)
            rdef = ybo.RelationDefinition(eschema.type, 'identity', eschema.type)
            self.add_relation_def(rdef)
        self._eid_index[eschema.eid] = eschema
        return eschema

    def add_relation_type(self, rdef):
        # relation type names are lower case by convention
        # NOTE(review): .encode() is used here but not in add_relation_def
        # below -- confirm the inconsistency is intended
        rdef.name = rdef.name.lower().encode()
        rschema = super(CubicWebSchema, self).add_relation_type(rdef)
        self._eid_index[rschema.eid] = rschema
        return rschema

    def add_relation_def(self, rdef):
        """build a part of a relation schema
        (i.e. add a relation between two specific entity's types)

        :type subject: str
        :param subject: entity's type that is subject of the relation

        :type rtype: str
        :param rtype: the relation's type (i.e. the name of the relation)

        :type obj: str
        :param obj: entity's type that is object of the relation

        :rtype: RelationSchema
        :param: the newly created or just completed relation schema
        """
        rdef.name = rdef.name.lower()
        rdef.subject = bw_normalize_etype(rdef.subject)
        rdef.object = bw_normalize_etype(rdef.object)
        super(CubicWebSchema, self).add_relation_def(rdef)
        try:
            self._eid_index[rdef.eid] = (self.eschema(rdef.subject),
                                         self.rschema(rdef.name),
                                         self.eschema(rdef.object))
        except AttributeError:
            pass # not a serialized schema

    def del_relation_type(self, rtype):
        # drop the relation schema from the eid index as well
        rschema = self.rschema(rtype)
        self._eid_index.pop(rschema.eid, None)
        super(CubicWebSchema, self).del_relation_type(rtype)

    def del_relation_def(self, subjtype, rtype, objtype):
        # (python 2) items() returns a list copy, so deleting entries while
        # looping is safe
        for k, v in self._eid_index.items():
            if v == (subjtype, rtype, objtype):
                del self._eid_index[k]
        super(CubicWebSchema, self).del_relation_def(subjtype, rtype, objtype)

    def del_entity_type(self, etype):
        eschema = self.eschema(etype)
        self._eid_index.pop(eschema.eid, None)
        # deal with has_text first, else its automatic deletion (see above)
        # may trigger an error in ancestor's del_entity_type method
        if 'has_text' in eschema.subject_relations():
            self.del_relation_def(etype, 'has_text', 'String')
        super(CubicWebSchema, self).del_entity_type(etype)

    def schema_by_eid(self, eid):
        """return the schema object / definition whose eid is given
        (raise KeyError if not found)
        """
        return self._eid_index[eid]
+
+
+# Possible constraints ########################################################
+
class RQLVocabularyConstraint(BaseConstraint):
    """the rql vocabulary constraint :

    limit the proposed values to a set of entities returned by a rql query,
    but this is not enforced at the repository level

    `restriction` is an additional rql restriction that will be added to
    a predefined query, where the S and O variables respectively represent
    the subject and the object of the relation
    """

    def __init__(self, restriction):
        self.restriction = restriction

    def serialize(self):
        """return the serialized form of this constraint (its restriction)"""
        return self.restriction

    @classmethod
    def deserialize(cls, value):
        """rebuild a constraint instance from its serialized form

        use the decorator form instead of the legacy
        ``deserialize = classmethod(deserialize)`` idiom
        """
        return cls(value)

    def check(self, entity, rtype, value):
        """return true if the value satisfy the constraint, else false"""
        # implemented as a hook in the repository, hence this always
        # succeeds client side (True instead of the former bare 1)
        return True

    def repo_check(self, session, eidfrom, rtype, eidto):
        """raise ValidationError if the relation doesn't satisfy the constraint
        """
        pass # this is a vocabulary constraint, not enforced

    def __str__(self):
        return self.restriction

    def __repr__(self):
        return '<%s : %s>' % (self.__class__.__name__, repr(self.restriction))
+
+
class RQLConstraint(RQLVocabularyConstraint):
    """similar to RQLVocabularyConstraint but enforced at the repository
    level as well
    """
    def exec_query(self, session, eidfrom, eidto):
        """run the constraint query against the given subject/object eids and
        return its result set
        """
        rql = 'Any S,O WHERE S eid %(s)s, O eid %(o)s, ' + self.restriction
        args = {'s': eidfrom, 'o': eidto}
        return session.unsafe_execute(rql, args, ('s', 'o'), build_descr=False)

    def error(self, eid, rtype, msg):
        """raise a ValidationError for `eid` on relation `rtype`"""
        raise ValidationError(eid, {rtype: msg})

    def repo_check(self, session, eidfrom, rtype, eidto):
        """raise ValidationError if the relation doesn't satisfy the constraint
        """
        if not self.exec_query(session, eidfrom, eidto):
            # XXX at this point dunno if the validation error `occured` on
            #     eidfrom or eidto (from user interface point of view)
            self.error(eidfrom, rtype, 'constraint %s failed' % self)
+
+
class RQLUniqueConstraint(RQLConstraint):
    """rql constraint checking the query returns at most one result"""
    def repo_check(self, session, eidfrom, rtype, eidto):
        """raise ValidationError if more than one entity matches the
        constraint query
        """
        rset = self.exec_query(session, eidfrom, eidto)
        if len(rset) > 1:
            # XXX at this point dunno if the validation error `occured` on
            #     eidfrom or eidto (from user interface point of view)
            self.error(eidfrom, rtype, 'unique constraint %s failed' % self)
+
+    
def split_expression(rqlstring):
    """iterate over the words of an rql expression, commas excluded"""
    return (word
            for restriction in rqlstring.split(',')
            for word in restriction.split())
+            
def normalize_expression(rqlstring):
    """normalize an rql expression to ease schema synchronization (avoid
    suppressing and reinserting an expression when only insignificant
    whitespace has been added or removed)
    """
    normalized = []
    for expr in rqlstring.split(','):
        normalized.append(' '.join(expr.split()))
    return u', '.join(normalized)
+
+
class RQLExpression(object):
    """base class for rql expressions used in permission definitions

    the expression's rql is completed with eid restrictions on its main
    variables (see subclasses' `full_rql`) before being executed to check
    whether the user is granted
    """
    def __init__(self, expression, mainvars, eid):
        self.eid = eid # eid of the entity representing this rql expression
        if not isinstance(mainvars, unicode):
            mainvars = unicode(mainvars)
        # comma separated names of the expression's selected variables
        self.mainvars = mainvars
        # normalized so schema synchronization isn't fooled by whitespace
        self.expression = normalize_expression(expression)
        try:
            self.rqlst = parse(self.full_rql, print_errors=False).children[0]
        except RQLSyntaxError:
            # re-raise with the original expression for a clearer message
            raise RQLSyntaxError(expression)
        for mainvar in mainvars.split(','):
            # a main variable only referenced by its selection and its eid
            # restriction (<= 2 references) is probably a mistake
            if len(self.rqlst.defined_vars[mainvar].references()) <= 2:
                LOGGER.warn('You did not use the %s variable in your RQL expression %s',
                            mainvar, self)

    def __str__(self):
        return self.full_rql
    def __repr__(self):
        return '%s(%s)' % (self.__class__.__name__, self.full_rql)

    def __deepcopy__(self, memo):
        # NOTE(review): the eid is not propagated to the copy (nor by
        # __getstate__ below) -- confirm this is intended
        return self.__class__(self.expression, self.mainvars)
    def __getstate__(self):
        return (self.expression, self.mainvars)
    def __setstate__(self, state):
        self.__init__(*state)

    @cached
    def transform_has_permission(self):
        """extract the special has_<action>_permission relations from the
        expression's syntax tree

        :return: a 3-uple (rql, found, keyarg) where `rql` is the expression
          with those relations removed, `found` is None or a list of
          (action, variable, column index) describing permissions to check
          on the query's results, and `keyarg` is the argument key when the
          remaining query is a plain eid restriction (no execution needed)
        """
        found = None
        rqlst = self.rqlst
        for var in rqlst.defined_vars.itervalues():
            for varref in var.references():
                rel = varref.relation()
                if rel is None:
                    continue
                try:
                    prefix, action, suffix = rel.r_type.split('_')
                except ValueError:
                    continue
                if prefix != 'has' or suffix != 'permission' or \
                       not action in ('add', 'delete', 'update', 'read'):
                    continue
                if found is None:
                    found = []
                    # save the tree so it can be restored once rewritten
                    rqlst.save_state()
                assert rel.children[0].name == 'U'
                objvar = rel.children[1].children[0].variable
                rqlst.remove_node(rel)
                selected = [v.name for v in rqlst.get_selected_variables()]
                if objvar.name not in selected:
                    colindex = len(selected)
                    rqlst.add_selected(objvar)
                else:
                    colindex = selected.index(objvar.name)
                found.append((action, objvar, colindex))
                # remove U eid %(u)s if U is not used in any other relation
                uvrefs = rqlst.defined_vars['U'].references()
                if len(uvrefs) == 1:
                    rqlst.remove_node(uvrefs[0].relation())
        if found is not None:
            rql = rqlst.as_string()
            if len(rqlst.selection) == 1 and isinstance(rqlst.where, nodes.Relation):
                # only "Any X WHERE X eid %(x)s" remaining, no need to execute the rql
                keyarg = rqlst.selection[0].name.lower()
            else:
                keyarg = None
            rqlst.recover()
            return rql, found, keyarg
        return rqlst.as_string(), None, None

    def _check(self, session, **kwargs):
        """return True if the rql expression is matching the given relation
        between fromeid and toeid

        session may actually be a request as well
        """
        if self.eid is not None:
            # serialized expression: check results are cached on the session
            key = (self.eid, tuple(sorted(kwargs.iteritems())))
            try:
                return session.local_perm_cache[key]
            except KeyError:
                pass
        rql, has_perm_defs, keyarg = self.transform_has_permission()
        if keyarg is None:
            # on the server side, use unsafe_execute, but this is not available
            # on the client side (session is actually a request)
            execute = getattr(session, 'unsafe_execute', session.execute)
            # XXX what if 'u' in kwargs
            cachekey = kwargs.keys()
            kwargs['u'] = session.user.eid
            try:
                rset = execute(rql, kwargs, cachekey, build_descr=True)
            except NotImplementedError:
                # self.critical/self.warning are presumably injected by a
                # logging setup elsewhere -- not defined in this class
                self.critical('cant check rql expression, unsupported rql %s', rql)
                if self.eid is not None:
                    session.local_perm_cache[key] = False
                return False
            except TypeResolverException, ex:
                # some expression may not be resolvable with current kwargs
                # (type conflict)
                self.warning('%s: %s', rql, str(ex))
                if self.eid is not None:
                    session.local_perm_cache[key] = False
                return False
        else:
            # the query boils down to an eid restriction: fake its result set
            rset = session.eid_rset(kwargs[keyarg])
        # if no special has_*_permission relation in the rql expression, just
        # check the result set contains something
        if has_perm_defs is None:
            if rset:
                if self.eid is not None:
                    session.local_perm_cache[key] = True
                return True
        elif rset:
            # check every special has_*_permission relation is satisfied
            get_eschema = session.vreg.schema.eschema
            try:
                for eaction, var, col in has_perm_defs:
                    for i in xrange(len(rset)):
                        eschema = get_eschema(rset.description[i][col])
                        eschema.check_perm(session, eaction, rset[i][col])
                if self.eid is not None:
                    session.local_perm_cache[key] = True
                return True
            except Unauthorized:
                pass
        if self.eid is not None:
            session.local_perm_cache[key] = False
        return False

    @property
    def minimal_rql(self):
        """the expression's rql without the eid restrictions added by
        subclasses' `full_rql`
        """
        return 'Any %s WHERE %s' % (self.mainvars, self.expression)
+
+
class ERQLExpression(RQLExpression):
    """rql expression used for entity permissions, where the X variable
    stands for the entity being checked (selected by default)
    """
    def __init__(self, expression, mainvars=None, eid=None):
        super(ERQLExpression, self).__init__(expression, mainvars or 'X', eid)
        # syntax tree used by read security (inserted in queries when necessary
        self.snippet_rqlst = parse(self.minimal_rql, print_errors=False).children[0]

    @property
    def full_rql(self):
        """the expression's rql completed with eid restrictions for the X
        and U variables when they are used
        """
        rql = self.minimal_rql
        rqlst = getattr(self, 'rqlst', None) # may be not set yet
        if rqlst is None:
            defined = set(split_expression(self.expression))
        else:
            defined = rqlst.defined_vars
        if 'X' in defined:
            rql += ', X eid %(x)s'
        if 'U' in defined:
            rql += ', U eid %(u)s'
        return rql

    def check(self, session, eid=None):
        """check the expression for the given entity; return False when X is
        used but no eid is provided
        """
        kwargs = {}
        if 'X' in self.rqlst.defined_vars:
            if eid is None:
                return False
            kwargs['x'] = eid
        return self._check(session, **kwargs)

PyFileReader.context['ERQLExpression'] = ERQLExpression
+        
+class RRQLExpression(RQLExpression):
+    def __init__(self, expression, mainvars=None, eid=None):
+        if mainvars is None:
+            defined = set(split_expression(expression))
+            mainvars = []
+            if 'S' in defined:
+                mainvars.append('S')
+            if 'O' in defined:
+                mainvars.append('O')
+            if not mainvars:
+                raise Exception('unable to guess selection variables')
+            mainvars = ','.join(mainvars)
+        RQLExpression.__init__(self, expression, mainvars, eid)
+
+    @property
+    def full_rql(self):
+        rql = self.minimal_rql
+        rqlst = getattr(self, 'rqlst', None) # may be not set yet
+        if rqlst is not None:
+            defined = rqlst.defined_vars
+        else:
+            defined = set(split_expression(self.expression))
+        if 'S' in defined:
+            rql += ', S eid %(s)s'
+        if 'O' in defined:
+            rql += ', O eid %(o)s'
+        if 'U' in defined:
+            rql += ', U eid %(u)s'
+        return rql
+    
+    def check(self, session, fromeid=None, toeid=None):
+        kwargs = {}
+        if 'S' in self.rqlst.defined_vars:
+            if fromeid is None:
+                return False
+            kwargs['s'] = fromeid
+        if 'O' in self.rqlst.defined_vars:
+            if toeid is None:
+                return False
+            kwargs['o'] = toeid
+        return self._check(session, **kwargs)
+        
+PyFileReader.context['RRQLExpression'] = RRQLExpression
+
+        
+# schema loading ##############################################################
+
+class CubicWebRelationFileReader(RelationFileReader):
+    """cubicweb specific relation file reader, handling additional RQL
+    constraints on a relation definition
+    """
+    
+    def handle_constraint(self, rdef, constraint_text):
+        """arbitrary constraint is an rql expression for cubicweb"""
+        if not rdef.constraints:
+            rdef.constraints = []
+        rdef.constraints.append(RQLVocabularyConstraint(constraint_text))
+
+    def process_properties(self, rdef, relation_def):
+        if 'inline' in relation_def:
+            rdef.inlined = True
+        RelationFileReader.process_properties(self, rdef, relation_def)
+
+        
+CONSTRAINTS['RQLConstraint'] = RQLConstraint
+CONSTRAINTS['RQLUniqueConstraint'] = RQLUniqueConstraint
+CONSTRAINTS['RQLVocabularyConstraint'] = RQLVocabularyConstraint
+PyFileReader.context.update(CONSTRAINTS)
+
+
+class BootstrapSchemaLoader(SchemaLoader):
+    """cubicweb specific schema loader, loading only schema necessary to read
+    the persistent schema
+    """
+    schemacls = CubicWebSchema
+    SchemaLoader.file_handlers.update({'.rel' : CubicWebRelationFileReader,
+                                       })
+
+    def load(self, config, path=()):
+        """return a Schema instance from the schema definition read
+        from <directory>
+        """
+        self.lib_directory = config.schemas_lib_dir()
+        return super(BootstrapSchemaLoader, self).load(
+            path, config.appid, register_base_types=False)
+    
+    def _load_definition_files(self, cubes=None):
+        # bootstrapping, ignore cubes
+        for filepath in self.include_schema_files('bootstrap'):
+            self.info('loading %s', filepath)
+            self.handle_file(filepath)
+        
+    def unhandled_file(self, filepath):
+        """called when a file without handler associated has been found"""
+        self.warning('ignoring file %r', filepath)
+
+
+class CubicWebSchemaLoader(BootstrapSchemaLoader):
+    """cubicweb specific schema loader, automatically adding metadata to the
+    application's schema
+    """
+
+    def load(self, config):
+        """return a Schema instance from the schema definition read
+        from <directory>
+        """
+        self.info('loading %s schemas', ', '.join(config.cubes()))
+        path = reversed([config.apphome] + config.cubes_path())
+        return super(CubicWebSchemaLoader, self).load(config, path=path)
+
+    def _load_definition_files(self, cubes):
+        for filepath in (self.include_schema_files('bootstrap')
+                         + self.include_schema_files('base')
+                         + self.include_schema_files('Bookmark')
+                         + self.include_schema_files('Card')):
+            self.info('loading %s', filepath)
+            self.handle_file(filepath)
+        for cube in cubes:
+            for filepath in self.get_schema_files(cube):
+                self.info('loading %s', filepath)
+                self.handle_file(filepath)
+
+
+# _() is just there to add messages to the catalog, don't care about actual
+# translation
+PERM_USE_TEMPLATE_FORMAT = _('use_template_format')
+
+class FormatConstraint(StaticVocabularyConstraint):
+    need_perm_formats = (_('text/cubicweb-page-template'),
+                         )
+    regular_formats = (_('text/rest'),
+                       _('text/html'),
+                       _('text/plain'),
+                       )
+    def __init__(self):
+        pass
+    def serialize(self):
+        """called to make persistent valuable data of a constraint"""
+        return None
+
+    @classmethod
+    def deserialize(cls, value):
+        """called to restore serialized data of a constraint. Should return
+        a `cls` instance
+        """
+        return cls()
+    
+    def vocabulary(self, entity=None):
+        if entity and entity.req.user.has_permission(PERM_USE_TEMPLATE_FORMAT):
+            return self.regular_formats + self.need_perm_formats
+        return self.regular_formats
+    
+    def __str__(self):
+        return 'value in (%s)' % u', '.join(repr(unicode(word)) for word in self.vocabulary())
+    
+    
+format_constraint = FormatConstraint()
+CONSTRAINTS['FormatConstraint'] = FormatConstraint
+PyFileReader.context['format_constraint'] = format_constraint
+
+from logging import getLogger
+from cubicweb import set_log_methods
+set_log_methods(CubicWebSchemaLoader, getLogger('cubicweb.schemaloader'))
+set_log_methods(BootstrapSchemaLoader, getLogger('cubicweb.bootstrapschemaloader'))
+set_log_methods(RQLExpression, getLogger('cubicweb.schema'))
+
+# XXX monkey patch PyFileReader.import_erschema while bw_normalize_etype is
+# necessary
+orig_import_erschema = PyFileReader.import_erschema
+def bw_import_erschema(self, ertype, schemamod=None, instantiate=True):
+    return orig_import_erschema(self, bw_normalize_etype(ertype), schemamod, instantiate)
+PyFileReader.import_erschema = bw_import_erschema
+    
+# XXX itou for some Statement methods
+from rql import stmts
+orig_get_etype = stmts.ScopeNode.get_etype
+def bw_get_etype(self, name):
+    return orig_get_etype(self, bw_normalize_etype(name))
+stmts.ScopeNode.get_etype = bw_get_etype
+
+orig_add_main_variable_delete = stmts.Delete.add_main_variable
+def bw_add_main_variable_delete(self, etype, vref):
+    return orig_add_main_variable_delete(self, bw_normalize_etype(etype), vref)
+stmts.Delete.add_main_variable = bw_add_main_variable_delete
+
+orig_add_main_variable_insert = stmts.Insert.add_main_variable
+def bw_add_main_variable_insert(self, etype, vref):
+    return orig_add_main_variable_insert(self, bw_normalize_etype(etype), vref)
+stmts.Insert.add_main_variable = bw_add_main_variable_insert
+
+orig_set_statement_type = stmts.Select.set_statement_type
+def bw_set_statement_type(self, etype):
+    return orig_set_statement_type(self, bw_normalize_etype(etype))
+stmts.Select.set_statement_type = bw_set_statement_type
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/schemas/Bookmark.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,17 @@
+
+class Bookmark(MetaUserEntityType):
+    """define an entity type, used to build the application schema"""
+    title = String(required=True, maxsize=128)
+    path  = String(maxsize=512, required=True,
+                   description=_("relative url of the bookmarked page"))
+    
+    bookmarked_by = SubjectRelation('EUser',
+                                    description=_("users using this bookmark"))
+    
+
+class bookmarked_by(MetaUserRelationType):
+    permissions = {'read':   ('managers', 'users', 'guests',),
+                   # test user in users group to avoid granting permission to anonymous user
+                   'add':    ('managers', RRQLExpression('O identity U, U in_group G, G name "users"')),
+                   'delete': ('managers', RRQLExpression('O identity U, U in_group G, G name "users"')),
+                   }
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/schemas/Card.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,18 @@
+from cubicweb.schema import format_constraint
+
+class Card(EntityType):
+    """a card is a textual content used as documentation, reference, procedure reminder"""
+    permissions = {
+        'read':   ('managers', 'users', 'guests'),
+        'add':    ('managers', 'users'),
+        'delete': ('managers', 'owners'),
+        'update': ('managers', 'owners',),
+        }
+    
+    title    = String(required=True, fulltextindexed=True, maxsize=256)
+    synopsis = String(fulltextindexed=True, maxsize=512,
+                      description=_("an abstract for this card"))
+    content_format = String(meta=True, internationalizable=True, maxsize=50,
+                            default='text/rest', constraints=[format_constraint])
+    content  = String(fulltextindexed=True)
+    wikiid = String(maxsize=64, indexed=True)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/schemas/_regproc.sql.mysql	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,22 @@
+/* -*- sql -*- 
+
+   mysql specific registered procedures, 
+
+*/
+
+/* XXX limit_size version dealing with format as postgres version does.
+   XXX mysql doesn't support overloading, each function should have a different name
+       
+   NOTE: fulltext renamed since it causes a mysql name conflict
+ */
+
+CREATE FUNCTION text_limit_size(vfulltext TEXT, maxsize INT)
+RETURNS TEXT
+NO SQL
+BEGIN
+    IF LENGTH(vfulltext) < maxsize THEN
+       RETURN vfulltext;
+    ELSE
+       RETURN SUBSTRING(vfulltext from 1 for maxsize) || '...';
+    END IF;
+END ;;
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/schemas/_regproc.sql.postgres	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,47 @@
+/* -*- sql -*- 
+
+   postgres specific registered procedures, 
+   require the plpgsql language installed 
+
+*/
+
+CREATE FUNCTION comma_join (anyarray) RETURNS text AS $$
+    SELECT array_to_string($1, ', ')
+$$ LANGUAGE SQL;;
+
+CREATE AGGREGATE group_concat (
+  basetype = anyelement,
+  sfunc = array_append,
+  stype = anyarray,
+  finalfunc = comma_join,
+  initcond = '{}'
+);;
+
+
+
+CREATE FUNCTION limit_size (fulltext text, format text, maxsize integer) RETURNS text AS $$
+DECLARE
+    plaintext text;
+BEGIN
+    IF char_length(fulltext) < maxsize THEN
+       RETURN fulltext;
+    END IF;
+    IF format = 'text/html' OR format = 'text/xhtml' OR format = 'text/xml' THEN
+       plaintext := regexp_replace(fulltext, '<[\\w/][^>]+>', '', 'g');
+    ELSE
+       plaintext := fulltext;
+    END IF;
+    IF char_length(plaintext) < maxsize THEN
+       RETURN plaintext;
+    ELSE
+       RETURN substring(plaintext from 1 for maxsize) || '...';
+    END IF;
+END
+$$ LANGUAGE plpgsql;;
+
+
+CREATE FUNCTION text_limit_size (fulltext text, maxsize integer) RETURNS text AS $$
+BEGIN
+    RETURN limit_size(fulltext, 'text/plain', maxsize);
+END
+$$ LANGUAGE plpgsql;;
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/schemas/base.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,314 @@
+"""core CubicWeb schema, but not necessary at bootstrap time
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from cubicweb.schema import format_constraint
+
+
+class EUser(RestrictedEntityType):
+    """define a CubicWeb user"""
+    permissions = {
+        'read':   ('managers', 'users', ERQLExpression('X identity U')),
+        'add':    ('managers',),
+        'delete': ('managers',),
+        'update': ('managers', ERQLExpression('X identity U, NOT U in_group G, G name "guests"'),),
+        }
+
+    login     = String(required=True, unique=True, maxsize=64,
+                       description=_('unique identifier used to connect to the application'))
+    upassword = Password(required=True) # password is a reserved word for mysql
+    firstname = String(maxsize=64)
+    surname   = String(maxsize=64)
+    last_login_time  = Datetime(description=_('last connection date'))
+    # allowing an email to be the primary email of multiple entities is necessary for
+    # test at least :-/    
+    primary_email = SubjectRelation('EmailAddress', cardinality='??',
+                                    description=_('email address to use for notification'))
+    use_email     = SubjectRelation('EmailAddress', cardinality='*?', composite='subject')
+
+    in_group = SubjectRelation('EGroup', cardinality='+*',
+                               constraints=[RQLConstraint('NOT O name "owners"')],
+                               description=_('groups grant permissions to the user'))
+    in_state = SubjectRelation('State', cardinality='1*',
+                               # XXX automatize this
+                               constraints=[RQLConstraint('S is ET, O state_of ET')],
+                               description=_('account state'))
+    wf_info_for = ObjectRelation('TrInfo', cardinality='1*', composite='object')
+
+
+class EmailAddress(MetaEntityType):
+    """an electronic mail address associated to a short alias"""
+    permissions = {
+        'read':   ('managers', 'users', 'guests',), # XXX if P use_email X, U has_read_permission P
+        'add':    ('managers', 'users',),
+        'delete': ('managers', 'owners', ERQLExpression('P use_email X, U has_update_permission P')),
+        'update': ('managers', 'owners', ERQLExpression('P use_email X, U has_update_permission P')),
+        }
+    
+    alias   = String(fulltextindexed=True, maxsize=56)
+    address = String(required=True, fulltextindexed=True, 
+                     indexed=True, unique=True, maxsize=128)
+    canonical = Boolean(default=False,
+                        description=_('when multiple addresses are equivalent \
+(such as python-projects@logilab.org and python-projects@lists.logilab.org), set this \
+to true on one of them which is the preferred form.'))
+    identical_to = SubjectRelation('EmailAddress')
+
+class use_email(RelationType):
+    """"""
+    permissions = {
+        'read':   ('managers', 'users', 'guests',),
+        'add':    ('managers', RRQLExpression('U has_update_permission S'),),
+        'delete': ('managers', RRQLExpression('U has_update_permission S'),),
+        }
+    fulltext_container = 'subject'
+
+class primary_email(RelationType):
+    """the prefered email"""
+    permissions = use_email.permissions
+    
+class identical_to(RelationType):
+    """identical_to"""
+    symetric = True
+    permissions = {
+        'read':   ('managers', 'users', 'guests',),
+        # XXX should have update permissions on both subject and object,
+        #     though by doing this we will probably have no way to add
+        #     this relation in the web ui. The easiest way to achieve this
+        #     is probably to be able to have "U has_update_permission O" as
+        #     RQLConstraint of the relation definition, though this is not yet
+        #     possible
+        'add':    ('managers', RRQLExpression('U has_update_permission S'),),
+        'delete': ('managers', RRQLExpression('U has_update_permission S'),),
+        }
+
+class in_group(MetaRelationType):
+    """core relation indicating a user's groups"""
+    meta = False
+    
+class owned_by(MetaRelationType):
+    """core relation indicating owners of an entity. This relation
+    implicitly put the owner into the owners group for the entity
+    """
+    permissions = {
+        'read':   ('managers', 'users', 'guests'),
+        'add':    ('managers', RRQLExpression('S owned_by U'),), 
+        'delete': ('managers', RRQLExpression('S owned_by U'),),
+        }
+    # 0..n cardinality for entities created by internal session (no attached user)
+    # and to support later deletion of a user which has created some entities
+    cardinality = '**'
+    subject = '**'
+    object = 'EUser'
+    
+class created_by(MetaRelationType):
+    """core relation indicating the original creator of an entity"""
+    permissions = {
+        'read':   ('managers', 'users', 'guests'),
+        'add':    ('managers',),
+        'delete': ('managers',),
+        }
+    # 0..1 cardinality for entities created by internal session (no attached user)
+    # and to support later deletion of a user which has created some entities
+    cardinality = '?*' 
+    subject = '**'
+    object = 'EUser'
+
+    
+class creation_date(MetaAttributeRelationType):
+    """creation time of an entity"""
+    cardinality = '11'
+    subject = '**'
+    object = 'Datetime'
+
+class modification_date(MetaAttributeRelationType):
+    """latest modification time of an entity"""
+    cardinality = '11'
+    subject = '**'
+    object = 'Datetime'
+
+
+class State(MetaEntityType):
+    """used to associate simple states to an entity type and/or to define
+    workflows
+    """
+    name = String(required=True, indexed=True, internationalizable=True,
+                  maxsize=256)
+    description_format = String(meta=True, internationalizable=True, maxsize=50,
+                                default='text/rest', constraints=[format_constraint])
+    description = String(fulltextindexed=True,
+                         description=_('semantic description of this state'))
+    
+    state_of = SubjectRelation('EEType', cardinality='+*',
+                    description=_('entity types which may use this state'),
+                    constraints=[RQLConstraint('O final FALSE')])
+    allowed_transition = SubjectRelation('Transition', cardinality='**',
+                                         constraints=[RQLConstraint('S state_of ET, O transition_of ET')],
+                                         description=_('allowed transitions from this state'))
+    
+    initial_state = ObjectRelation('EEType', cardinality='?*',
+                                   # S initial_state O, O state_of S
+                                   constraints=[RQLConstraint('O state_of S')],
+                                   description=_('initial state for entities of this type'))
+
+
+class Transition(MetaEntityType):
+    """use to define a transition from one or multiple states to a destination
+    states in workflow's definitions.
+    """
+    name = String(required=True, indexed=True, internationalizable=True,
+                  maxsize=256)
+    description_format = String(meta=True, internationalizable=True, maxsize=50,
+                                default='text/rest', constraints=[format_constraint])
+    description = String(fulltextindexed=True,
+                         description=_('semantic description of this transition'))
+    condition = SubjectRelation('RQLExpression', cardinality='*?', composite='subject',
+                                description=_('a RQL expression which should return some results, '
+                                              'else the transition won\'t be available. '
+                                              'This query may use X and U variables '
+                                              'that will respectivly represents '
+                                              'the current entity and the current user'))
+    
+    require_group = SubjectRelation('EGroup', cardinality='**',
+                                    description=_('group in which a user should be to be '
+                                                  'allowed to pass this transition'))
+    transition_of = SubjectRelation('EEType', cardinality='+*',
+                                    description=_('entity types which may use this transition'),
+                                    constraints=[RQLConstraint('O final FALSE')])
+    destination_state = SubjectRelation('State', cardinality='?*',
+                                        constraints=[RQLConstraint('S transition_of ET, O state_of ET')],
+                                        description=_('destination state for this transition'))
+
+
+class TrInfo(MetaEntityType):
+    from_state = SubjectRelation('State', cardinality='?*')
+    to_state = SubjectRelation('State', cardinality='1*')
+    comment_format = String(meta=True, internationalizable=True, maxsize=50,
+                            default='text/rest', constraints=[format_constraint])
+    comment = String(fulltextindexed=True)
+    # get actor and date time using owned_by and creation_date
+
+
+class from_state(MetaRelationType):
+    inlined = True
+class to_state(MetaRelationType):
+    inlined = True
+class wf_info_for(MetaRelationType):
+    """link a transition information to its object"""
+    permissions = {
+        'read':   ('managers', 'users', 'guests',),# RRQLExpression('U has_read_permission O')),
+        'add':    (), # handled automatically, no one should add one explicitly
+        'delete': ('managers',), # RRQLExpression('U has_delete_permission O')
+        }
+    inlined = True
+    composite = 'object'
+    fulltext_container = composite
+    
+class state_of(MetaRelationType):
+    """link a state to one or more entity type"""
+class transition_of(MetaRelationType):
+    """link a transition to one or more entity type"""
+    
+class initial_state(MetaRelationType):
+    """indicate which state should be used by default when an entity using
+    states is created
+    """
+    inlined = True
+
+class destination_state(MetaRelationType):
+    """destination state of a transition"""
+    inlined = True
+    
+class allowed_transition(MetaRelationType):
+    """allowed transition from this state"""
+
+class in_state(UserRelationType):
+    """indicate the current state of an entity"""
+    meta = True
+    # not inlined intentionally since when using ldap sources, user's state
+    # has to be stored outside the EUser table
+    
+    # add/delete perms given to managers/users, after what most of the job
+    # is done by workflow enforcement
+    
+
+class EProperty(EntityType):
+    """used for cubicweb configuration. Once a property has been created you
+    can't change the key.
+    """
+    permissions = {
+        'read':   ('managers', 'users', 'guests'),
+        'add':    ('managers', 'users',),
+        'update': ('managers', 'owners',),
+        'delete': ('managers', 'owners',),
+        }
+    meta = True
+    # key is a reserved word for mysql
+    pkey = String(required=True, internationalizable=True, maxsize=256,
+                  description=_('defines what\'s the property is applied for. '
+                                'You must select this first to be able to set '
+                                'value'))
+    value = String(internationalizable=True, maxsize=256)
+    
+    for_user = SubjectRelation('EUser', cardinality='?*', composite='object',
+                               description=_('user for which this property is '
+                                             'applying. If this relation is not '
+                                             'set, the property is considered as'
+                                             ' a global property'))
+
+
+class for_user(MetaRelationType):
+    """link a property to the user which want this property customization. Unless
+    you're a site manager, this relation will be handled automatically.
+    """
+    permissions = {
+        'read':   ('managers', 'users', 'guests'),
+        'add':    ('managers',),
+        'delete': ('managers',),
+        }
+    inlined = True
+
+
+class EPermission(MetaEntityType):
+    """entity type that may be used to construct some advanced security configuration
+    """
+    name = String(required=True, indexed=True, internationalizable=True, maxsize=100,
+                  description=_('name or identifier of the permission'))
+    label = String(required=True, internationalizable=True, maxsize=100,
+                   description=_('distinct label to distinguate between other permission entity of the same name'))
+    require_group = SubjectRelation('EGroup', 
+                                    description=_('groups to which the permission is granted'))
+
+# explicitly add X require_permission EPermission for each entity that should have
+# configurable security
+class require_permission(RelationType):
+    """link a permission to the entity. This permission should be used in the
+    security definition of the entity's type to be useful.
+    """
+    permissions = {
+        'read':   ('managers', 'users', 'guests'),
+        'add':    ('managers',),
+        'delete': ('managers',),
+        }
+    
+class require_group(MetaRelationType):
+    """used to grant a permission to a group"""
+    permissions = {
+        'read':   ('managers', 'users', 'guests'),
+        'add':    ('managers',),
+        'delete': ('managers',),
+        }
+
+    
+class see_also(RelationType):
+    """generic relation to link one entity to another"""
+    symetric = True
+
+class ECache(EntityType):
+    name = String(required=True, unique=True, indexed=True, 
+                  description=_('name of the cache'))
+    timestamp = Datetime(default='NOW')
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/schemas/bootstrap.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,245 @@
+"""core CubicWeb schema necessary for bootstrapping the actual application's schema
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+
+from cubicweb.schema import format_constraint
+
+
+# not restricted since as "is" is handled as other relations, guests need
+# access to this
class EEType(MetaEntityType):
    """define an entity type, used to build the application schema"""
    name = String(required=True, indexed=True, internationalizable=True,
                  unique=True, maxsize=64)
    # MIME type of the description text, constrained by format_constraint
    description_format = String(meta=True, internationalizable=True, maxsize=50,
                                default='text/plain', constraints=[format_constraint])
    description = String(internationalizable=True,
                         description=_('semantic description of this entity type'))
    meta = Boolean(description=_('is it an application entity type or not ?'))
    # necessary to filter using RQL
    final = Boolean(description=_('automatic'))
+
+
class ERType(MetaEntityType):
    """define a relation type, used to build the application schema"""
    name = String(required=True, indexed=True, internationalizable=True,
                  unique=True, maxsize=64)
    description_format = String(meta=True, internationalizable=True, maxsize=50,
                                default='text/plain', constraints=[format_constraint])
    description = String(internationalizable=True,
                         description=_('semantic description of this relation type'))
    meta = Boolean(description=_('is it an application relation type or not ?'))
    symetric = Boolean(description=_('is this relation equivalent in both direction ?'))
    inlined = Boolean(description=_('is this relation physically inlined? you should know what you\'re doing if you are changing this!'))
    # which side of the relation aggregates the other side's full text index
    fulltext_container = String(description=_('if full text content of subject/object entity '
                                              'should be added to other side entity (the container).'),
                                vocabulary=('', _('subject'), _('object')),
                                maxsize=8, default=None)
    final = Boolean(description=_('automatic'))
+
+
class EFRDef(MetaEntityType):
    """define a final relation: link a final relation type from a non final
    entity to a final entity type.

    used to build the application schema
    """
    # relation type must be final ('O final TRUE'); composite='object' so the
    # definition is deleted along with its relation/entity types
    relation_type = SubjectRelation('ERType', cardinality='1*',
                                    constraints=[RQLConstraint('O final TRUE')],
                                    composite='object')
    from_entity = SubjectRelation('EEType', cardinality='1*',
                                  constraints=[RQLConstraint('O final FALSE')],
                                  composite='object')
    to_entity = SubjectRelation('EEType', cardinality='1*',
                                constraints=[RQLConstraint('O final TRUE')],
                                composite='object')
    constrained_by = SubjectRelation('EConstraint', cardinality='*1', composite='subject')

    # attributes may only have a subject cardinality of '1' or '?'
    cardinality = String(maxsize=2, internationalizable=True,
                         vocabulary=[_('?1'), _('11'), _('??'), _('1?')],
                         description=_('subject/object cardinality'))
    # fix: mark the description for translation, as done for the identical
    # string in ENFRDef (it was a bare string here)
    ordernum = Int(description=_('control subject entity\'s relations order'), default=0)

    indexed = Boolean(description=_('create an index for quick search on this attribute'))
    fulltextindexed = Boolean(description=_('index this attribute\'s value in the plain text index'))
    internationalizable = Boolean(description=_('is this attribute\'s value translatable'))
    defaultval = String(maxsize=256)

    description_format = String(meta=True, internationalizable=True, maxsize=50,
                                default='text/plain', constraints=[format_constraint])
    description = String(internationalizable=True,
                         description=_('semantic description of this attribute'))
+    
+
# the 16 possible subject/object cardinality pairs for non final relations,
# each marked for translation; per the 'subject/object cardinality'
# description below, the first character is the subject side
CARDINALITY_VOCAB = [_('?*'), _('1*'), _('+*'), _('**'),
                     _('?+'), _('1+'), _('++'), _('*+'),
                     _('?1'), _('11'), _('+1'), _('*1'),
                     _('??'), _('1?'), _('+?'), _('*?')]
+
class ENFRDef(MetaEntityType):
    """define a non final relation: link a non final relation type from a non
    final entity to a non final entity type.

    used to build the application schema
    """
    # both ends must be non final ('O final FALSE')
    relation_type = SubjectRelation('ERType', cardinality='1*',
                                    constraints=[RQLConstraint('O final FALSE')],
                                    composite='object')
    from_entity = SubjectRelation('EEType', cardinality='1*',
                                  constraints=[RQLConstraint('O final FALSE')],
                                  composite='object')
    to_entity = SubjectRelation('EEType', cardinality='1*',
                                constraints=[RQLConstraint('O final FALSE')],
                                composite='object')
    constrained_by = SubjectRelation('EConstraint', cardinality='*1', composite='subject')

    # all 16 pairs are allowed here, unlike final relations (see EFRDef)
    cardinality = String(maxsize=2, internationalizable=True,
                         vocabulary=CARDINALITY_VOCAB,
                         description=_('subject/object cardinality'))
    ordernum = Int(description=_('control subject entity\'s relations order'),
                   default=0)
    composite = String(description=_('is the subject/object entity of the relation '
                                     'composed of the other ? This implies that when '
                                     'the composite is deleted, composants are also '
                                     'deleted.'),
                       vocabulary=('', _('subject'), _('object')),
                       maxsize=8, default=None)

    description_format = String(meta=True, internationalizable=True, maxsize=50,
                                default='text/plain', constraints=[format_constraint])
    description = String(internationalizable=True,
                         description=_('semantic description of this relation'))
+    
+
+# not restricted since it has to be read when checking allowed transitions
class RQLExpression(MetaEntityType):
    """define a rql expression used to define permissions"""
    exprtype = String(required=True, vocabulary=['ERQLExpression', 'RRQLExpression'])
    mainvars = String(maxsize=8,
                      description=_('name of the main variables which should be '
                                    'used in the selection if necessary (comma '
                                    'separated)'))
    expression = String(required=True,
                        description=_('restriction part of a rql query. '
                                      'For entity rql expression, X and U are '
                                      'predefined respectivly to the current object and to '
                                      'the request user. For relation rql expression, '
                                      'S, O and U are predefined respectivly to the current '
                                      'relation\'subject, object and to '
                                      'the request user. '))

    # ObjectRelation: EEType/ERType is the subject side, this expression the
    # object; composite='subject' so expressions die with their schema entity
    read_permission = ObjectRelation(('EEType', 'ERType'), cardinality='+?', composite='subject',
                                      description=_('rql expression allowing to read entities/relations of this type'))
    add_permission = ObjectRelation(('EEType', 'ERType'), cardinality='*?', composite='subject',
                                     description=_('rql expression allowing to add entities/relations of this type'))
    delete_permission = ObjectRelation(('EEType', 'ERType'), cardinality='*?', composite='subject',
                                        description=_('rql expression allowing to delete entities/relations of this type'))
    # update only makes sense for entity types, hence no ERType here
    update_permission = ObjectRelation('EEType', cardinality='*?', composite='subject',
                                        description=_('rql expression allowing to update entities of this type'))
+    
+
class EConstraint(MetaEntityType):
    """define a schema constraint"""
    # each constraint instance points to exactly one constraint type
    cstrtype = SubjectRelation('EConstraintType', cardinality='1*')
    value = String(description=_('depends on the constraint type'))
+
+
class EConstraintType(MetaEntityType):
    """define a schema constraint type"""
    # e.g. size or unicity constraint kinds, identified by unique name
    name = String(required=True, indexed=True, internationalizable=True,
                  unique=True, maxsize=64)
+
+
+# not restricted since it has to be read when checking allowed transitions
class EGroup(MetaEntityType):
    """define a CubicWeb users group"""
    name = String(required=True, indexed=True, internationalizable=True,
                  unique=True, maxsize=64)

    # group-based counterparts of the rql-expression permissions on
    # RQLExpression; 'read' requires at least one group ('+*')
    read_permission = ObjectRelation(('EEType', 'ERType'), cardinality='+*',
                                      description=_('groups allowed to read entities/relations of this type'))
    add_permission = ObjectRelation(('EEType', 'ERType'),
                                     description=_('groups allowed to add entities/relations of this type'))
    delete_permission = ObjectRelation(('EEType', 'ERType'),
                                        description=_('groups allowed to delete entities/relations of this type'))
    # update applies to entity types only
    update_permission = ObjectRelation('EEType',
                                        description=_('groups allowed to update entities of this type'))
+    
+    
+    
class relation_type(MetaRelationType):
    """link a relation definition to its relation type"""
    # stored inlined (a column on the subject's table, per ERType.inlined)
    inlined = True
class from_entity(MetaRelationType):
    """link a relation definition to its subject entity type"""
    # stored inlined (a column on the subject's table, per ERType.inlined)
    inlined = True
class to_entity(MetaRelationType):
    """link a relation definition to its object entity type"""
    # stored inlined (a column on the subject's table, per ERType.inlined)
    inlined = True
class constrained_by(MetaRelationType):
    """constraints applying on this relation"""
    # NOTE(review): no overrides here — presumably inherits every property
    # from MetaRelationType; confirm against the base class
+    
class cstrtype(MetaRelationType):
    """constraint factory"""
    # stored inlined (a column on the subject's table, per ERType.inlined)
    inlined = True
+
class read_permission(MetaRelationType):
    """core relation giving to a group the permission to read an entity or
    relation type
    """
    # NOTE(review): no overrides — presumably inherits MetaRelationType defaults
class add_permission(MetaRelationType):
    """core relation giving to a group the permission to add an entity or
    relation type
    """
    # NOTE(review): no overrides — presumably inherits MetaRelationType defaults
class delete_permission(MetaRelationType):
    """core relation giving to a group the permission to delete an entity or
    relation type
    """
    # NOTE(review): no overrides — presumably inherits MetaRelationType defaults
class update_permission(MetaRelationType):
    """core relation giving to a group the permission to update an entity type
    """
    # NOTE(review): no overrides — presumably inherits MetaRelationType defaults
+
+
class is_(MetaRelationType):
    """core relation indicating the type of an entity
    """
    # class named with a trailing underscore since `is` is a python keyword;
    # the actual relation type name is set explicitly below
    name = 'is'
    # don't explicitly set composite here, this is handled anyway
    #composite = 'object'
    # nobody may add/delete this relation by hand: it is system-managed
    permissions = {
        'read':   ('managers', 'users', 'guests'),
        'add':    (),
        'delete': (),
        }
    cardinality = '1*'
    # '**' : any non final entity type may be the subject
    subject = '**'
    object = 'EEType'
+
class is_instance_of(MetaRelationType):
    """core relation indicating the types (including specialized types)
    of an entity
    """
    # don't explicitly set composite here, this is handled anyway
    #composite = 'object'
    # system-managed like 'is': nobody may add/delete it by hand
    permissions = {
        'read':   ('managers', 'users', 'guests'),
        'add':    (),
        'delete': (),
        }
    # '+*' : an entity has one or more types here, unlike 'is' ('1*')
    cardinality = '+*'
    subject = '**'
    object = 'EEType'
+
class specializes(MetaRelationType):
    """relation between an entity type and the entity type it specializes"""
    name = 'specializes'
    permissions = {
        'read':   ('managers', 'users', 'guests'),
        'add':    ('managers',),
        'delete': ('managers',),
        }
    # '?*' : presumably at most one parent type per entity type — confirm
    # against the cardinality semantics
    cardinality = '?*'
    subject = 'EEType'
    object = 'EEType'
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/schemaviewer.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,227 @@
+"""an helper class to display CubicWeb schema using ureports
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from logilab.common.ureports import Section, Title, Table, Link, Span, Text
+from yams.schema2dot import CARD_MAP    
+
+_ = unicode
+I18NSTRINGS = [_('read'), _('add'), _('delete'), _('update'), _('order')]
+
class SchemaViewer(object):
    """return an ureport layout for some part of a schema"""

    def __init__(self, req=None, encoding=None):
        """:param req: optional web request, used for translations, css
        inclusion, url building and view selection
        :param encoding: output encoding used by to_string(), defaulting to
        the request's encoding when a request is given
        """
        self.req = req
        if req is not None:
            self.req.add_css('cubicweb.schema.css')
            self._possible_views = req.vreg.possible_views
            if not encoding:
                encoding = req.encoding
        else:
            # NOTE(review): this takes one argument but possible_views()
            # below calls self._possible_views with two — looks broken when
            # req is None; confirm whether that path is ever taken
            self._possible_views = lambda x: ()
        self.encoding = encoding

    def format_acls(self, schema, access_types):
        """return a layout displaying access control lists"""
        # flat cell list for a 2-column table: access type / allowed groups
        # and rql expressions
        data = [self.req._('access type'), self.req._('groups')]
        for access_type in access_types:
            data.append(self.req._(access_type))
            acls = [self.req._(group) for group in schema.get_groups(access_type)]
            acls += (rqlexp.expression for rqlexp in schema.get_rqlexprs(access_type))
            data.append(', '.join(acls))
        return Section(children=(Table(cols=2, cheaders=1, rheaders=1, children=data),),
                       klass='acl')

    def visit_schema(self, schema, display_relations=0,
                     skiprels=(), skipmeta=True):
        """get a layout for a whole schema

        :param display_relations: also emit a section for relation types
        :param skiprels: relation type names to leave out
        :param skipmeta: leave out meta entity/relation types
        """
        title = Title(self.req._('Schema %s') % schema.name,
                      klass='titleUnderline')
        layout = Section(children=(title,))
        esection = Section(children=(Title(self.req._('Entities'),
                                           klass='titleUnderline'),))
        layout.append(esection)
        # only non final entity types are displayed
        entities = [eschema for eschema in schema.entities()
                    if not eschema.is_final()]
        if skipmeta:
            entities = [eschema for eschema in entities
                        if not eschema.meta]
        # sort by type name for a stable ordering
        keys = [(eschema.type, eschema) for eschema in entities]
        for key, eschema in sorted(keys):
            esection.append(self.visit_entityschema(eschema, skiprels))
        if display_relations:
            title = Title(self.req._('Relations'), klass='titleUnderline')
            rsection = Section(children=(title,))
            layout.append(rsection)
            relations = [rschema for rschema in schema.relations()
                         if not (rschema.is_final() or rschema.type in skiprels)]
            if skipmeta:
                relations = [rschema for rschema in relations
                             if not rschema.meta]
            keys = [(rschema.type, rschema) for rschema in relations]
            for key, rschema in sorted(keys):
                relstr = self.visit_relationschema(rschema)
                rsection.append(relstr)
        return layout

    def _entity_attributes_data(self, eschema):
        """return the flat cell list (4 columns: attribute, type, default,
        constraints) describing the entity type's attributes
        """
        _ = self.req._
        data = [_('attribute'), _('type'), _('default'), _('constraints')]
        for rschema, aschema in eschema.attribute_definitions():
            # skip attributes the current user may not read
            if not (rschema.has_local_role('read') or rschema.has_perm(self.req, 'read')):
                continue
            aname = rschema.type
            if aname == 'eid':
                continue
            data.append('%s (%s)' % (aname, _(aname)))
            data.append(_(aschema.type))
            defaultval = eschema.default(aname)
            if defaultval is not None:
                default = self.to_string(defaultval)
            elif eschema.rproperty(rschema, 'cardinality')[0] == '1':
                default = _('required field')
            else:
                default = ''
            data.append(default)
            constraints = rschema.rproperty(eschema.type, aschema.type,
                                            'constraints')
            data.append(', '.join(str(constr) for constr in constraints))
        return data

    def eschema_link_url(self, eschema):
        """url of the schema view for the given entity type"""
        return self.req.build_url('eetype/%s?vid=eschema' % eschema)

    def rschema_link_url(self, rschema):
        """url of the schema view for the given relation type"""
        return self.req.build_url('ertype/%s?vid=eschema' % rschema)

    def possible_views(self, etype):
        """return the views applyable on the given entity type, excluding
        startup views
        """
        rset = self.req.etype_rset(etype)
        return [v for v in self._possible_views(self.req, rset)
                if v.category != 'startupview']

    def stereotype(self, name):
        """return an uml-like '<<name>>' stereotype span"""
        return Span((' <<%s>>' % name,), klass='stereotype')

    def visit_entityschema(self, eschema, skiprels=()):
        """get a layout for an entity schema"""
        etype = eschema.type
        layout = Section(children=' ', klass='clear')
        layout.append(Link(etype,'&nbsp;' , id=etype)) # anchor
        title = Link(self.eschema_link_url(eschema), etype)
        if eschema.meta:
            stereotype = self.stereotype('meta')
            boxchild = [Section(children=(title, ' (%s)'%eschema.display_name(self.req), stereotype), klass='title')]
        else:
            boxchild = [Section(children=(title, ' (%s)'%eschema.display_name(self.req)), klass='title')]
        table = Table(cols=4, rheaders=1,
                      children=self._entity_attributes_data(eschema))
        boxchild.append(Section(children=(table,), klass='body'))
        data = []
        data.append(Section(children=boxchild, klass='box'))
        data.append(Section(children='', klass='vl'))
        data.append(Section(children='', klass='hl'))
        t_vars = []
        rels = []
        first = True
        for rschema, targetschemas, x in eschema.relation_definitions():
            if rschema.type in skiprels:
                continue
            # skip relations the current user may not read
            if not (rschema.has_local_role('read') or rschema.has_perm(self.req, 'read')):
                continue
            rschemaurl = self.rschema_link_url(rschema)
            for oeschema in targetschemas:
                label = rschema.type
                if x == 'subject':
                    cards = rschema.rproperty(eschema, oeschema, 'cardinality')
                else:
                    # reverse the pair so the card next to this entity is its own
                    cards = rschema.rproperty(oeschema, eschema, 'cardinality')
                    cards = cards[::-1]
                label = '%s %s (%s) %s' % (CARD_MAP[cards[1]], label, display_name(self.req, label, x), CARD_MAP[cards[0]])
                rlink = Link(rschemaurl, label)
                elink = Link(self.eschema_link_url(oeschema), oeschema.type)
                # the first row gets dedicated css classes
                if first:
                    t_vars.append(Section(children=(elink,), klass='firstvar'))
                    rels.append(Section(children=(rlink,), klass='firstrel'))
                    first = False
                else:
                    t_vars.append(Section(children=(elink,), klass='var'))
                    rels.append(Section(children=(rlink,), klass='rel'))
        data.append(Section(children=rels, klass='rels'))
        data.append(Section(children=t_vars, klass='vars'))
        layout.append(Section(children=data, klass='entityAttributes'))
        if eschema.is_final(): # stop here for final entities
            return layout
        _ = self.req._
        # acl table and view list are only shown to managers
        if self.req.user.matching_groups('managers'):
            layout.append(self.format_acls(eschema, ('read', 'add', 'delete', 'update')))
            # possible views for this entity type
            views = [_(view.title) for view in self.possible_views(etype)]
            layout.append(Section(children=(Table(cols=1, rheaders=1,
                                                  children=[_('views')]+views),),
                                  klass='views'))
        return layout

    def visit_relationschema(self, rschema, title=True):
        """get a layout for a relation schema"""
        # NOTE(review): the `title` parameter is immediately shadowed below
        # and hence has no effect — confirm whether callers rely on it
        _ = self.req._
        title = Link(self.rschema_link_url(rschema), rschema.type)
        stereotypes = []
        if rschema.meta:
            stereotypes.append('meta')
        if rschema.symetric:
            stereotypes.append('symetric')
        if rschema.inlined:
            stereotypes.append('inlined')
        title = Section(children=(title, ' (%s)'%rschema.display_name(self.req)), klass='title')
        if stereotypes:
            title.append(self.stereotype(','.join(stereotypes)))
        layout = Section(children=(title,), klass='schema')
        data = [_('from'), _('to')]
        schema = rschema.schema
        rschema_objects = rschema.objects()
        if rschema_objects:
            # might be empty
            properties = [p for p in rschema.rproperty_defs(rschema_objects[0])
                          if not p in ('cardinality', 'composite', 'eid')]
        else:
            properties = []
        data += [_(prop) for prop in properties]
        cols = len(data)
        done = set()
        for subjtype, objtypes in rschema.associations():
            for objtype in objtypes:
                if (subjtype, objtype) in done:
                    continue
                done.add((subjtype, objtype))
                # a symetric relation is displayed once per unordered pair
                if rschema.symetric:
                    done.add((objtype, subjtype))
                data.append(Link(self.eschema_link_url(schema[subjtype]), subjtype))
                data.append(Link(self.eschema_link_url(schema[objtype]), objtype))
                for prop in properties:
                    val = rschema.rproperty(subjtype, objtype, prop)
                    if val is None:
                        val = ''
                    elif isinstance(val, (list, tuple)):
                        val = ', '.join(str(v) for v in val)
                    elif val and isinstance(val, basestring):
                        val = _(val)
                    else:
                        val = str(val)
                    data.append(Text(val))
        table = Table(cols=cols, rheaders=1, children=data)
        layout.append(Section(children=(table,), klass='relationDefinition'))
        # hide acls from anonymous users
        if not self.req.cnx.anonymous_connection:
            layout.append(self.format_acls(rschema, ('read', 'add', 'delete')))
        layout.append(Section(children='', klass='clear'))
        return layout

    def to_string(self, value):
        """used to convert arbitrary values to encoded string"""
        if isinstance(value, unicode):
            return value.encode(self.encoding, 'replace')
        return str(value)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/__init__.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,192 @@
+"""Server subcube of cubicweb : defines objects used only on the server
+(repository) side
+
+This module contains functions to initialize a new repository.
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
import sys
from functools import wraps
from os.path import join, exists

from logilab.common.modutils import LazyObject
+
+# server debugging flag
+DEBUG = False
+
def init_repository(config, interactive=True, drop=False, vreg=None):
    """initialise a repository database by creating tables and filling them
    with the minimal set of entities (ie at least the schema, base groups and
    an initial user)

    :param interactive: prompt for the manager account when it is not found
      in the sources configuration
    :param drop: drop existing tables first (failures are reported, skipped)
    """
    from glob import glob
    from cubicweb.schema import BASEGROUPS
    from cubicweb.dbapi import in_memory_cnx
    from cubicweb.server.repository import Repository
    from cubicweb.server.utils import manager_userpasswd
    from cubicweb.server.sqlutils import sqlexec, sqlschema, sqldropschema
    # configuration to avoid db schema loading and user'state checking
    # on connection; the original values are restored at the end
    read_application_schema = config.read_application_schema
    bootstrap_schema = config.bootstrap_schema
    config.read_application_schema = False
    config.creating = True
    config.bootstrap_schema = True
    config.consider_user_state = False
    config.set_language = False
    # only enable the system source at initialization time + admin which is not
    # an actual source but contains initial manager account information
    config.enabled_sources = ('system', 'admin')
    repo = Repository(config, vreg=vreg)
    assert len(repo.sources) == 1, repo.sources
    schema = repo.schema
    sourcescfg = config.sources()
    print 'creating necessary tables into the system source'
    source = sourcescfg['system']
    driver = source['db-driver']
    sqlcnx = repo.system_source.get_connection()
    sqlcursor = sqlcnx.cursor()
    # small helper running raw sql through the system source
    def execute(sql, args=None):
        repo.system_source.doexec(sqlcursor, sql, args)
    if drop:
        dropsql = sqldropschema(schema, driver)
        try:
            sqlexec(dropsql, execute)
        except Exception, ex:
            # best-effort: tables may simply not exist yet
            print 'drop failed, skipped (%s)' % ex
            sqlcnx.rollback()
    # schema entities and relations tables
    # can't skip entities table even if system source doesn't support them,
    # they are used sometimes by generated sql. Keeping them empty is much
    # simpler than fixing this...
    if sqlcnx.logged_user != source['db-user']:
        schemasql = sqlschema(schema, driver, user=source['db-user'])
    else:
        schemasql = sqlschema(schema, driver)
        #skip_entities=[str(e) for e in schema.entities()
        #               if not repo.system_source.support_entity(str(e))])
    sqlexec(schemasql, execute)
    # install additional driver specific sql files
    for fpath in glob(join(config.schemas_lib_dir(), '*.sql.%s' % driver)):
        print 'install', fpath
        sqlexec(open(fpath).read(), execute, False, delimiter=';;')
    for directory in config.cubes_path():
        for fpath in glob(join(directory, 'schema', '*.sql.%s' % driver)):
            print 'install', fpath
            sqlexec(open(fpath).read(), execute, False, delimiter=';;')
    sqlcursor.close()
    sqlcnx.commit()
    sqlcnx.close()
    session = repo.internal_session()
    try:
        login = unicode(sourcescfg['admin']['login'])
        pwd = sourcescfg['admin']['password']
    except KeyError:
        if interactive:
            msg = 'enter login and password of the initial manager account'
            login, pwd = manager_userpasswd(msg=msg, confirm=True)
        else:
            # fall back on the database credentials
            login, pwd = unicode(source['db-user']), source['db-password']
    print 'inserting default user and groups'
    # remember (eid, etype) of entities created before the schema is
    # serialized, so their 'is' relation can be fixed afterwards
    needisfix = []
    for group in BASEGROUPS:
        rset = session.execute('INSERT EGroup X: X name %(name)s',
                               {'name': unicode(group)})
        needisfix.append( (rset.rows[0][0], rset.description[0][0]) )
    rset = session.execute('INSERT EUser X: X login %(login)s, X upassword %(pwd)s',
                           {'login': login, 'pwd': pwd})
    needisfix.append( (rset.rows[0][0], rset.description[0][0]) )
    # NOTE(review): U is unrestricted — presumably only the user created just
    # above exists at this point; confirm
    session.execute('SET U in_group G WHERE G name "managers"')
    session.commit()
    # reloging using the admin user
    config._cubes = None # avoid assertion error
    repo, cnx = in_memory_cnx(config, login, pwd)
    assert len(repo.sources) == 1, repo.sources
    handler = config.migration_handler(schema, interactive=False,
                                       cnx=cnx, repo=repo)
    initialize_schema(config, schema, handler)
    # admin user and groups have been added before schema entities, fix the 'is'
    # relation
    for eid, etype in needisfix:
        handler.session.unsafe_execute('SET X is E WHERE X eid %(x)s, E name %(name)s',
                                       {'x': eid, 'name': etype}, 'x')
    # insert versions
    handler.cmd_add_entity('EProperty', pkey=u'system.version.cubicweb',
                           value=unicode(config.cubicweb_version()))
    for cube in config.cubes():
        handler.cmd_add_entity('EProperty',
                               pkey=u'system.version.%s' % cube.lower(),
                               value=unicode(config.cube_version(cube)))
    # yoo !
    cnx.commit()
    # let the non-system sources initialize themselves as well
    config.enabled_sources = None
    for uri, source_config in config.sources().items():
        if uri in ('admin', 'system'):
            # not an actual source or init_creating already called
            continue
        source = repo.get_source(uri, source_config)
        source.init_creating()
    cnx.commit()
    cnx.close()
    session.close()
    # restore initial configuration
    config.creating = False
    config.read_application_schema = read_application_schema
    config.bootstrap_schema = bootstrap_schema
    config.consider_user_state = True
    config.set_language = True
    print 'application %s initialized' % config.appid
+
+
def initialize_schema(config, schema, mhandler, event='create'):
    """serialize the application schema into the database, surrounded by the
    cubicweb and cubes pre/post <event> migration scripts

    :param event: migration event name used to build the script names
      (e.g. 'precreate'/'postcreate' by default)
    """
    from cubicweb.server.schemaserial import serialize_schema
    # cubes (and the application home) providing a migration directory
    paths = [p for p in config.cubes_path() + [config.apphome]
             if exists(join(p, 'migration'))]
    # execute cubicweb's pre<event> script
    mhandler.exec_event_script('pre%s' % event)
    # execute cubes pre<event> script if any
    for path in reversed(paths):
        mhandler.exec_event_script('pre%s' % event, path)
    # enter application'schema into the database
    serialize_schema(mhandler.rqlcursor, schema)
    # execute cubicweb's post<event> script
    mhandler.exec_event_script('post%s' % event)
    # execute cubes'post<event> script if any
    for path in reversed(paths):
        mhandler.exec_event_script('post%s' % event, path)
+
def set_debug(debugmode):
    """switch the module-level server DEBUG flag on or off"""
    global DEBUG
    DEBUG = debugmode
+
def debugged(func):
    """decorator to activate debug mode for the duration of the call

    The module-level DEBUG flag is set on entry and always reset to False on
    exit, even if the decorated function raises.
    """
    # preserve the decorated function's name/docstring for introspection
    @wraps(func)
    def wrapped(*args, **kwargs):
        global DEBUG
        DEBUG = True
        try:
            return func(*args, **kwargs)
        finally:
            DEBUG = False
    return wrapped
+
# sqlite stored procedures have to be registered at connection opening time;
# maps driver name to a list of registration callbacks
SQL_CONNECT_HOOKS = {}

# add to this set relations which should have their add security checking done
# *BEFORE* adding the actual relation (done after by default)
BEFORE_ADD_RELATIONS = set(('owned_by',))

# add to this set relations which should have their add security checking done
# *at COMMIT TIME* (done after by default)
ON_COMMIT_ADD_RELATIONS = set(())

# available sources registry; LazyObject defers the import of each source
# class until it is actually used
SOURCE_TYPES = {'native': LazyObject('cubicweb.server.sources.native', 'NativeSQLSource'),
                # XXX private sources installed by an external cube
                'pyrorql': LazyObject('cubicweb.server.sources.pyrorql', 'PyroRQLSource'),
                'ldapuser': LazyObject('cubicweb.server.sources.ldapuser', 'LDAPUserSource'),
                }
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/checkintegrity.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,280 @@
+"""Check integrity of a CubicWeb repository. Hum actually only the system database
+is checked.
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+import sys
+
+from mx.DateTime import now
+from logilab.common.shellutils import ProgressBar
+
def has_eid(sqlcursor, eid, eids):
    """return True if `eid` is a valid eid, i.e. it is registered in the
    `entities` table and has a row in its entity type's table

    :param sqlcursor: a DB-API cursor on the system database
    :param eid: the entity identifier to check
    :param eids: cache dictionary mapping already checked eids to the answer
    """
    if eid in eids:
        return eids[eid]
    sqlcursor.execute('SELECT type, source FROM entities WHERE eid=%s' % eid)
    row = sqlcursor.fetchone()
    if row is None:
        # unknown eid: not registered in the entities table
        # (explicit None check instead of a bare except which used to mask
        # genuine database errors)
        eids[eid] = False
        return False
    etype, source = row
    if source and source != 'system':
        # XXX entity from an external source, what to do... assume it's valid
        eids[eid] = True
        return True
    sqlcursor.execute('SELECT * FROM %s WHERE eid=%s' % (etype, eid))
    result = sqlcursor.fetchall()
    if not result:
        # registered in entities but missing from its entity type's table
        eids[eid] = False
        return False
    if len(result) > 1:
        msg = '  More than one entity with eid %s exists in source !'
        sys.stderr.write(msg % eid + '\n')
        sys.stderr.write('  WARNING : Unable to fix this, do it yourself !\n')
    eids[eid] = True
    return True
+
# XXX move to yams?
def etype_fti_containers(eschema, _done=None):
    """yield the entity schema(s) into which entities of type `eschema`
    are indexed in the full text index

    If the entity type has fulltext_containers relations, its text is indexed
    as part of some container entity, so recurse on the container types;
    otherwise the type is its own container.

    :param _done: internal set of already visited schemas, guarding against
                  cycles in the container relations graph
    """
    if _done is None:
        _done = set()
    _done.add(eschema)
    containers = tuple(eschema.fulltext_containers())
    if containers:
        for rschema, target in containers:
            # pick the entity types sitting at the container end of the relation
            if target == 'object':
                targets = rschema.objects(eschema)
            else:
                targets = rschema.subjects(eschema)
            for targeteschema in targets:
                if targeteschema in _done:
                    continue
                _done.add(targeteschema)
                for container in etype_fti_containers(targeteschema, _done):
                    yield container
    else:
        yield eschema
+    
def reindex_entities(schema, session):
    """reindex all entities in the repository: clear the full text index then
    refeed it by reindexing every entity of an indexable (container) type
    """
    # deactivate modification_date hook since we don't want them
    # to be updated due to the reindexation
    from cubicweb.server.hooks import (setmtime_before_update_entity,
                                       uniquecstrcheck_before_modification)
    from cubicweb.server.repository import FTIndexEntityOp
    repo = session.repo
    repo.hm.unregister_hook(setmtime_before_update_entity,
                            'before_update_entity', '')
    repo.hm.unregister_hook(uniquecstrcheck_before_modification,
                            'before_update_entity', '')
    # collect the entity types acting as full text index containers
    etypes = set()
    for eschema in schema.entities():
        if eschema.is_final():
            continue
        indexable_attrs = tuple(eschema.indexable_attributes()) # generator
        if not indexable_attrs:
            continue
        for container in etype_fti_containers(eschema):
            etypes.add(container)
    print 'Reindexing entities of type %s' % \
          ', '.join(sorted(str(e) for e in etypes))
    pb = ProgressBar(len(etypes) + 1)
    # first monkey patch Entity.check to disable validation
    from cubicweb.common.entity import Entity
    _check = Entity.check
    Entity.check = lambda self, creation=False: True
    # clear fti table first
    session.system_sql('DELETE FROM %s' % session.repo.system_source.dbhelper.fti_table)
    pb.update()
    # reindex entities by generating rql queries which set all indexable
    # attribute to their current value
    for eschema in etypes:
        for entity in session.execute('Any X WHERE X is %s' % eschema).entities():
            FTIndexEntityOp(session, entity=entity)
        pb.update()
    # restore Entity.check
    Entity.check = _check
+
+    
def check_schema(session):
    """check serialized schema: report constraint types that should appear at
    most once on a relation definition but are serialized several times
    """
    print 'Checking serialized schema'
    unique_constraints = ('SizeConstraint', 'FormatConstraint',
                          'VocabularyConstraint', 'RQLConstraint',
                          'RQLVocabularyConstraint')
    # NOTE(review): 'Econstraint' looks like a typo for the 'EConstraint'
    # entity type -- confirm against the application schema
    rql = ('Any COUNT(X),RN,EN,ECTN GROUPBY RN,EN,ECTN ORDERBY 1 '
           'WHERE X is Econstraint, R constrained_by X, '
           'R relation_type RT, R from_entity ET, RT name RN, '
           'ET name EN, X cstrtype ECT, ECT name ECTN')
    for count, rn, en, cstrname in session.execute(rql):
        if count == 1:
            continue
        if cstrname in unique_constraints:
            print "ERROR: got %s %r constraints on relation %s.%s" % (
                count, cstrname, en, rn)
+
+
+    
+def check_text_index(schema, session, eids, fix=1):
+    """check all entities registered in the text index"""
+    print 'Checking text index'
+    cursor = session.system_sql('SELECT uid FROM appears;')
+    for row in cursor.fetchall():
+        eid = row[0]
+        if not has_eid(cursor, eid, eids):
+            msg = '  Entity with eid %s exists in the text index but in no source'
+            print >> sys.stderr, msg % eid,
+            if fix:
+                session.system_sql('DELETE FROM appears WHERE uid=%s;' % eid)
+                print >> sys.stderr, ' [FIXED]'
+            else:
+                print >> sys.stderr
+
+
def check_entities(schema, session, eids, fix=1):
    """check all entities registered in the repo system table

    first checks every row of the `entities` table has a matching row in its
    entity type's table, then the other way around
    """
    print 'Checking entities system table'
    cursor = session.system_sql('SELECT eid FROM entities;')
    for row in cursor.fetchall():
        eid = row[0]
        if not has_eid(cursor, eid, eids):
            msg = '  Entity with eid %s exists in the system table but in no source'
            print >> sys.stderr, msg % eid,
            if fix:
                session.system_sql('DELETE FROM entities WHERE eid=%s;' % eid)
                print >> sys.stderr, ' [FIXED]'
            else:
                print >> sys.stderr
    print 'Checking entities tables'
    for eschema in schema.entities():
        if eschema.is_final():
            continue
        cursor = session.system_sql('SELECT eid FROM %s;' % eschema.type)
        for row in cursor.fetchall():
            eid = row[0]
            # eids is full since we have fetched everything from the entities table,
            # no need to call has_eid
            if not eid in eids or not eids[eid]:
                msg = '  Entity with eid %s exists in the %s table but not in the system table'
                print >> sys.stderr, msg % (eid, eschema.type),
                if fix:
                    session.system_sql('DELETE FROM %s WHERE eid=%s;' % (eschema.type, eid))
                    print >> sys.stderr, ' [FIXED]'
                else:
                    print >> sys.stderr
+                
+            
def bad_related_msg(rtype, target, eid, fix):
    """print a warning about a relation whose `target` end ('subject' or
    'object') references eid `eid` while no such entity exists, noting
    whether it was fixed
    """
    msg = '  A relation %s with %s eid %s exists but no such entity in sources'
    print >> sys.stderr, msg % (rtype, target, eid),
    if fix:
        print >> sys.stderr, ' [FIXED]'
    else:
        print >> sys.stderr
+    
+    
+def check_relations(schema, session, eids, fix=1):
+    """check all relations registered in the repo system table"""
+    print 'Checking relations'
+    for rschema in schema.relations():
+        if rschema.is_final():
+            continue
+        rtype = rschema.type
+        if rtype == 'identity':
+            continue
+        if rschema.inlined:
+            for subjtype in rschema.subjects():
+                cursor = session.system_sql('SELECT %s FROM %s WHERE %s IS NOT NULL;'
+                                            % (rtype, subjtype, rtype))
+                for row in cursor.fetchall():
+                    eid = row[0]
+                    if not has_eid(cursor, eid, eids):
+                        bad_related_msg(rtype, 'object', eid, fix)
+                        if fix:
+                            session.system_sql('UPDATE %s SET %s = NULL WHERE eid=%s;'
+                                               % (subjtype, rtype, eid))
+            continue
+        cursor = session.system_sql('SELECT eid_from FROM %s_relation;' % rtype)
+        for row in cursor.fetchall():
+            eid = row[0]
+            if not has_eid(cursor, eid, eids):
+                bad_related_msg(rtype, 'subject', eid, fix)
+                if fix:
+                    session.system_sql(
+                        'DELETE FROM %s_relations WHERE eid_from=%s;' % (rtype, eid))
+        cursor = session.system_sql('SELECT eid_to FROM %s_relation;' % rtype)
+        for row in cursor.fetchall():
+            eid = row[0]
+            if not has_eid(cursor, eid, eids):
+                bad_related_msg(rtype, 'object', eid, fix)
+                if fix:
+                    session.system_sql('DELETE FROM relations WHERE eid_to=%s;' % eid)
+
+
+def check_metadata(schema, session, eids, fix=1):
+    """check entities has required metadata
+
+    FIXME: rewrite using RQL queries ?
+    """
+    print 'Checking metadata'
+    cursor = session.system_sql("SELECT DISTINCT type FROM entities;")
+    for etype, in cursor.fetchall():
+        for rel, default in ( ('creation_date', now()),
+                              ('modification_date', now()), ):
+            cursor = session.system_sql("SELECT eid FROM %s "
+                                        "WHERE %s is NULL" % (etype, rel))
+            for eid, in cursor.fetchall():
+                msg = '  %s with eid %s has no %s'
+                print >> sys.stderr, msg % (etype, eid, rel),
+                if fix:
+                    session.system_sql("UPDATE %s SET %s=%(default)s WHERE eid=%s ;"
+                                       % (etype, rel, eid), {'default': default})
+                    print >> sys.stderr, ' [FIXED]'
+                else:
+                    print >> sys.stderr
+    cursor = session.system_sql('SELECT MIN(eid) FROM euser;')
+    default_user_eid = cursor.fetchone()[0]
+    assert default_user_eid is not None, 'no user defined !'
+    for rel, default in ( ('owned_by', default_user_eid), ):
+        cursor = session.system_sql("SELECT eid, type FROM entities "
+                                    "WHERE NOT EXISTS "
+                                    "(SELECT 1 FROM %s_relation WHERE eid_from=eid);"
+                                    % rel)
+        for eid, etype in cursor.fetchall():
+            msg = '  %s with eid %s has no %s relation'
+            print >> sys.stderr, msg % (etype, eid, rel),
+            if fix:
+                session.system_sql('INSERT INTO %s_relation VALUES (%s, %s) ;'
+                                   % (rel, eid, default))
+                print >> sys.stderr, ' [FIXED]'
+            else:
+                print >> sys.stderr
+
+
+def check(repo, cnx, checks, reindex, fix):
+    """check integrity of application's repository,
+    using given user and password to locally connect to the repository
+    (no running cubicweb server needed)
+    """
+    session = repo._get_session(cnx.sessionid, setpool=True)
+    # yo, launch checks
+    if checks:
+        eids_cache = {}
+        for check in checks:
+            check_func = globals()['check_%s' % check]
+            check_func(repo.schema, session, eids_cache, fix=fix)
+        if fix:
+            cnx.commit()
+        else:
+            print
+        if not fix:
+            print 'WARNING: Diagnostic run, nothing has been corrected'
+    if reindex:
+        cnx.rollback()
+        session.set_pool()
+        reindex_entities(repo.schema, session)
+        cnx.commit()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/hookhelper.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,110 @@
+"""helper functions for application hooks
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from smtplib import SMTP
+from threading import Lock
+
+from cubicweb import RepositoryError
+from cubicweb.server.pool import Operation, SingleLastOperation
+
+
def entity_name(session, eid):
    """return the "name" attribute of the entity with the given eid

    convenience shortcut over `entity_attr`
    """
    return entity_attr(session, eid, 'name')
+
def entity_attr(session, eid, attr):
    """return the value of attribute `attr` of the entity with the given eid"""
    rql = 'Any N WHERE X eid %%(x)s, X %s N' % attr
    rset = session.execute(rql, {'x': eid}, 'x')
    return rset[0][0]
+
def rproperty(session, rtype, eidfrom, eidto, rprop):
    """return property `rprop` of the relation `rtype`, for the relation
    definition matching the types of the given subject / object eids
    """
    describe = session.describe
    rschema = session.repo.schema[rtype]
    return rschema.rproperty(describe(eidfrom)[0], describe(eidto)[0], rprop)
+
def check_internal_entity(session, eid, internal_names):
    """ensure the entity's name is not one of `internal_names`

    raise a RepositoryError if it is, else return the entity's name
    """
    name = entity_name(session, eid)
    if name not in internal_names:
        return name
    raise RepositoryError('%s entity can\'t be deleted' % name)
+
def get_user_sessions(repo, ueid):
    """generator on the repository's opened sessions belonging to the user
    with the given eid
    """
    for session in repo._sessions.values():
        if session.user.eid == ueid:
            yield session
+        
+
# mail related ################################################################

# module-level lock ensuring only one thread at a time talks to the SMTP server
SMTP_LOCK = Lock()
+
class SendMailOp(SingleLastOperation):
    """operation accumulating mails over a transaction and actually sending
    them (in a repository thread) once the transaction is committed
    """
    def __init__(self, session, msg=None, recipients=None, **kwargs):
        # may not specify msg yet, as
        # `cubicweb.sobjects.supervision.SupervisionMailOp`
        if msg is not None:
            assert recipients
            self.to_send = [(msg, recipients)]
        else:
            assert recipients is None
            self.to_send = []
        super(SendMailOp, self).__init__(session, **kwargs)

    def register(self, session):
        # merge mails from a previously registered operation, if any, so one
        # SMTP connection handles all mails of the transaction
        previous = super(SendMailOp, self).register(session)
        if previous:
            self.to_send = previous.to_send + self.to_send

    def commit_event(self):
        # send asynchronously to avoid blocking the committing thread
        self.repo.threaded_task(self.sendmails)

    def sendmails(self):
        server, port = self.config['smtp-host'], self.config['smtp-port']
        # serialize access to the SMTP server across sender threads
        SMTP_LOCK.acquire()
        try:
            try:
                smtp = SMTP(server, port)
            except Exception, ex:
                self.exception("can't connect to smtp server %s:%s (%s)",
                               server, port, ex)
                return
            heloaddr = '%s <%s>' % (self.config['sender-name'],
                                    self.config['sender-addr'])
            for msg, recipients in self.to_send:
                try:
                    smtp.sendmail(heloaddr, recipients, msg.as_string())
                except Exception, ex:
                    # keep going: one failed mail shouldn't block the others
                    self.exception("error sending mail to %s (%s)",
                                   recipients, ex)
            smtp.close()
        finally:
            SMTP_LOCK.release()
+            
+
+# state related ###############################################################
+
def previous_state(session, eid):
    """return the state of the entity with the given eid,
    usually since it's changing in the current transaction. Due to internal
    relation hooks, the relation may have been deleted at this point, so
    we have to handle that by looking at pending relation deletions first
    """
    # check pending relation deletions, latest first, for an in_state
    # relation of this entity
    for eidfrom, rtype, eidto in reversed(session.query_data('pendingrelations', ())):
        if rtype == 'in_state' and eidfrom == eid:
            rset = session.execute('Any S,N WHERE S eid %(x)s, S name N',
                                   {'x': eidto}, 'x')
            return rset.get_entity(0, 0)
    # no pending deletion: the in_state relation is still in place
    rset = session.execute('Any S,N WHERE X eid %(x)s, X in_state S, S name N',
                           {'x': eid}, 'x')
    if rset:
        return rset.get_entity(0, 0)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/hooks.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,567 @@
+"""Core hooks: check schema validity, unsure we are not deleting necessary
+entities...
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from mx.DateTime import now
+
+from cubicweb import UnknownProperty, ValidationError, BadConnectionId
+
+from cubicweb.common.uilib import soup2xhtml
+
+from cubicweb.server.pool import Operation, LateOperation, PreCommitOperation
+from cubicweb.server.hookhelper import (check_internal_entity, previous_state,
+                                     get_user_sessions, rproperty)
+from cubicweb.server.repository import FTIndexEntityOp
+
def relation_deleted(session, eidfrom, rtype, eidto):
    """record the relation deletion in the transaction data, so hooks running
    later in the transaction (e.g. previous_state) can take it into account
    """
    session.add_query_data('pendingrelations', (eidfrom, rtype, eidto))
+    
+
+# base meta-data handling #####################################################
+
def setctime_before_add_entity(session, entity):
    """before create a new entity -> set creation and modification date

    this is a conveniency hook, you shouldn't have to disable it
    """
    # use a single timestamp so both dates are exactly equal on creation
    # (two separate now() calls could yield slightly different values)
    timestamp = now()
    if not 'creation_date' in entity:
        entity['creation_date'] = timestamp
    if not 'modification_date' in entity:
        entity['modification_date'] = timestamp
+
def setmtime_before_update_entity(session, entity):
    """update an entity -> set modification date, unless the query
    explicitly provides one
    """
    if not 'modification_date' in entity:
        entity['modification_date'] = now()
+        
class SetCreatorOp(PreCommitOperation):
    """precommit operation setting the created_by relation of a newly
    created entity (`eid` operation argument) to the session's user,
    unless already explicitly set
    """
    def precommit_event(self):
        if self.eid in self.session.query_data('pendingeids', ()):
            # entity has been created and deleted in the same transaction
            return
        ueid = self.session.user.eid
        execute = self.session.unsafe_execute
        if not execute('Any X WHERE X created_by U, X eid %(x)s',
                       {'x': self.eid}, 'x'):
            execute('SET X created_by U WHERE X eid %(x)s, U eid %(u)s',
                    {'x': self.eid, 'u': ueid}, 'x')
+
def setowner_after_add_entity(session, entity):
    """create a new entity -> set owner and creator metadata"""
    asession = session.actual_session()
    if asession.is_internal_session:
        # internal session: no actual user to set ownership from
        return
    session.unsafe_execute('SET X owned_by U WHERE X eid %(x)s, U eid %(u)s',
                           {'x': entity.eid, 'u': asession.user.eid}, 'x')
    SetCreatorOp(asession, eid=entity.eid)
+
def setis_after_add_entity(session, entity):
    """create a new entity -> set is relation"""
    session.unsafe_execute('SET X is E WHERE X eid %(x)s, E name %(name)s',
                           {'x': entity.eid, 'name': entity.id}, 'x')
    # XXX < 2.50 bw compat
    if not session.get_shared_data('do-not-insert-is_instance_of'):
        # also link the entity to all its ancestor types through is_instance_of
        basetypes = entity.e_schema.ancestors() + [entity.e_schema]
        session.unsafe_execute('SET X is_instance_of E WHERE X eid %%(x)s, E name IN (%s)' %
                               ','.join("'%s'" % str(etype) for etype in basetypes),
                               {'x': entity.eid}, 'x')
+
def setowner_after_add_user(session, entity):
    """when a user has been created, add owned_by relation on itself
    (so each user owns its own EUser entity)
    """
    session.unsafe_execute('SET X owned_by X WHERE X eid %(x)s',
                           {'x': entity.eid}, 'x')
+
def fti_update_after_add_relation(session, eidfrom, rtype, eidto):
    """sync fulltext index when relevant relation is added. Reindexing the
    contained entity is enough since it will implicitly reindex the container
    entity.
    """
    container = session.repo.schema.rschema(rtype).fulltext_container
    if container == 'subject':
        contained = eidto
    elif container == 'object':
        contained = eidfrom
    else:
        # not a fulltext container relation, nothing to do
        return
    FTIndexEntityOp(session, entity=session.entity(contained))
+
def fti_update_after_delete_relation(session, eidfrom, rtype, eidto):
    """sync fulltext index when relevant relation is deleted. Reindexing both
    entities is necessary.
    """
    if not session.repo.schema.rschema(rtype).fulltext_container:
        return
    for eid in (eidto, eidfrom):
        FTIndexEntityOp(session, entity=session.entity(eid))
+    
class SyncOwnersOp(PreCommitOperation):
    """precommit operation adding the composite entity's owners to the
    composed entity (owners it already has are kept untouched)
    """
    def precommit_event(self):
        self.session.unsafe_execute('SET X owned_by U WHERE C owned_by U, C eid %(c)s,'
                                    'NOT EXISTS(X owned_by U, X eid %(x)s)',
                                    {'c': self.compositeeid, 'x': self.composedeid},
                                    ('c', 'x'))
+        
def sync_owner_after_add_composite_relation(session, eidfrom, rtype, eidto):
    """when adding a composite relation, the composed entity should get the
    same owners as the composite one
    """
    if rtype == 'wf_info_for':
        # skip this special composite relation
        return
    composite = rproperty(session, rtype, eidfrom, eidto, 'composite')
    # map composite side -> (composite eid, composed eid)
    eids = {'subject': (eidfrom, eidto),
            'object': (eidto, eidfrom)}.get(composite)
    if eids is not None:
        SyncOwnersOp(session, compositeeid=eids[0], composedeid=eids[1])
+    
def _register_metadata_hooks(hm):
    """register meta-data related hooks on the hooks manager"""
    hm.register_hook(setctime_before_add_entity, 'before_add_entity', '')
    hm.register_hook(setmtime_before_update_entity, 'before_update_entity', '')
    hm.register_hook(setowner_after_add_entity, 'after_add_entity', '')
    hm.register_hook(sync_owner_after_add_composite_relation, 'after_add_relation', '')
    hm.register_hook(fti_update_after_add_relation, 'after_add_relation', '')
    hm.register_hook(fti_update_after_delete_relation, 'after_delete_relation', '')
    # these hooks need schema entities which may be missing from the
    # application schema
    if 'is' in hm.schema:
        hm.register_hook(setis_after_add_entity, 'after_add_entity', '')
    if 'EUser' in hm.schema:
        hm.register_hook(setowner_after_add_user, 'after_add_entity', 'EUser')
+            
+# core hooks ##################################################################
+    
class DelayedDeleteOp(PreCommitOperation):
    """delete the object of composite relation except if the relation
    has actually been redirected to another composite
    """
    def precommit_event(self):
        session = self.session
        # don't delete if the entity is already scheduled for deletion
        if not self.eid in session.query_data('pendingeids', ()):
            etype = session.describe(self.eid)[0]
            # the NOT <self.relation> clause skips entities which got a new
            # composite in the meantime
            session.unsafe_execute('DELETE %s X WHERE X eid %%(x)s, NOT %s'
                                   % (etype, self.relation),
                                   {'x': self.eid}, 'x')
+    
def handle_composite_before_del_relation(session, eidfrom, rtype, eidto):
    """schedule deletion of the composed entity when a composite relation is
    deleted, unless the relation gets redirected to another composite
    """
    composite = rproperty(session, rtype, eidfrom, eidto, 'composite')
    if composite == 'subject':
        target, pattern = eidto, 'Y %s X'
    elif composite == 'object':
        target, pattern = eidfrom, 'X %s Y'
    else:
        return
    DelayedDeleteOp(session, eid=target, relation=pattern % rtype)
+
def before_del_group(session, eid):
    """check that we don't remove the owners group"""
    # check_internal_entity raises RepositoryError if the group is 'owners'
    check_internal_entity(session, eid, ('owners',))
+
+
+# schema validation hooks #####################################################
+        
class CheckConstraintsOperation(LateOperation):
    """check a new relation (`rdef` operation argument) satisfies its
    `constraints`, at precommit time so relations added later in the same
    transaction are taken into account
    """
    def precommit_event(self):
        eidfrom, rtype, eidto = self.rdef
        # first check related entities have not been deleted in the same
        # transaction
        pending = self.session.query_data('pendingeids', ())
        if eidfrom in pending:
            return
        if eidto in pending:
            return
        for constraint in self.constraints:
            try:
                constraint.repo_check(self.session, eidfrom, rtype, eidto)
            except NotImplementedError:
                self.critical('can\'t check constraint %s, not supported',
                              constraint)

    def commit_event(self):
        # everything is done at precommit time
        pass
+    
def cstrcheck_after_add_relation(session, eidfrom, rtype, eidto):
    """check the relation satisfies its constraints

    this is delayed to a precommit time operation since other relations which
    would make a constraint satisfied may be added later.
    """
    constraints = rproperty(session, rtype, eidfrom, eidto, 'constraints')
    if not constraints:
        return
    CheckConstraintsOperation(session, constraints=constraints,
                              rdef=(eidfrom, rtype, eidto))
+
def uniquecstrcheck_before_modification(session, entity):
    """raise ValidationError when a modified attribute with a unique-values
    constraint is already used by another entity
    """
    eschema = entity.e_schema
    for attr, val in entity.items():
        if val is None:
            continue
        # only check final (attribute) relations declared unique
        if eschema.subject_relation(attr).is_final() and \
               eschema.has_unique_values(attr):
            rql = '%s X WHERE X %s %%(val)s' % (entity.e_schema, attr)
            rset = session.unsafe_execute(rql, {'val': val})
            # a match on a different entity means the value is already taken
            if rset and rset[0][0] != entity.eid:
                msg = session._('the value "%s" is already used, use another one')
                raise ValidationError(entity.eid, {attr: msg % val})
+
+
+
+
class tidy_html_fields(object):
    """tidy HTML in rich text strings

    FIXME: (adim) the whole idea of having a class is to store the
    event type. There might be another way to get dynamically the
    event inside the hook function.
    """
    # FIXME hooks manager use func_name to register
    func_name = 'tidy_html_field'

    def __init__(self, event):
        # 'before_add_entity' or 'before_update_entity'
        self.event = event

    def __call__(self, session, entity):
        for attr in entity.formatted_attrs():
            value = entity.get(attr)
            # on creation the format is in the entity dict itself; on update
            # it presumably has to be fetched through get_value -- TODO confirm
            if self.event == 'before_add_entity':
                fmt = entity.get('%s_format' % attr)
            else:
                fmt = entity.get_value('%s_format' % attr)
            if value and fmt == 'text/html':
                entity[attr] = soup2xhtml(value, session.encoding)
+
+
class CheckRequiredRelationOperation(LateOperation):
    """checking relation cardinality has to be done after commit in
    case the relation is being replaced
    """
    # set as operation arguments by the creator
    eid, rtype = None, None

    def precommit_event(self):
        # recheck pending eids
        if self.eid in self.session.query_data('pendingeids', ()):
            return
        if self.session.unsafe_execute(*self._rql()).rowcount < 1:
            etype = self.session.describe(self.eid)[0]
            msg = self.session._('at least one relation %s is required on %s(%s)')
            raise ValidationError(self.eid, {self.rtype: msg % (self.rtype,
                                                                etype, self.eid)})

    def commit_event(self):
        # everything is done at precommit time
        pass

    def _rql(self):
        """return (rql, kwargs, cachekey) selecting entities related through
        self.rtype; must be overridden by subclasses
        """
        raise NotImplementedError()
+    
class CheckSRelationOp(CheckRequiredRelationOperation):
    """check required subject relation"""
    def _rql(self):
        # any object related to the checked entity (as subject) through rtype
        return 'Any O WHERE S eid %%(x)s, S %s O' % self.rtype, {'x': self.eid}, 'x'
+    
class CheckORelationOp(CheckRequiredRelationOperation):
    """check required object relation"""
    def _rql(self):
        # any subject related to the checked entity (as object) through rtype
        return 'Any S WHERE O eid %%(x)s, S %s O' % self.rtype, {'x': self.eid}, 'x'
+
def checkrel_if_necessary(session, opcls, rtype, eid):
    """schedule an `opcls` cardinality check unless an equivalent operation
    is already pending for the same relation type and entity
    """
    scheduled = any(isinstance(op, opcls)
                    and op.rtype == rtype and op.eid == eid
                    for op in session.pending_operations)
    if not scheduled:
        opcls(session, rtype=rtype, eid=eid)
+    
def cardinalitycheck_after_add_entity(session, entity):
    """check cardinalities are satisfied: schedule a precommit check for each
    relation the new entity is required to have
    """
    eid = entity.eid
    for rschema, targetschemas, x in entity.e_schema.relation_definitions():
        # skip automatically handled relations
        if rschema.type in ('owned_by', 'created_by', 'is', 'is_instance_of'):
            continue
        if x == 'subject':
            subjtype = entity.e_schema
            # NOTE(review): only the first target type's cardinality is
            # looked up -- assumes it is representative for all targets
            objtype = targetschemas[0].type
            cardindex = 0
            opcls = CheckSRelationOp
        else:
            subjtype = targetschemas[0].type
            objtype = entity.e_schema
            cardindex = 1
            opcls = CheckORelationOp
        card = rschema.rproperty(subjtype, objtype, 'cardinality')
        # '1' or '+' cardinality means at least one relation is mandatory
        if card[cardindex] in '1+':
            checkrel_if_necessary(session, opcls, rschema.type, eid)
+
def cardinalitycheck_before_del_relation(session, eidfrom, rtype, eidto):
    """check cardinalities are still satisfied once the relation is deleted"""
    card = rproperty(session, rtype, eidfrom, eidto, 'cardinality')
    pendingeids = session.query_data('pendingeids', ())
    # check each end of the relation whose cardinality is mandatory, unless
    # the entity at that end is itself being deleted
    for index, opcls, eid in ((0, CheckSRelationOp, eidfrom),
                              (1, CheckORelationOp, eidto)):
        if card[index] in '1+' and eid not in pendingeids:
            checkrel_if_necessary(session, opcls, rtype, eid)
+
+
def _register_core_hooks(hm):
    """register core integrity hooks on the hooks manager"""
    hm.register_hook(handle_composite_before_del_relation, 'before_delete_relation', '')
    hm.register_hook(before_del_group, 'before_delete_entity', 'EGroup')

    #hm.register_hook(cstrcheck_before_update_entity, 'before_update_entity', '')
    hm.register_hook(cardinalitycheck_after_add_entity, 'after_add_entity', '')
    hm.register_hook(cardinalitycheck_before_del_relation, 'before_delete_relation', '')
    hm.register_hook(cstrcheck_after_add_relation, 'after_add_relation', '')
    hm.register_hook(uniquecstrcheck_before_modification, 'before_add_entity', '')
    hm.register_hook(uniquecstrcheck_before_modification, 'before_update_entity', '')
    hm.register_hook(tidy_html_fields('before_add_entity'), 'before_add_entity', '')
    hm.register_hook(tidy_html_fields('before_update_entity'), 'before_update_entity', '')
+
+
+# user/groups synchronisation #################################################
+            
class GroupOperation(Operation):
    """base class for group operation"""
    # eid of the group, set as operation argument
    geid = None
    def __init__(self, session, *args, **kwargs):
        """override to get the group name before actual groups manipulation:

        we may temporarily lose right access during a commit event, so
        no query should be emitted while committing
        """
        rql = 'Any N WHERE G eid %(x)s, G name N'
        result = session.execute(rql, {'x': kwargs['geid']}, 'x', build_descr=False)
        Operation.__init__(self, session, *args, **kwargs)
        self.group = result[0][0]
+
class DeleteGroupOp(GroupOperation):
    """synchronize user when a in_group relation has been deleted"""
    def commit_event(self):
        """the observed connections pool has been commited"""
        groups = self.cnxuser.groups
        try:
            groups.remove(self.group)
        except KeyError:
            # cached groups were already out of sync, just log it
            self.error('user %s not in group %s',  self.cnxuser, self.group)
            return
+    
def after_del_in_group(session, fromeid, rtype, toeid):
    """modify user permission, need to update users"""
    # update the cached groups of every opened session of this user
    for session_ in get_user_sessions(session.repo, fromeid):
        DeleteGroupOp(session, cnxuser=session_.user, geid=toeid)
+
+        
class AddGroupOp(GroupOperation):
    """synchronize user when a in_group relation has been added"""
    def commit_event(self):
        """the observed connections pool has been commited"""
        groups = self.cnxuser.groups
        if self.group in groups:
            # cached groups were already out of sync, just log it
            self.warning('user %s already in group %s', self.cnxuser,
                         self.group)
            return
        groups.add(self.group)
+
def after_add_in_group(session, fromeid, rtype, toeid):
    """modify user permission, need to update users"""
    # update the cached groups of every opened session of this user
    for session_ in get_user_sessions(session.repo, fromeid):
        AddGroupOp(session, cnxuser=session_.user, geid=toeid)
+
+
+class DelUserOp(Operation):
+    """close the repository connection of a deleted user once the deletion
+    is committed
+    """
+    def __init__(self, session, cnxid):
+        # cnxid: identifier of the connection to close on commit
+        self.cnxid = cnxid
+        Operation.__init__(self, session)
+
+    def commit_event(self):
+        """the observed connections pool has been commited"""
+        try:
+            self.repo.close(self.cnxid)
+        except BadConnectionId:
+            pass # already closed
+
+def after_del_user(session, eid):
+    """modify user permission, need to update users"""
+    # close every open session belonging to the deleted user
+    for session_ in get_user_sessions(session.repo, eid):
+        DelUserOp(session, session_.id)
+    
+def _register_usergroup_hooks(hm):
+    """register user/group related hooks on the hooks manager"""
+    hm.register_hook(after_del_user, 'after_delete_entity', 'EUser')
+    hm.register_hook(after_add_in_group, 'after_add_relation', 'in_group')
+    hm.register_hook(after_del_in_group, 'after_delete_relation', 'in_group')
+
+
+# workflow handling ###########################################################
+
+def before_add_in_state(session, fromeid, rtype, toeid):
+    """check the transition is allowed and record transition information
+
+    raises ValidationError when the requested state change is neither a valid
+    transition from the current state nor, for an entity with no state yet,
+    the workflow's initial state
+    """
+    assert rtype == 'in_state'
+    state = previous_state(session, fromeid)
+    etype = session.describe(fromeid)[0]
+    # managers and super sessions bypass the permission/transition checks
+    if not (session.is_super_session or 'managers' in session.user.groups):
+        if not state is None:
+            entity = session.entity(fromeid)
+            # we should find at least one transition going to this state
+            try:
+                iter(state.transitions(entity, toeid)).next()
+            except StopIteration:
+                msg = session._('transition is not allowed')
+                raise ValidationError(fromeid, {'in_state': msg})
+        else:
+            # not a transition
+            # check state is initial state if the workflow defines one
+            isrset = session.unsafe_execute('Any S WHERE ET initial_state S, ET name %(etype)s',
+                                            {'etype': etype})
+            if isrset and not toeid == isrset[0][0]:
+                msg = session._('not the initial state for this entity')
+                raise ValidationError(fromeid, {'in_state': msg})
+    eschema = session.repo.schema[etype]
+    if not 'wf_info_for' in eschema.object_relations():
+        # workflow history not activated for this entity type
+        return
+    # record a TrInfo entity in the workflow history; the optional comment and
+    # comment format are popped from the session's shared (transaction) data
+    rql = 'INSERT TrInfo T: T wf_info_for E, T to_state DS, T comment %(comment)s'
+    args = {'comment': session.get_shared_data('trcomment', None, pop=True),
+            'e': fromeid, 'ds': toeid}
+    cformat = session.get_shared_data('trcommentformat', None, pop=True)
+    if cformat is not None:
+        args['comment_format'] = cformat
+        rql += ', T comment_format %(comment_format)s'
+    restriction = ['DS eid %(ds)s, E eid %(e)s']
+    if not state is None: # this is a real transition: also record the source state
+        rql += ', T from_state FS'
+        restriction.append('FS eid %(fs)s')
+        args['fs'] = state.eid
+    rql = '%s WHERE %s' % (rql, ', '.join(restriction))
+    session.unsafe_execute(rql, args, 'e')
+
+
+class SetInitialStateOp(PreCommitOperation):
+    """make initial state be a default state"""
+
+    def precommit_event(self):
+        session = self.session
+        entity = self.entity
+        rset = session.execute('Any S WHERE ET initial_state S, ET name %(name)s',
+                               {'name': str(entity.e_schema)})
+        # if there is an initial state and the entity's state is not set,
+        # use the initial state as a default state
+        # (skip entities scheduled for deletion in this transaction)
+        pendingeids = session.query_data('pendingeids', ())
+        if rset and not entity.eid in pendingeids and not entity.in_state:
+            session.unsafe_execute('SET X in_state S WHERE X eid %(x)s, S eid %(s)s',
+                                   {'x' : entity.eid, 's' : rset[0][0]}, 'x')
+
+
+def set_initial_state_after_add(session, entity):
+    """schedule an operation setting the workflow's initial state (if any)
+    on the freshly created entity
+    """
+    SetInitialStateOp(session, entity=entity)
+    
+def _register_wf_hooks(hm):
+    """register workflow related hooks on the hooks manager"""
+    if 'in_state' in hm.schema:
+        hm.register_hook(before_add_in_state, 'before_add_relation', 'in_state')
+        hm.register_hook(relation_deleted, 'before_delete_relation', 'in_state')
+        # only entity types actually using a workflow get the default-state hook
+        for eschema in hm.schema.entities():
+            if 'in_state' in eschema.subject_relations():
+                hm.register_hook(set_initial_state_after_add, 'after_add_entity',
+                                 str(eschema))
+
+
+# EProperty hooks #############################################################
+
+
+class DelEPropertyOp(Operation):
+    """a user's custom properties has been deleted"""
+
+    def commit_event(self):
+        """the observed connections pool has been commited"""
+        # epropdict/key are given as keyword arguments at instantiation time
+        try:
+            del self.epropdict[self.key]
+        except KeyError:
+            self.error('%s has no associated value', self.key)
+
+class ChangeEPropertyOp(Operation):
+    """a user's custom properties has been added/changed"""
+
+    def commit_event(self):
+        """the observed connections pool has been commited"""
+        # epropdict/key/value are given as keyword arguments at instantiation time
+        self.epropdict[self.key] = self.value
+
+class AddEPropertyOp(Operation):
+    """a site-wide custom property has been added"""
+
+    def commit_event(self):
+        """the observed connections pool has been commited"""
+        eprop = self.eprop
+        if not eprop.for_user:
+            self.repo.vreg.eprop_values[eprop.pkey] = eprop.value
+        # if for_user is set, update is handled by a ChangeEPropertyOp operation
+
+def after_add_eproperty(session, entity):
+    """validate a new EProperty and propagate its value
+
+    raises ValidationError on unknown property key or badly typed value
+    """
+    key, value = entity.pkey, entity.value
+    try:
+        value = session.vreg.typed_value(key, value)
+    except UnknownProperty:
+        raise ValidationError(entity.eid, {'pkey': session._('unknown property key')})
+    except ValueError, ex:
+        raise ValidationError(entity.eid, {'value': session._(str(ex))})
+    if not session.user.matching_groups('managers'):
+        # non-managers may only set properties for themselves
+        session.unsafe_execute('SET P for_user U WHERE P eid %(x)s,U eid %(u)s',
+                               {'x': entity.eid, 'u': session.user.eid}, 'x')
+    else:
+        AddEPropertyOp(session, eprop=entity)
+        
+def after_update_eproperty(session, entity):
+    """propagate an EProperty value change to the relevant property cache
+
+    raises ValidationError on badly typed value; silently ignores unknown keys
+    """
+    key, value = entity.pkey, entity.value
+    try:
+        value = session.vreg.typed_value(key, value)
+    except UnknownProperty:
+        return
+    except ValueError, ex:
+        raise ValidationError(entity.eid, {'value': session._(str(ex))})
+    if entity.for_user:
+        # per-user property: update the cached properties of open sessions
+        for session_ in get_user_sessions(session.repo, entity.for_user[0].eid):
+            ChangeEPropertyOp(session, epropdict=session_.user.properties,
+                              key=key, value=value)
+    else:
+        # site wide properties
+        ChangeEPropertyOp(session, epropdict=session.vreg.eprop_values,
+                          key=key, value=value)
+        
+def before_del_eproperty(session, eid):
+    """remove a site-wide property value from the cache when its EProperty
+    entity is deleted (per-user case is handled by before_del_for_user)
+    """
+    for eidfrom, rtype, eidto in session.query_data('pendingrelations', ()):
+        if rtype == 'for_user' and eidfrom == eid:
+            # if for_user was set, delete has already been handled
+            break
+    else:
+        key = session.execute('Any K WHERE P eid %(x)s, P pkey K',
+                              {'x': eid}, 'x')[0][0]
+        DelEPropertyOp(session, epropdict=session.vreg.eprop_values, key=key)
+
+def after_add_for_user(session, fromeid, rtype, toeid):
+    """a property has been bound to a user: reject site-wide-only keys and
+    update the target user's open sessions
+    """
+    if not session.describe(fromeid)[0] == 'EProperty':
+        return
+    key, value = session.execute('Any K,V WHERE P eid %(x)s,P pkey K,P value V',
+                                 {'x': fromeid}, 'x')[0]
+    if session.vreg.property_info(key)['sitewide']:
+        raise ValidationError(fromeid,
+                              {'for_user': session._("site-wide property can't be set for user")})
+    for session_ in get_user_sessions(session.repo, toeid):
+        ChangeEPropertyOp(session, epropdict=session_.user.properties,
+                          key=key, value=value)
+        
+def before_del_for_user(session, fromeid, rtype, toeid):
+    """a per-user property is being unbound: drop it from the cached
+    properties of the user's open sessions
+    """
+    key = session.execute('Any K WHERE P eid %(x)s, P pkey K',
+                          {'x': fromeid}, 'x')[0][0]
+    relation_deleted(session, fromeid, rtype, toeid)
+    for session_ in get_user_sessions(session.repo, toeid):
+        DelEPropertyOp(session, epropdict=session_.user.properties, key=key)
+
+def _register_eproperty_hooks(hm):
+    """register EProperty related hooks on the hooks manager"""
+    hm.register_hook(after_add_eproperty, 'after_add_entity', 'EProperty')
+    hm.register_hook(after_update_eproperty, 'after_update_entity', 'EProperty')
+    hm.register_hook(before_del_eproperty, 'before_delete_entity', 'EProperty')
+    hm.register_hook(after_add_for_user, 'after_add_relation', 'for_user')
+    hm.register_hook(before_del_for_user, 'before_delete_relation', 'for_user')
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/hooksmanager.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,254 @@
+"""Hooks management
+
+Hooks are called before / after any individual update of entities / relations
+in the repository.
+
+Here is the prototype of the different hooks:
+
+* filtered on the entity's type:
+
+  before_add_entity    (session, entity)
+  after_add_entity     (session, entity)
+  before_update_entity (session, entity)
+  after_update_entity  (session, entity)
+  before_delete_entity (session, eid)
+  after_delete_entity  (session, eid)
+
+* filtered on the relation's type:
+
+  before_add_relation    (session, fromeid, rtype, toeid)
+  after_add_relation     (session, fromeid, rtype, toeid)
+  before_delete_relation (session, fromeid, rtype, toeid)
+  after_delete_relation  (session, fromeid, rtype, toeid)
+
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+ENTITIES_HOOKS = ('before_add_entity',    'after_add_entity', 
+                  'before_update_entity', 'after_update_entity',
+                  'before_delete_entity', 'after_delete_entity')
+RELATIONS_HOOKS = ('before_add_relation',   'after_add_relation' ,
+                   'before_delete_relation','after_delete_relation')
+SYSTEM_HOOKS = ('server_startup', 'server_shutdown',
+                'session_open', 'session_close')
+
+ALL_HOOKS = frozenset(ENTITIES_HOOKS + RELATIONS_HOOKS + SYSTEM_HOOKS)
+
+class HooksManager(object):
+    """handle hooks registration and calls
+
+    hooks live in a two-level mapping: event name -> entity/relation type
+    (or '' for type-agnostic hooks) -> list of callbacks
+    """
+    # flipped off by deactivate_verification_hooks(), e.g. during migrations
+    verification_hooks_activated = True
+
+    def __init__(self, schema):
+        self.set_schema(schema)
+
+    def set_schema(self, schema):
+        # (re)initialize the hooks mapping for the given schema
+        self._hooks = {}
+        self.schema = schema
+        self._init_hooks(schema)
+
+    def register_hooks(self, hooks):
+        """register a dictionary of hooks :
+        
+             {'event': {'entity or relation type': [callbacks list]}}
+        """
+        for event, subevents in hooks.items():
+            for subevent, callbacks in subevents.items():
+                for callback in callbacks:
+                    self.register_hook(callback, event, subevent)
+
+    def register_hook(self, function, event, etype=''):
+        """register a function to call when <event> occurs
+        
+         <etype> is an entity/relation type or an empty string.
+         If etype is the empty string, the function will be called at each
+         event, else the function will be called only when event occurs on an
+         entity/relation of the given type.
+        """
+        assert event in ALL_HOOKS, '%r NOT IN %r' % (event, ALL_HOOKS)
+        assert (not event in SYSTEM_HOOKS or not etype), (event, etype)
+        etype = etype or ''
+        try:
+            self._hooks[event][etype].append(function)
+            self.debug('registered hook %s on %s (%s)', event, etype or 'any',
+                       function.func_name)
+            
+        except KeyError:
+            # etype unknown to the schema: log instead of failing
+            self.error('can\'t register hook %s on %s (%s)',
+                       event, etype or 'any', function.func_name)
+            
+    def unregister_hook(self, function, event, etype=''):
+        """unregister a function previously registered for <event>
+        
+        <etype> is an entity/relation type or an empty string.
+        If etype is the empty string, the function will be called at each
+        event, else the function will be called only when event occurs on an
+        entity/relation of the given type.
+        """
+        assert event in ALL_HOOKS, event
+        etype = etype or ''
+        self.info('unregister hook %s on %s (%s)', event, etype,
+                  function.func_name)
+        self._hooks[event][etype].remove(function)
+
+    def call_hooks(self, __event, __type='', *args, **kwargs):
+        """call hook matching event and optional type
+
+        type-agnostic ('') hooks are always called before type-specific ones
+        """
+        if __type:
+            self.info('calling hooks for event %s (%s)', __event, __type)
+        else:
+            self.info('calling hooks for event %s', __event)
+        # call generic hooks first
+        for hook in self._hooks[__event]['']:
+            #print '[generic]', hook.__name__
+            hook(*args, **kwargs)
+        if __type:
+            for hook in self._hooks[__event][__type]:
+                #print '[%s]'%__type, hook.__name__
+                hook(*args, **kwargs)
+
+    def _init_hooks(self, schema):
+        """initialize the hooks map"""
+        for hook_event in ENTITIES_HOOKS:
+            self._hooks[hook_event] = {'': []}
+            for etype in schema.entities():
+                self._hooks[hook_event][etype] = []
+        for hook_event in RELATIONS_HOOKS:
+            self._hooks[hook_event] = {'': []}
+            for r_type in schema.relations():
+                self._hooks[hook_event][r_type] = []
+        for hook_event in SYSTEM_HOOKS:
+            # system hooks are never type-specific
+            self._hooks[hook_event] = {'': []}
+
+    def register_system_hooks(self, config):
+        """register system hooks according to the configuration"""
+        self.info('register core hooks')
+        from cubicweb.server.hooks import _register_metadata_hooks, _register_wf_hooks
+        _register_metadata_hooks(self)
+        self.info('register workflow hooks')
+        _register_wf_hooks(self)
+        if config.core_hooks:
+            from cubicweb.server.hooks import _register_core_hooks
+            _register_core_hooks(self)
+        if config.schema_hooks:
+            from cubicweb.server.schemahooks import _register_schema_hooks
+            self.info('register schema hooks')
+            _register_schema_hooks(self)
+        if config.usergroup_hooks:
+            from cubicweb.server.hooks import _register_usergroup_hooks
+            from cubicweb.server.hooks import _register_eproperty_hooks
+            self.info('register user/group hooks')
+            _register_usergroup_hooks(self)
+            _register_eproperty_hooks(self)
+        if config.security_hooks:
+            from cubicweb.server.securityhooks import register_security_hooks
+            self.info('register security hooks')
+            register_security_hooks(self)
+        if not self.verification_hooks_activated:
+            # honour a deactivation requested before (re)registration
+            self.deactivate_verification_hooks()
+
+    def deactivate_verification_hooks(self):
+        """temporarily disable integrity verification hooks (used e.g. by
+        migrations run in 'free wheel' mode); see reactivate_verification_hooks
+        """
+        from cubicweb.server.hooks import (cardinalitycheck_after_add_entity,
+                                        cardinalitycheck_before_del_relation,
+                                        cstrcheck_after_add_relation,
+                                        uniquecstrcheck_before_modification)
+        self.warning('deactivating verification hooks')
+        self.verification_hooks_activated = False
+        self.unregister_hook(cardinalitycheck_after_add_entity, 'after_add_entity', '')
+        self.unregister_hook(cardinalitycheck_before_del_relation, 'before_delete_relation', '')
+        self.unregister_hook(cstrcheck_after_add_relation, 'after_add_relation', '')
+        self.unregister_hook(uniquecstrcheck_before_modification, 'before_add_entity', '')
+        self.unregister_hook(uniquecstrcheck_before_modification, 'before_update_entity', '')
+#         self.unregister_hook(tidy_html_fields('before_add_entity'), 'before_add_entity', '')
+#         self.unregister_hook(tidy_html_fields('before_update_entity'), 'before_update_entity', '')
+
+    def reactivate_verification_hooks(self):
+        """re-enable the verification hooks disabled by
+        deactivate_verification_hooks
+        """
+        from cubicweb.server.hooks import (cardinalitycheck_after_add_entity,
+                                        cardinalitycheck_before_del_relation,
+                                        cstrcheck_after_add_relation,
+                                        uniquecstrcheck_before_modification)
+        self.warning('reactivating verification hooks')
+        self.verification_hooks_activated = True
+        self.register_hook(cardinalitycheck_after_add_entity, 'after_add_entity', '')
+        self.register_hook(cardinalitycheck_before_del_relation, 'before_delete_relation', '')
+        self.register_hook(cstrcheck_after_add_relation, 'after_add_relation', '')
+        self.register_hook(uniquecstrcheck_before_modification, 'before_add_entity', '')
+        self.register_hook(uniquecstrcheck_before_modification, 'before_update_entity', '')
+#         self.register_hook(tidy_html_fields('before_add_entity'), 'before_add_entity', '')
+#         self.register_hook(tidy_html_fields('before_update_entity'), 'before_update_entity', '')
+
+            
+from cubicweb.vregistry import autoselectors
+from cubicweb.common.appobject import AppObject
+from cubicweb.common.registerers import accepts_registerer, yes_registerer
+from cubicweb.common.selectors import yes_selector
+
+class autoid(autoselectors):
+    """metaclass to create an unique 'id' attribute on the class using it"""
+    def __new__(mcs, name, bases, classdict):
+        cls = super(autoid, mcs).__new__(mcs, name, bases, classdict)
+        # the CPython object id is unique per class object, hence usable as vreg id
+        cls.id = str(id(cls))
+        return cls
+
+class Hook(AppObject):
+    """base class for class-based hooks registered in the 'hooks' registry
+
+    subclasses must define `events` (sequence of event names) and `accepts`
+    (sequence of entity/relation type names, '' for any)
+    """
+    __metaclass__ = autoid
+    __registry__ = 'hooks'
+    __registerer__ = accepts_registerer
+    __selectors__ = (yes_selector,)
+    # set this in derivated classes
+    events = None
+    accepts = None
+    enabled = True
+
+    def __init__(self, event=None):
+        super(Hook, self).__init__()
+        self.event = event
+
+    @classmethod
+    def registered(cls, vreg):
+        # return an instance so the registry stores a callable hook object
+        super(Hook, cls).registered(vreg)
+        return cls()
+
+    @classmethod
+    def register_to(cls):
+        """yield every (event, type) pair this hook should be registered for,
+        expanding accepted entity types to their specializations
+        """
+        if not cls.enabled:
+            cls.warning('%s hook has been disabled', cls)
+            return
+        done = set()
+        for event in cls.events:
+            for ertype in cls.accepts:
+                if (event, ertype) in done:
+                    continue
+                yield event, ertype
+                done.add((event, ertype))
+                try:
+                    eschema = cls.schema.eschema(ertype)
+                except KeyError:
+                    # relation schema
+                    pass
+                else:
+                    for eetype in eschema.specialized_by():
+                        if (event, eetype) in done:
+                            continue
+                        yield event, str(eetype)
+                        done.add((event, eetype))
+
+
+    def make_callback(self, event):
+        # with a single event, the instance can be shared; otherwise bind a
+        # per-event instance so self.event is accurate in call()
+        if len(self.events) == 1:
+            return self.call
+        return self.__class__(event=event).call
+
+    def call(self):
+        raise NotImplementedError
+    
+class SystemHook(Hook):
+    """base class for hooks on system events (server/session lifecycle),
+    which are never filtered on a type
+    """
+    __registerer__ = yes_registerer
+    accepts = ('',)
+
+from logging import getLogger
+from cubicweb import set_log_methods
+set_log_methods(HooksManager, getLogger('cubicweb.hooksmanager'))
+set_log_methods(Hook, getLogger('cubicweb.hooks'))
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/migractions.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1075 @@
+"""a class implementing basic actions used in migration scripts.
+
+The following schema actions are supported for now:
+* add/drop/rename attribute
+* add/drop entity/relation type
+* rename entity type
+
+The following data actions are supported for now:
+* add an entity
+* execute raw RQL queries
+
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+import sys
+import os
+from os.path import join, exists
+
+from mx.DateTime import now
+from logilab.common.decorators import cached
+from logilab.common.adbh import get_adv_func_helper
+
+from yams.constraints import SizeConstraint
+from yams.schema2sql import eschema2sql, rschema2sql
+
+from cubicweb import AuthenticationError
+from cubicweb.dbapi import get_repository, repo_connect
+from cubicweb.common.migration import MigrationHelper, yes
+
+try:
+    from cubicweb.server import schemaserial as ss
+    from cubicweb.server.utils import manager_userpasswd
+    from cubicweb.server.sqlutils import sqlexec
+except ImportError: # LAX
+    pass
+
+class ServerMigrationHelper(MigrationHelper):
+    """specific migration helper for server side migration scripts,
+    providing actions related to schema/data migration
+    """
+
+    def __init__(self, config, schema, interactive=True,
+                 repo=None, cnx=None, verbosity=1, connect=True):
+        """either an already opened (repo, cnx) pair is given, or a
+        connection is established (lazily, see the `cnx` property) when
+        `connect` is true
+        """
+        MigrationHelper.__init__(self, config, interactive, verbosity)
+        if not interactive:
+            assert cnx
+            assert repo
+        if cnx is not None:
+            assert repo
+            self._cnx = cnx
+            self.repo = repo
+        elif connect:
+            self.repo_connect()
+        if not schema:
+            # load the schema from the file system, expanding used cubes
+            schema = config.load_schema(expand_cubes=True)
+        self.new_schema = schema
+        self._synchronized = set()
+
+    @cached
+    def repo_connect(self):
+        """open (once, thanks to @cached) an in-memory repository connection"""
+        self.repo = get_repository(method='inmemory', config=self.config)
+        return self.repo
+    
+    def shutdown(self):
+        """shutdown the repository if one has been opened"""
+        if self.repo is not None:
+            self.repo.shutdown()
+        
+    def rewrite_vcconfiguration(self):
+        """write current installed versions (of cubicweb software
+        and of each used cube) into the database
+        """
+        self.cmd_set_property('system.version.cubicweb', self.config.cubicweb_version())
+        for pkg in self.config.cubes():
+            pkgversion = self.config.cube_version(pkg)
+            self.cmd_set_property('system.version.%s' % pkg.lower(), pkgversion)
+        self.commit()
+        
+    def backup_database(self, backupfile=None, askconfirm=True):
+        """dump the system database into `backupfile` (default: a timestamped
+        file in the configuration's backup directory), asking confirmation
+        and offering retry on failure
+        """
+        config = self.config
+        source = config.sources()['system']
+        helper = get_adv_func_helper(source['db-driver'])
+        date = now().strftime('%Y-%m-%d_%H:%M:%S')
+        app = config.appid
+        backupfile = backupfile or join(config.backup_dir(),
+                                        '%s-%s.dump' % (app, date))
+        if exists(backupfile):
+            if not self.confirm('a backup already exists for %s, overwrite it?' % app):
+                return
+        elif askconfirm and not self.confirm('backup %s database?' % app):
+            return
+        # build the driver-specific shell command performing the dump
+        cmd = helper.backup_command(source['db-name'], source.get('db-host'),
+                                    source.get('db-user'), backupfile,
+                                    keepownership=False)
+        while True:
+            print cmd
+            if os.system(cmd):
+                print 'error while backuping the base'
+                answer = self.confirm('continue anyway?',
+                                      shell=False, abort=False, retry=True)
+                if not answer:
+                    raise SystemExit(1)
+                if answer == 1: # 1: continue, 2: retry
+                    break
+            else:
+                print 'database backup:', backupfile
+                break
+        
+    def restore_database(self, backupfile, drop=True):
+        """restore the system database from `backupfile`, optionally dropping
+        the existing database first; each restore command may be retried on
+        failure
+        """
+        config = self.config
+        source = config.sources()['system']
+        helper = get_adv_func_helper(source['db-driver'])
+        app = config.appid
+        if not exists(backupfile):
+            raise Exception("backup file %s doesn't exist" % backupfile)
+        if self.confirm('restore %s database from %s ?' % (app, backupfile)):
+            for cmd in helper.restore_commands(source['db-name'], source.get('db-host'),
+                                               source.get('db-user'), backupfile,
+                                               source['db-encoding'],
+                                               keepownership=False, drop=drop):
+                while True:
+                    print cmd
+                    if os.system(cmd):
+                        print 'error while restoring the base'
+                        answer = self.confirm('continue anyway?',
+                                              shell=False, abort=False, retry=True)
+                        if not answer:
+                            raise SystemExit(1)
+                        if answer == 1: # 1: continue, 2: retry
+                            break
+                    else:
+                        break
+            print 'database restored'
+        
+    def migrate(self, vcconf, toupgrade, options):
+        """run the migration, backing up the database first unless the
+        migration is file-system only or backup was explicitly declined
+        """
+        if not options.fs_only:
+            if options.backup_db is None:
+                self.backup_database()
+            elif options.backup_db:
+                self.backup_database(askconfirm=False)
+        super(ServerMigrationHelper, self).migrate(vcconf, toupgrade, options)
+        self.rewrite_configuration()
+    
+    def process_script(self, migrscript, funcname=None, *args, **kwargs):
+        """execute a migration script
+        in interactive mode,  display the migration script path, ask for
+        confirmation and execute it if confirmed
+
+        .sql scripts are executed directly against the system source; other
+        scripts are delegated to the base class implementation
+        """
+        if migrscript.endswith('.sql'):
+            if self.execscript_confirm(migrscript):
+                sqlexec(open(migrscript).read(), self.session.system_sql)
+        else:
+            return super(ServerMigrationHelper, self).process_script(
+                migrscript, funcname, *args, **kwargs)
+        
+    @property
+    def cnx(self):
+        """lazy connection: opened on first access with the admin credentials
+        from the sources configuration, falling back to interactive prompt;
+        loops until a manager account authenticates
+        """
+        try:
+            return self._cnx
+        except AttributeError:
+            sourcescfg = self.repo.config.sources()
+            try:
+                login = sourcescfg['admin']['login']
+                pwd = sourcescfg['admin']['password']
+            except KeyError:
+                # no admin credentials configured: ask interactively
+                login, pwd = manager_userpasswd()
+            while True:
+                try:
+                    self._cnx = repo_connect(self.repo, login, pwd)
+                    if not 'managers' in self._cnx.user(self.session).groups:
+                        print 'migration need an account in the managers group'
+                    else:
+                        break
+                except AuthenticationError:
+                    print 'wrong user/password'
+                except (KeyboardInterrupt, EOFError):
+                    print 'aborting...'
+                    sys.exit(0)
+                try:
+                    login, pwd = manager_userpasswd()
+                except (KeyboardInterrupt, EOFError):
+                    print 'aborting...'
+                    sys.exit(0)
+            return self._cnx
+
+    @property
+    def session(self):
+        """the server-side session backing the current connection"""
+        return self.repo._get_session(self.cnx.sessionid)
+    
+    @property
+    @cached
+    def rqlcursor(self):
+        """lazy rql cursor"""
+        return self.cnx.cursor(self.session)    
+    
+    def commit(self):
+        # no-op when no connection has been opened yet
+        if hasattr(self, '_cnx'):
+            self._cnx.commit()
+            
+    def rollback(self):
+        # no-op when no connection has been opened yet
+        if hasattr(self, '_cnx'):
+            self._cnx.rollback()
+                   
+    def rqlexecall(self, rqliter, cachekey=None, ask_confirm=True):
+        """execute every (rql, kwargs) pair yielded by `rqliter`"""
+        for rql, kwargs in rqliter:
+            self.rqlexec(rql, kwargs, cachekey, ask_confirm)
+
+    @cached
+    def _create_context(self):
+        """return a dictionary to use as migration script execution context"""
+        context = super(ServerMigrationHelper, self)._create_context()
+        # names available to migration scripts in addition to the base ones
+        context.update({'checkpoint': self.checkpoint,
+                        'sql': self.sqlexec,
+                        'rql': self.rqlexec,
+                        'rqliter': self.rqliter,
+                        'schema': self.repo.schema,
+                        'newschema': self.new_schema,
+                        'cnx': self.cnx,
+                        'session' : self.session,
+                        'repo' : self.repo,
+                        })
+        return context
+
+    @cached
+    def group_mapping(self):
+        """cached group mapping"""
+        return ss.group_mapping(self.rqlcursor)
+        
+    def exec_event_script(self, event, cubepath=None, funcname=None,
+                          *args, **kwargs):
+        """execute a migration event script ('precreate', 'postcreate',
+        'preremove', ...) from the given cube, or from the application's
+        migration scripts directory when no cubepath is given
+
+        scripts run unconfirmed; in 'free wheel' mode, ownership and
+        verification hooks are disabled for the duration of the script
+        """
+        if cubepath:
+            apc = join(cubepath, 'migration', '%s.py' % event)
+        else:
+            apc = join(self.config.migration_scripts_dir(), '%s.py' % event)
+        if exists(apc):
+            if self.config.free_wheel:
+                from cubicweb.server.hooks import setowner_after_add_entity
+                self.repo.hm.unregister_hook(setowner_after_add_entity,
+                                             'after_add_entity', '')
+                self.deactivate_verification_hooks()
+            self.info('executing %s', apc)
+            # temporarily force non-interactive execution
+            confirm = self.confirm
+            execscript_confirm = self.execscript_confirm
+            self.confirm = yes
+            self.execscript_confirm = yes
+            try:
+                return self.process_script(apc, funcname, *args, **kwargs)
+            finally:
+                # restore confirmation callbacks and re-enable hooks
+                self.confirm = confirm
+                self.execscript_confirm = execscript_confirm
+                if self.config.free_wheel:
+                    self.repo.hm.register_hook(setowner_after_add_entity,
+                                               'after_add_entity', '')
+                    self.reactivate_verification_hooks()
+    
+    # base actions ############################################################
+
+    def checkpoint(self):
+        """checkpoint action"""
+        if self.confirm('commit now ?', shell=False):
+            self.commit()
+
+    def cmd_add_cube(self, cube, update_database=True):
+        """update_database is telling if the database schema should be updated
+        or if only the relevant eproperty should be inserted (for the case where
+        a cube has been extracted from an existing application, so the
+        cube schema is already in there)
+        """
+        newcubes = super(ServerMigrationHelper, self).cmd_add_cube(
+            cube)
+        if not newcubes:
+            return
+        # record the version of each newly added cube
+        for pack in newcubes:
+            self.cmd_set_property('system.version.'+pack,
+                                  self.config.cube_version(pack))
+        if not update_database:
+            self.commit()
+            return
+        self.new_schema = self.config.load_schema()
+        new = set()
+        # execute pre-create files
+        for pack in reversed(newcubes):
+            self.exec_event_script('precreate', self.config.cube_dir(pack))
+        # add new entity and relation types
+        for rschema in self.new_schema.relations():
+            if not rschema in self.repo.schema:
+                self.cmd_add_relation_type(rschema.type)
+                new.add(rschema.type)
+        for eschema in self.new_schema.entities():
+            if not eschema in self.repo.schema:
+                self.cmd_add_entity_type(eschema.type)
+                new.add(eschema.type)
+        # check if attributes has been added to existing entities
+        for rschema in self.new_schema.relations():
+            existingschema = self.repo.schema.rschema(rschema.type)
+            for (fromtype, totype) in rschema.iter_rdefs():
+                if existingschema.has_rdef(fromtype, totype):
+                    continue
+                # check we should actually add the relation definition
+                if not (fromtype in new or totype in new or rschema in new):
+                    continue
+                self.cmd_add_relation_definition(str(fromtype), rschema.type, 
+                                                 str(totype))
+        # execute post-create files
+        for pack in reversed(newcubes):
+            self.exec_event_script('postcreate', self.config.cube_dir(pack))
+            # commit after each cube's postcreate script
+            self.commit()        
+                
+    def cmd_remove_cube(self, cube):
+        """remove a cube from the application: drop entity/relation types and
+        definitions which are no longer in the resulting schema, run the
+        cube's pre/post remove scripts and delete its version property
+        """
+        removedcubes = super(ServerMigrationHelper, self).cmd_remove_cube(cube)
+        if not removedcubes:
+            return
+        # keep the schema including the removed cubes around for diffing
+        oldschema = self.new_schema
+        self.new_schema = newschema = self.config.load_schema()
+        reposchema = self.repo.schema
+        # execute pre-remove files
+        for pack in reversed(removedcubes):
+            self.exec_event_script('preremove', self.config.cube_dir(pack))
+        # remove cubes'entity and relation types
+        for rschema in oldschema.relations():
+            if not rschema in newschema and rschema in reposchema:
+                self.cmd_drop_relation_type(rschema.type)
+        for eschema in oldschema.entities():
+            if not eschema in newschema and eschema in reposchema:
+                self.cmd_drop_entity_type(eschema.type)
+        for rschema in oldschema.relations():
+            if rschema in newschema and rschema in reposchema: 
+                # check if attributes/relations has been added to entities from 
+                # other cubes
+                for fromtype, totype in rschema.iter_rdefs():
+                    if not newschema[rschema.type].has_rdef(fromtype, totype) and \
+                           reposchema[rschema.type].has_rdef(fromtype, totype):
+                        self.cmd_drop_relation_definition(
+                            str(fromtype), rschema.type, str(totype))
+        # execute post-remove files; each cube's version property is deleted
+        # and committed along with its postremove script
+        for pack in reversed(removedcubes):
+            self.exec_event_script('postremove', self.config.cube_dir(pack))
+            self.rqlexec('DELETE EProperty X WHERE X pkey %(pk)s',
+                         {'pk': u'system.version.'+pack}, ask_confirm=False)
+            self.commit()
+            
+    # schema migration actions ################################################
+    
+    def cmd_add_attribute(self, etype, attrname, attrtype=None, commit=True):
+        """add a new attribute on the given entity type"""
+        if attrtype is None:
+            rschema = self.new_schema.rschema(attrname)
+            attrtype = rschema.objects(etype)[0]
+        self.cmd_add_relation_definition(etype, attrname, attrtype, commit=commit)
+        
+    def cmd_drop_attribute(self, etype, attrname, commit=True):
+        """drop an existing attribute from the given entity type
+        
+        `attrname` is a string giving the name of the attribute to drop
+        """
+        rschema = self.repo.schema.rschema(attrname)
+        attrtype = rschema.objects(etype)[0]
+        self.cmd_drop_relation_definition(etype, attrname, attrtype, commit=commit)
+
+    def cmd_rename_attribute(self, etype, oldname, newname, commit=True):
+        """rename an existing attribute of the given entity type
+        
+        `oldname` is a string giving the name of the existing attribute
+        `newname` is a string giving the name of the renamed attribute
+        """
+        eschema = self.new_schema.eschema(etype)
+        attrtype = eschema.destination(newname)
+        # have to commit this first step anyway to get the definition
+        # actually in the schema
+        self.cmd_add_attribute(etype, newname, attrtype, commit=True)
+        # copy the values over; skip NULL values if the attribute is required
+        rql = 'SET X %s VAL WHERE X is %s, X %s VAL' % (newname, etype, oldname)
+        card = eschema.rproperty(newname, 'cardinality')[0]
+        if card == '1':
+            rql += ', NOT X %s NULL' % oldname
+        self.rqlexec(rql, ask_confirm=self.verbosity>=2)
+        self.cmd_drop_attribute(etype, oldname, commit=commit)
+            
+    def cmd_add_entity_type(self, etype, auto=True, commit=True):
+        """register a new entity type
+        
+        in auto mode, automatically register entity's relation where the
+        targeted type is known
+        """
+        applschema = self.repo.schema
+        if etype in applschema:
+            eschema = applschema[etype]
+            if eschema.is_final():
+                applschema.del_entity_type(etype)
+        else:
+            eschema = self.new_schema.eschema(etype)
+        confirm = self.verbosity >= 2
+        # register the entity into EEType
+        self.rqlexecall(ss.eschema2rql(eschema), ask_confirm=confirm)
+        # add specializes relation if needed
+        self.rqlexecall(ss.eschemaspecialize2rql(eschema), ask_confirm=confirm)
+        # register groups / permissions for the entity
+        self.rqlexecall(ss.erperms2rql(eschema, self.group_mapping()),
+                        ask_confirm=confirm)
+        # register entity's attributes
+        for rschema, attrschema in eschema.attribute_definitions():
+            # ignore those meta relations, they will be automatically added
+            if rschema.type in ('eid', 'creation_date', 'modification_date'):
+                continue
+            if not rschema.type in applschema:
+                # need to add the relation type and to commit to get it
+                # actually in the schema
+                self.cmd_add_relation_type(rschema.type, False, commit=True)
+            # register relation definition
+            self.rqlexecall(ss.rdef2rql(rschema, etype, attrschema.type),
+                            ask_confirm=confirm)
+        if auto:
+            # we have commit here to get relation types actually in the schema
+            self.commit()
+            added = []
+            for rschema in eschema.subject_relations():
+                # attribute relations have already been processed and
+                # 'owned_by'/'created_by' will be automatically added
+                if rschema.final or rschema.type in ('owned_by', 'created_by', 'is', 'is_instance_of'): 
+                    continue
+                rtypeadded = rschema.type in applschema
+                for targetschema in rschema.objects(etype):
+                    # ignore relations where the targeted type is not in the
+                    # current application schema
+                    targettype = targetschema.type
+                    if not targettype in applschema and targettype != etype:
+                        continue
+                    if not rtypeadded:
+                        # need to add the relation type and to commit to get it
+                        # actually in the schema
+                        added.append(rschema.type)
+                        self.cmd_add_relation_type(rschema.type, False, commit=True)
+                        rtypeadded = True
+                    # register relation definition
+                    # remember these to avoid adding twice non symetric relation
+                    # such as "Emailthread forked_from Emailthread"
+                    added.append((etype, rschema.type, targettype))
+                    self.rqlexecall(ss.rdef2rql(rschema, etype, targettype),
+                                    ask_confirm=confirm)
+            for rschema in eschema.object_relations():
+                rtypeadded = rschema.type in applschema or rschema.type in added
+                for targetschema in rschema.subjects(etype):
+                    # ignore relations where the targeted type is not in the
+                    # current application schema
+                    targettype = targetschema.type
+                    # don't check targettype != etype since in this case the
+                    # relation has already been added as a subject relation
+                    if not targettype in applschema:
+                        continue
+                    if not rtypeadded:
+                        # need to add the relation type and to commit to get it
+                        # actually in the schema
+                        self.cmd_add_relation_type(rschema.type, False, commit=True)
+                        rtypeadded = True
+                    elif (targettype, rschema.type, etype) in added:
+                        continue
+                    # register relation definition
+                    self.rqlexecall(ss.rdef2rql(rschema, targettype, etype),
+                                    ask_confirm=confirm)
+        if commit:
+            self.commit()
+                
+    def cmd_drop_entity_type(self, etype, commit=True):
+        """unregister an existing entity type
+        
+        This will trigger deletion of necessary relation types and definitions
+        """
+        # XXX what if we delete an entity type which is specialized by other types
+        # unregister the entity from EEType
+        self.rqlexec('DELETE EEType X WHERE X name %(etype)s', {'etype': etype},
+                     ask_confirm=self.verbosity>=2)
+        if commit:
+            self.commit()
+
+    def cmd_rename_entity_type(self, oldname, newname, commit=True):
+        """rename an existing entity type in the persistent schema
+        
+        `oldname` is a string giving the name of the existing entity type
+        `newname` is a string giving the name of the renamed entity type
+        """
+        self.rqlexec('SET ET name %(newname)s WHERE ET is EEType, ET name %(oldname)s',
+                     {'newname' : unicode(newname), 'oldname' : oldname})
+        if commit:
+            self.commit()
+        
+    def cmd_add_relation_type(self, rtype, addrdef=True, commit=True):
+        """register a new relation type named `rtype`, as described in the
+        schema description file.
+
+        `addrdef` is a boolean value; when True, it will also add all relations
+        of the type just added found in the schema definition file. Note that it
+        implies an intermediate "commit" which commits the relation type
+        creation (but not the relation definitions themselves, for which
+        committing depends on the `commit` argument value).
+        
+        """
+        rschema = self.new_schema.rschema(rtype)
+        # register the relation into ERType and insert necessary relation
+        # definitions
+        self.rqlexecall(ss.rschema2rql(rschema, addrdef=False),
+                        ask_confirm=self.verbosity>=2)
+        # register groups / permissions for the relation
+        self.rqlexecall(ss.erperms2rql(rschema, self.group_mapping()),
+                        ask_confirm=self.verbosity>=2)
+        if addrdef:
+            # commit so the relation type is in the schema before adding
+            # its relation definitions
+            self.commit()
+            self.rqlexecall(ss.rdef2rql(rschema),
+                            ask_confirm=self.verbosity>=2)
+        if commit:
+            self.commit()
+        
+    def cmd_drop_relation_type(self, rtype, commit=True):
+        """unregister an existing relation type"""
+        # unregister the relation from ERType
+        self.rqlexec('DELETE ERType X WHERE X name %r' % rtype,
+                     ask_confirm=self.verbosity>=2)
+        if commit:
+            self.commit()
+        
+    def cmd_rename_relation(self, oldname, newname, commit=True):
+        """rename an existing relation
+        
+        `oldname` is a string giving the name of the existing relation
+        `newname` is a string giving the name of the renamed relation
+        """
+        self.cmd_add_relation_type(newname, commit=True)
+        self.rqlexec('SET X %s Y WHERE X %s Y' % (newname, oldname),
+                     ask_confirm=self.verbosity>=2)
+        self.cmd_drop_relation_type(oldname, commit=commit)
+
+    def cmd_add_relation_definition(self, subjtype, rtype, objtype, commit=True):
+        """register a new relation definition, from its definition found in the
+        schema definition file
+        """
+        rschema = self.new_schema.rschema(rtype)
+        # relation type must exist before a definition can reference it;
+        # commit so it is actually in the schema
+        if not rtype in self.repo.schema:
+            self.cmd_add_relation_type(rtype, addrdef=False, commit=True)
+        self.rqlexecall(ss.rdef2rql(rschema, subjtype, objtype),
+                        ask_confirm=self.verbosity>=2)
+        if commit:
+            self.commit()
+        
+    def cmd_drop_relation_definition(self, subjtype, rtype, objtype, commit=True):
+        """unregister an existing relation definition"""
+        rschema = self.repo.schema.rschema(rtype)
+        # unregister the definition from EFRDef (final/attribute) or
+        # ENFRDef (non final) according to the relation kind
+        if rschema.is_final():
+            etype = 'EFRDef'
+        else:
+            etype = 'ENFRDef'
+        # NOTE(review): the two concatenated string parts join as
+        # '..."%s",X relation_type...' with no space after the comma —
+        # verify the RQL parser accepts this
+        rql = ('DELETE %s X WHERE X from_entity FE, FE name "%s",'
+               'X relation_type RT, RT name "%s", X to_entity TE, TE name "%s"')
+        self.rqlexec(rql % (etype, subjtype, rtype, objtype),
+                     ask_confirm=self.verbosity>=2)
+        if commit:
+            self.commit()
+        
+    def cmd_synchronize_permissions(self, ertype, commit=True):
+        """permission synchronization for an entity or relation type"""
+        # these pseudo relation types have no permissions to synchronize
+        if ertype in ('eid', 'has_text', 'identity'):
+            return
+        newrschema = self.new_schema[ertype]
+        teid = self.repo.schema[ertype].eid
+        if 'update' in newrschema.ACTIONS or newrschema.is_final():
+            # entity type
+            exprtype = u'ERQLExpression'
+        else:
+            # relation type
+            exprtype = u'RRQLExpression'
+        assert teid, ertype
+        gm = self.group_mapping()
+        confirm = self.verbosity >= 2
+        # * remove possibly deprecated permission (eg in the persistent schema
+        #   but not in the new schema)
+        # * synchronize existing expressions
+        # * add new groups/expressions
+        for action in newrschema.ACTIONS:
+            perm = '%s_permission' % action
+            # handle groups
+            newgroups = list(newrschema.get_groups(action))
+            for geid, gname in self.rqlexec('Any G, GN WHERE T %s G, G name GN, '
+                                            'T eid %%(x)s' % perm, {'x': teid}, 'x',
+                                            ask_confirm=False):
+                if not gname in newgroups:
+                    # group no longer granted in the new schema: remove it
+                    if not confirm or self.confirm('remove %s permission of %s to %s?'
+                                                   % (action, ertype, gname)):
+                        self.rqlexec('DELETE T %s G WHERE G eid %%(x)s, T eid %s'
+                                     % (perm, teid),
+                                     {'x': geid}, 'x', ask_confirm=False)
+                else:
+                    # already granted: nothing to add for this group
+                    newgroups.remove(gname)
+            for gname in newgroups:
+                if not confirm or self.confirm('grant %s permission of %s to %s?'
+                                               % (action, ertype, gname)):
+                    self.rqlexec('SET T %s G WHERE G eid %%(x)s, T eid %s'
+                                 % (perm, teid),
+                                 {'x': gm[gname]}, 'x', ask_confirm=False)
+            # handle rql expressions
+            newexprs = dict((expr.expression, expr) for expr in newrschema.get_rqlexprs(action))
+            for expreid, expression in self.rqlexec('Any E, EX WHERE T %s E, E expression EX, '
+                                                    'T eid %s' % (perm, teid),
+                                                    ask_confirm=False):
+                if not expression in newexprs:
+                    if not confirm or self.confirm('remove %s expression for %s permission of %s?'
+                                                   % (expression, action, ertype)):
+                        # deleting the relation will delete the expression entity
+                        self.rqlexec('DELETE T %s E WHERE E eid %%(x)s, T eid %s'
+                                     % (perm, teid),
+                                     {'x': expreid}, 'x', ask_confirm=False)
+                else:
+                    newexprs.pop(expression)
+            # whatever remains in newexprs is new and must be inserted
+            for expression in newexprs.values():
+                expr = expression.expression
+                if not confirm or self.confirm('add %s expression for %s permission of %s?'
+                                               % (expr, action, ertype)):
+                    self.rqlexec('INSERT RQLExpression X: X exprtype %%(exprtype)s, '
+                                 'X expression %%(expr)s, X mainvars %%(vars)s, T %s X '
+                                 'WHERE T eid %%(x)s' % perm,
+                                 {'expr': expr, 'exprtype': exprtype,
+                                  'vars': expression.mainvars, 'x': teid}, 'x',
+                                 ask_confirm=False)
+        if commit:
+            self.commit()
+        
+    def cmd_synchronize_rschema(self, rtype, syncrdefs=True, syncperms=True,
+                                commit=True):
+        """synchronize properties of the persistent relation schema against its
+        current definition:
+        
+        * description
+        * symetric, meta
+        * inlined
+        * relation definitions if `syncrdefs`
+        * permissions if `syncperms`
+        
+        physical schema changes should be handled by repository's schema hooks
+        """
+        rtype = str(rtype)
+        # guard against synchronizing the same relation type twice in a run
+        if rtype in self._synchronized:
+            return
+        self._synchronized.add(rtype)
+        rschema = self.new_schema.rschema(rtype)
+        self.rqlexecall(ss.updaterschema2rql(rschema),
+                        ask_confirm=self.verbosity>=2)
+        reporschema = self.repo.schema.rschema(rtype)
+        if syncrdefs:
+            # only synchronize definitions present in the persistent schema
+            for subj, obj in rschema.iter_rdefs():
+                if not reporschema.has_rdef(subj, obj):
+                    continue
+                self.cmd_synchronize_rdef_schema(subj, rschema, obj,
+                                                 commit=False)
+        if syncperms:
+            self.cmd_synchronize_permissions(rtype, commit=False)
+        if commit:
+            self.commit()
+                
+    def cmd_synchronize_eschema(self, etype, syncperms=True, commit=True):
+        """synchronize properties of the persistent entity schema against
+        its current definition:
+        
+        * description
+        * internationalizable, fulltextindexed, indexed, meta
+        * relations from/to this entity
+        * permissions if `syncperms`
+        """
+        etype = str(etype)
+        if etype in self._synchronized:
+            return
+        self._synchronized.add(etype)
+        repoeschema = self.repo.schema.eschema(etype)
+        try:
+            eschema = self.new_schema.eschema(etype)
+        except KeyError:
+            return
+        repospschema = repoeschema.specializes()
+        espschema = eschema.specializes()
+        if repospschema and not espschema:
+            self.rqlexec('DELETE X specializes Y WHERE X is EEType, X name %(x)s',
+                         {'x': str(repoechema)})
+        elif not repospschema and espschema:
+            self.rqlexec('SET X specializes Y WHERE X is EEType, X name %(x)s, '
+                         'Y is EEType, Y name %(y)s',
+                         {'x': str(repoechema), 'y': str(epschema)})
+        self.rqlexecall(ss.updateeschema2rql(eschema),
+                        ask_confirm=self.verbosity >= 2)
+        for rschema, targettypes, x in eschema.relation_definitions(True):
+            if x == 'subject':
+                if not rschema in repoeschema.subject_relations():
+                    continue
+                subjtypes, objtypes = [etype], targettypes
+            else: # x == 'object'
+                if not rschema in repoeschema.object_relations():
+                    continue
+                subjtypes, objtypes = targettypes, [etype]
+            self.cmd_synchronize_rschema(rschema, syncperms=syncperms,
+                                         syncrdefs=False, commit=False)
+            reporschema = self.repo.schema.rschema(rschema)
+            for subj in subjtypes:
+                for obj in objtypes:
+                    if not reporschema.has_rdef(subj, obj):
+                        continue
+                    self.cmd_synchronize_rdef_schema(subj, rschema, obj,
+                                                     commit=False)
+        if syncperms:
+            self.cmd_synchronize_permissions(etype, commit=False)
+        if commit:
+            self.commit()
+
+    def cmd_synchronize_rdef_schema(self, subjtype, rtype, objtype,
+                                    commit=True):
+        """synchronize properties of the persistent relation definition schema
+        against its current definition:
+        * order and other properties
+        * constraints
+        """
+        subjtype, objtype = str(subjtype), str(objtype)
+        rschema = self.new_schema.rschema(rtype)
+        reporschema = self.repo.schema.rschema(rschema)
+        # guard against synchronizing the same definition twice in a run
+        if (subjtype, rschema, objtype) in self._synchronized:
+            return
+        self._synchronized.add((subjtype, rschema, objtype))
+        # a symetric relation has the same definition in both directions
+        if rschema.symetric:
+            self._synchronized.add((objtype, rschema, subjtype))
+        confirm = self.verbosity >= 2
+        # properties
+        self.rqlexecall(ss.updaterdef2rql(rschema, subjtype, objtype),
+                        ask_confirm=confirm)
+        # constraints
+        newconstraints = list(rschema.rproperty(subjtype, objtype, 'constraints'))
+        # 1. remove old constraints and update constraints of the same type
+        # NOTE: don't use rschema.constraint_by_type because it may be
+        #       out of sync with newconstraints when multiple
+        #       constraints of the same type are used
+        for cstr in reporschema.rproperty(subjtype, objtype, 'constraints'):
+            for newcstr in newconstraints:
+                if newcstr.type() == cstr.type():
+                    break
+            else:
+                newcstr = None
+            if newcstr is None:
+                # constraint dropped in the new schema: delete it
+                self.rqlexec('DELETE X constrained_by C WHERE C eid %(x)s',
+                             {'x': cstr.eid}, 'x',
+                             ask_confirm=confirm)
+                self.rqlexec('DELETE EConstraint C WHERE C eid %(x)s',
+                             {'x': cstr.eid}, 'x',
+                             ask_confirm=confirm)
+            else:
+                # same constraint type exists: update its serialized value
+                newconstraints.remove(newcstr)
+                values = {'x': cstr.eid,
+                          'v': unicode(newcstr.serialize())}
+                self.rqlexec('SET X value %(v)s WHERE X eid %(x)s',
+                             values, 'x', ask_confirm=confirm)
+        # 2. add new constraints
+        for newcstr in newconstraints:
+            self.rqlexecall(ss.constraint2rql(rschema, subjtype, objtype,
+                                              newcstr),
+                            ask_confirm=confirm)
+        if commit:
+            self.commit()
+        
+    def cmd_synchronize_schema(self, syncperms=True, commit=True):
+        """synchronize the persistent schema against the current definition
+        schema.
+        
+        It will synch common stuff between the definition schema and the
+        actual persistent schema, it won't add/remove any entity or relation.
+        """
+        for etype in self.repo.schema.entities():
+            self.cmd_synchronize_eschema(etype, syncperms=syncperms, commit=False)
+        if commit:
+            self.commit()
+                
+    def cmd_change_relation_props(self, subjtype, rtype, objtype,
+                                  commit=True, **kwargs):
+        """change some properties of a relation definition"""
+        assert kwargs
+        restriction = []
+        if subjtype and subjtype != 'Any':
+            restriction.append('X from_entity FE, FE name "%s"' % subjtype)
+        if objtype and objtype != 'Any':
+            restriction.append('X to_entity TE, TE name "%s"' % objtype)
+        if rtype and rtype != 'Any':
+            restriction.append('X relation_type RT, RT name "%s"' % rtype)
+        assert restriction
+        values = []
+        for k, v in kwargs.items():
+            values.append('X %s %%(%s)s' % (k, k))
+            if isinstance(v, str):
+                kwargs[k] = unicode(v)
+        rql = 'SET %s WHERE %s' % (','.join(values), ','.join(restriction))
+        self.rqlexec(rql, kwargs, ask_confirm=self.verbosity>=2)
+        if commit:
+            self.commit()
+
+    def cmd_set_size_constraint(self, etype, rtype, size, commit=True):
+        """set change size constraint of a string attribute
+
+        if size is None any size constraint will be removed
+        """
+        # look up the currently applied size constraint, if any
+        oldvalue = None
+        for constr in self.repo.schema.eschema(etype).constraints(rtype):
+            if isinstance(constr, SizeConstraint):
+                oldvalue = constr.max
+        if oldvalue == size:
+            return
+        if oldvalue is None and not size is None:
+            # no constraint yet: create one and attach it
+            ceid = self.rqlexec('INSERT EConstraint C: C value %(v)s, C cstrtype CT '
+                                'WHERE CT name "SizeConstraint"',
+                                {'v': SizeConstraint(size).serialize()},
+                                ask_confirm=self.verbosity>=2)[0][0]
+            self.rqlexec('SET X constrained_by C WHERE X from_entity S, X relation_type R, '
+                         'S name "%s", R name "%s", C eid %s' % (etype, rtype, ceid),
+                         ask_confirm=self.verbosity>=2)
+        elif not oldvalue is None:
+            if not size is None:
+                # constraint exists: update its serialized value
+                self.rqlexec('SET C value %%(v)s WHERE X from_entity S, X relation_type R,'
+                             'X constrained_by C, C cstrtype CT, CT name "SizeConstraint",'
+                             'S name "%s", R name "%s"' % (etype, rtype),
+                             {'v': unicode(SizeConstraint(size).serialize())},
+                             ask_confirm=self.verbosity>=2)
+            else:
+                # size is None: detach the constraint from the attribute
+                self.rqlexec('DELETE X constrained_by C WHERE X from_entity S, X relation_type R,'
+                             'X constrained_by C, C cstrtype CT, CT name "SizeConstraint",'
+                             'S name "%s", R name "%s"' % (etype, rtype),
+                             ask_confirm=self.verbosity>=2)
+                # cleanup unused constraints
+                self.rqlexec('DELETE EConstraint C WHERE NOT X constrained_by C')
+        if commit:
+            self.commit()
+    
+    # Workflows handling ######################################################
+    
+    def cmd_add_state(self, name, stateof, initial=False, commit=False, **kwargs):
+        """method to ease workflow definition: add a state for one or more
+        entity type(s)
+
+        returns the eid of the created State entity
+        """
+        stateeid = self.cmd_add_entity('State', name=name, **kwargs)
+        # accept a single entity type as well as a sequence of them
+        if not isinstance(stateof, (list, tuple)):
+            stateof = (stateof,)
+        for etype in stateof:
+            # XXX ensure etype validity
+            self.rqlexec('SET X state_of Y WHERE X eid %(x)s, Y name %(et)s',
+                         {'x': stateeid, 'et': etype}, 'x', ask_confirm=False)
+            if initial:
+                self.rqlexec('SET ET initial_state S WHERE ET name %(et)s, S eid %(x)s',
+                             {'x': stateeid, 'et': etype}, 'x', ask_confirm=False)
+        if commit:
+            self.commit()
+        return stateeid
+    
+    def cmd_add_transition(self, name, transitionof, fromstates, tostate,
+                           requiredgroups=(), conditions=(), commit=False, **kwargs):
+        """method to ease workflow definition: add a transition for one or more
+        entity type(s), from one or more state and to a single state
+
+        `fromstates` and `tostate` are state eids; returns the eid of the
+        created Transition entity
+        """
+        treid = self.cmd_add_entity('Transition', name=name, **kwargs)
+        # accept a single entity type as well as a sequence of them
+        if not isinstance(transitionof, (list, tuple)):
+            transitionof = (transitionof,)
+        for etype in transitionof:
+            # XXX ensure etype validity
+            self.rqlexec('SET X transition_of Y WHERE X eid %(x)s, Y name %(et)s',
+                         {'x': treid, 'et': etype}, 'x', ask_confirm=False)
+        for stateeid in fromstates:
+            self.rqlexec('SET X allowed_transition Y WHERE X eid %(x)s, Y eid %(y)s',
+                         {'x': stateeid, 'y': treid}, 'x', ask_confirm=False)
+        self.rqlexec('SET X destination_state Y WHERE X eid %(x)s, Y eid %(y)s',
+                     {'x': treid, 'y': tostate}, 'x', ask_confirm=False)
+        # reset=False: the transition is new, nothing to clear first
+        self.cmd_set_transition_permissions(treid, requiredgroups, conditions,
+                                            reset=False)
+        if commit:
+            self.commit()
+        return treid
+
+    def cmd_set_transition_permissions(self, treid,
+                                       requiredgroups=(), conditions=(),
+                                       reset=True, commit=False):
+        """set or add (if `reset` is False) groups and conditions for a
+        transition
+
+        `treid` is the transition's eid, `requiredgroups` a sequence of group
+        names and `conditions` one RQL expression string or a sequence of them
+        """
+        if reset:
+            # clear previously set groups and conditions first
+            self.rqlexec('DELETE T require_group G WHERE T eid %(x)s',
+                         {'x': treid}, 'x', ask_confirm=False)
+            self.rqlexec('DELETE T condition R WHERE T eid %(x)s',
+                         {'x': treid}, 'x', ask_confirm=False)
+        for gname in requiredgroups:
+            ### XXX ensure gname validity
+            self.rqlexec('SET T require_group G WHERE T eid %(x)s, G name %(gn)s',
+                         {'x': treid, 'gn': gname}, 'x', ask_confirm=False)
+        # accept a single expression string as well as a sequence of them
+        if isinstance(conditions, basestring):
+            conditions = (conditions,)
+        for expr in conditions:
+            if isinstance(expr, str):
+                expr = unicode(expr)
+            self.rqlexec('INSERT RQLExpression X: X exprtype "ERQLExpression", '
+                         'X expression %(expr)s, T condition X '
+                         'WHERE T eid %(x)s',
+                         {'x': treid, 'expr': expr}, 'x', ask_confirm=False)
+        if commit:
+            self.commit()
+
+    # EProperty handling ######################################################
+
+    def cmd_property_value(self, pkey):
+        rql = 'Any V WHERE X is EProperty, X pkey %(k)s, X value V'
+        rset = self.rqlexec(rql, {'k': pkey}, ask_confirm=False)
+        return rset[0][0]
+
+    def cmd_set_property(self, pkey, value):
+        value = unicode(value)
+        try:
+            prop = self.rqlexec('EProperty X WHERE X pkey %(k)s', {'k': pkey},
+                                ask_confirm=False).get_entity(0, 0)
+        except:
+            self.cmd_add_entity('EProperty', pkey=unicode(pkey), value=value)
+        else:
+            self.rqlexec('SET X value %(v)s WHERE X pkey %(k)s',
+                         {'k': pkey, 'v': value}, ask_confirm=False)
+
+    # other data migration commands ###########################################
+        
+    def cmd_add_entity(self, etype, *args, **kwargs):
+        """add a new entity of the given type
+
+        positional `args` are (rtype, rvar) pairs; for each pair the eid to
+        link through `rtype` is popped from `kwargs` under the `rvar` key.
+        remaining keyword arguments become attribute values. returns the eid
+        of the created entity.
+        """
+        rql = 'INSERT %s X' % etype
+        relations = []
+        restrictions = []
+        for rtype, rvar in args:
+            relations.append('X %s %s' % (rtype, rvar))
+            restrictions.append('%s eid %s' % (rvar, kwargs.pop(rvar)))
+        # `commit` is a control flag, not an attribute
+        commit = kwargs.pop('commit', False)
+        for attr in kwargs:
+            relations.append('X %s %%(%s)s' % (attr, attr))
+        if relations:
+            rql = '%s: %s' % (rql, ', '.join(relations))
+        if restrictions:
+            rql = '%s WHERE %s' % (rql, ', '.join(restrictions))
+        eid = self.rqlexec(rql, kwargs, ask_confirm=self.verbosity>=2).rows[0][0]
+        if commit:
+            self.commit()
+        return eid
+    
+    def sqlexec(self, sql, args=None, ask_confirm=True):
+        """execute the given sql if confirmed
+        
+        should only be used for low level stuff undoable with existing higher
+        level actions
+        """
+        if not ask_confirm or self.confirm('execute sql: %s ?' % sql):
+            self.session.set_pool() # ensure pool is set
+            try:
+                cu = self.session.system_sql(sql, args)
+            except:
+                ex = sys.exc_info()[1]
+                if self.confirm('error: %s\nabort?' % ex):
+                    raise
+                return
+            try:
+                return cu.fetchall()
+            except:
+                # no result to fetch
+                return
+    
+    def rqlexec(self, rql, kwargs=None, cachekey=None, ask_confirm=True):
+        """execute the given rql query (or list of (rql, kwargs) couples) if
+        confirmed, and return the latest result set (None if nothing was
+        executed)
+        """
+        # normalize the single-query form into a list of (rql, kwargs)
+        # couples; note the loop below rebinds the `rql` name on purpose
+        if not isinstance(rql, (tuple, list)):
+            rql = ( (rql, kwargs), )
+        res = None
+        for rql, kwargs in rql:
+            if kwargs:
+                msg = '%s (%s)' % (rql, kwargs)
+            else:
+                msg = rql
+            if not ask_confirm or self.confirm('execute rql: %s ?' % msg):
+                try:
+                    res = self.rqlcursor.execute(rql, kwargs, cachekey)
+                except Exception, ex:
+                    # on refused abort, skip this query and go on
+                    if self.confirm('error: %s\nabort?' % ex):
+                        raise
+        return res
+
+    def rqliter(self, rql, kwargs=None, ask_confirm=True):
+        return ForRqlIterator(self, rql, None, ask_confirm)
+
+    def cmd_deactivate_verification_hooks(self):
+        """disable integrity verification hooks on the repository's hooks
+        manager (delegation)
+        """
+        self.repo.hm.deactivate_verification_hooks()
+
+    def cmd_reactivate_verification_hooks(self):
+        """re-enable integrity verification hooks on the repository's hooks
+        manager (delegation)
+        """
+        self.repo.hm.reactivate_verification_hooks()
+        
+    # broken db commands ######################################################
+
+    def cmd_change_attribute_type(self, etype, attr, newtype, commit=True):
+        """low level method to change the type of an entity attribute. This is
+        a quick hack which has some drawbacks:
+        * only works when the old type can be changed to the new type by the
+          underlying rdbms (eg using ALTER TABLE)
+        * the actual schema won't be updated until next startup
+        """
+        rschema = self.repo.schema.rschema(attr)
+        oldtype = rschema.objects(etype)[0]
+        rdefeid = rschema.rproperty(etype, oldtype, 'eid')
+        # first update the relation definition stored in the system tables so
+        # the new type is picked up on next startup
+        sql = ("UPDATE EFRDef "
+               "SET to_entity=(SELECT eid FROM EEType WHERE name='%s')"
+               "WHERE eid=%s") % (newtype, rdefeid)
+        self.sqlexec(sql, ask_confirm=False)
+        dbhelper = self.repo.system_source.dbhelper
+        sqltype = dbhelper.TYPE_MAPPING[newtype]
+        # then alter the actual sql column type accordingly
+        sql = 'ALTER TABLE %s ALTER COLUMN %s TYPE %s' % (etype, attr, sqltype)
+        self.sqlexec(sql, ask_confirm=False)
+        if commit:
+            self.commit()
+        
+    def cmd_add_entity_type_table(self, etype, commit=True):
+        """low level method to create the sql table for an existing entity.
+        This may be useful on accidental desync between the repository schema
+        and a sql database
+        """
+        dbhelper = self.repo.system_source.dbhelper
+        tablesql = eschema2sql(dbhelper, self.repo.schema.eschema(etype))
+        # the generated script may contain several statements
+        for sql in tablesql.split(';'):
+            if sql.strip():
+                self.sqlexec(sql)
+        if commit:
+            self.commit()
+            
+    def cmd_add_relation_type_table(self, rtype, commit=True):
+        """low level method to create the sql table for an existing relation.
+        This may be useful on accidental desync between the repository schema
+        and a sql database
+        """
+        dbhelper = self.repo.system_source.dbhelper
+        tablesql = rschema2sql(dbhelper, self.repo.schema.rschema(rtype))
+        # the generated script may contain several statements
+        for sql in tablesql.split(';'):
+            if sql.strip():
+                self.sqlexec(sql)
+        if commit:
+            self.commit()
+            
+
+class ForRqlIterator:
+    """specific rql iterator making the loop skippable: when confirmation is
+    required and the user refuses, iteration simply stops instead of raising
+    """
+    def __init__(self, helper, rql, kwargs, ask_confirm):
+        self._h = helper
+        self.rql = rql
+        self.kwargs = kwargs
+        self.ask_confirm = ask_confirm
+        # iterator on the result set, lazily created on first next() call
+        self._rsetit = None
+
+    def __iter__(self):
+        return self
+
+    def next(self):
+        if self._rsetit is not None:
+            return self._rsetit.next()
+        # first call: execute the query, asking for confirmation if required
+        rql, kwargs = self.rql, self.kwargs
+        if kwargs:
+            msg = '%s (%s)' % (rql, kwargs)
+        else:
+            msg = rql
+        if self.ask_confirm:
+            if not self._h.confirm('execute rql: %s ?' % msg):
+                raise StopIteration
+        try:
+            rset = self._h.rqlcursor.execute(rql, kwargs)
+        except Exception, ex:
+            if self._h.confirm('error: %s\nabort?' % ex):
+                raise
+            else:
+                raise StopIteration
+        self._rsetit = iter(rset)
+        return self._rsetit.next()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/pool.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,273 @@
+"""CubicWeb server connections pool :
+
+* the rql repository has a limited number of connections pools, each of them
+  dealing with a set of connections on each source used by the repository
+  
+* operations may be registered by hooks during a transaction; they will be
+  fired when the pool is committed or rolled back
+
+This module defines the `ConnectionsPool` class and a set of abstract classes
+for operations.
+
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+import sys
+    
+class ConnectionsPool(object):
+    """handle connections on a set of sources, at some point associated to a
+    user session
+    """
+
+    def __init__(self, sources):
+        # dictionnary of (source, connection), indexed by sources'uri
+        self.source_cnxs = {}
+        for source in sources:
+            self.source_cnxs[source.uri] = (source, source.get_connection())
+        if not 'system' in self.source_cnxs:
+            self.source_cnxs['system'] = self.source_cnxs[sources[0].uri]
+        self._cursors = {}
+
+    def commit(self):
+        """commit the current transaction for this user"""
+        # FIXME: what happends if a commit fail
+        # would need a two phases commit or like, but I don't know how to do
+        # this using the db-api...
+        for source, cnx in self.source_cnxs.values():
+            # let exception propagates
+            cnx.commit()
+        
+    def rollback(self):
+        """rollback the current transaction for this user"""
+        for source, cnx in self.source_cnxs.values():
+            # catch exceptions, rollback other sources anyway
+            try:
+                cnx.rollback()
+            except:
+                source.critical('rollback error', exc_info=sys.exc_info())
+
+    def close(self, i_know_what_i_do=False):
+        """close all connections in the pool"""
+        if i_know_what_i_do is not True: # unexpected closing safety belt
+            raise RuntimeError('pool shouldn\'t be closed')
+        for cu in self._cursors.values():
+            try:
+                cu.close()
+            except:
+                continue
+        for _, cnx in self.source_cnxs.values():
+            try:
+                cnx.close()
+            except:
+                continue
+            
+    # internals ###############################################################
+
+    def pool_set(self, session):
+        """pool is being set"""
+        self.check_connections()
+
+    def pool_reset(self, session):
+        """pool is being reseted"""
+        for source, cnx in self.source_cnxs.values():
+            source.pool_reset(cnx)
+        
+    def __getitem__(self, uri):
+        """subscription notation provide access to sources'cursors"""
+        try:
+            cursor = self._cursors[uri]
+        except KeyError:
+            cursor = self.source_cnxs[uri][1].cursor()
+            if cursor is not None:
+                # None possible on sources without cursor support such as ldap
+                self._cursors[uri] = cursor
+        return cursor
+    
+    def sources(self):
+        """return the source objects handled by this pool"""
+        # implementation details of flying insert requires the system source
+        # first
+        yield self.source_cnxs['system']
+        for uri, (source, cursor) in self.source_cnxs.items():
+            if uri == 'system':
+                continue
+            yield source
+        #return [source_cnx[0] for source_cnx in self.source_cnxs.values()]
+    
+    def source(self, uid):
+        """return the source object with the given uri"""
+        return self.source_cnxs[uid][0]
+    
+    def connection(self, uid):
+        """return the connection on the source object with the given uri"""
+        return self.source_cnxs[uid][1]
+
+    def reconnect(self, source):
+        """reopen a connection for this source"""
+        source.info('trying to reconnect')
+        self.source_cnxs[source.uri] = (source, source.get_connection())        
+        del self._cursors[source.uri]
+
+    def check_connections(self):
+        for source, cnx in self.source_cnxs.itervalues():
+            newcnx = source.check_connection(cnx)
+            if newcnx is not None:
+                self.reset_connection(source, newcnx)
+
+    def reset_connection(self, source, cnx):
+        self.source_cnxs[source.uri] = (source, cnx)
+        self._cursors.pop(source.uri, None)
+
+
+class Operation(object):
+    """an operation is triggered on connections pool events related to
+    commit / rollback transactions. Possible events are:
+
+    precommit:
+      the pool is preparing to commit. You shouldn't do anything which
+      has to be reverted if the commit fails at this point, but you can freely
+      do any heavy computation or raise an exception if the commit can't go.
+      You can add some new operations during this phase but their precommit
+      event won't be triggered
+
+    commit:
+      the pool is committing. You should avoid doing expensive stuff or
+      anything that may cause an exception in this event
+
+    revertcommit:
+      if an operation failed while being committed, this event is triggered
+      for all operations which had their commit event already fired, to let
+      them revert things (including the operation which made the commit fail)
+
+    rollback:
+      the transaction has been rolled back, either:
+      * intentionally
+      * because a precommit event failed, in which case all operations are
+        rolled back
+      * because a commit event failed, in which case all operations which
+        have not been triggered for commit are rolled back
+
+    order of operations may be important, and is controlled according to:
+    * operation's class
+    """
+
+    def __init__(self, session, **kwargs):
+        self.session = session
+        self.user = session.user
+        self.repo = session.repo
+        self.schema = session.repo.schema
+        self.config = session.repo.config
+        # arbitrary keyword arguments become instance attributes
+        self.__dict__.update(kwargs)
+        self.register(session)
+        # execution information
+        self.processed = None # 'precommit', 'commit'
+        self.failed = False
+
+    def register(self, session):
+        session.add_operation(self, self.insert_index())
+
+    def insert_index(self):
+        """return the index of the latest instance which is not a
+        LateOperation instance
+        """
+        for i, op in enumerate(self.session.pending_operations):
+            if isinstance(op, (LateOperation, SingleLastOperation)):
+                return i
+        return None
+
+    def handle_event(self, event):
+        """delegate event handling to the operation"""
+        getattr(self, event)()
+
+    def precommit_event(self):
+        """the observed connections pool is preparing a commit"""
+
+    def revertprecommit_event(self):
+        """an error occurred when pre-committing this operation or a later one
+
+        should revert pre-commit's changes but take care, they may have not
+        all been considered if it's this operation which failed
+        """
+
+    def commit_event(self):
+        """the observed connections pool is committing"""
+        raise NotImplementedError()
+
+    def revertcommit_event(self):
+        """an error occurred when committing this operation or a later one
+
+        should revert commit's changes but take care, they may have not
+        all been considered if it's this operation which failed
+        """
+
+    def rollback_event(self):
+        """the observed connections pool has been rolled back
+
+        do nothing by default, the operation will just be removed from the pool
+        operation list
+        """
+
+
+class PreCommitOperation(Operation):
+    """base class for operations only defining a precommit operation
+    """
+
+    def precommit_event(self):
+        """the observed connections pool is preparing a commit"""
+        raise NotImplementedError()
+
+    def commit_event(self):
+        """the observed connections pool is committing (nothing to do)"""
+
+
+class LateOperation(Operation):
+    """special operation which should be called after all possible (ie non late)
+    operations
+    """
+    def insert_index(self):
+        """return the index of the latest instance which is not a
+        SingleLastOperation instance
+        """
+        for i, op in enumerate(self.session.pending_operations):
+            if isinstance(op, SingleLastOperation):
+                return i
+        return None
+
+
+class SingleOperation(Operation):
+    """special operation which should be called once"""
+    def register(self, session):
+        """override register to handle cases where this operation has already
+        been added
+
+        return the replaced equivalent operation if any, else None
+        """
+        operations = session.pending_operations
+        index = self.equivalent_index(operations)
+        if index is not None:
+            # remove the equivalent operation, replaced by this one
+            equivalent = operations.pop(index)
+        else:
+            equivalent = None
+        session.add_operation(self, self.insert_index())
+        return equivalent
+
+    def equivalent_index(self, operations):
+        """return the index of the equivalent operation if any
+
+        two operations are considered equivalent when their classes are the
+        same (exactly, not through inheritance)
+        """
+        equivalents = [i for i, op in enumerate(operations)
+                       if op.__class__ is self.__class__]
+        if equivalents:
+            return equivalents[0]
+        return None
+
+
+class SingleLastOperation(SingleOperation):
+    """special operation which should be called once and after all other
+    operations
+    """
+    def insert_index(self):
+        # None means append at the end of the pending operations list
+        return None
+
+from logging import getLogger
+from cubicweb import set_log_methods
+set_log_methods(Operation, getLogger('cubicweb.session'))
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/querier.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,680 @@
+"""Helper classes to execute RQL queries on a set of sources, performing
+security checking and data aggregation.
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from itertools import repeat
+
+from logilab.common.cache import Cache
+from logilab.common.compat import any
+from rql import RQLHelper, RQLSyntaxError
+from rql.stmts import Union, Select
+from rql.nodes import (Relation, VariableRef, Constant, Exists, Variable,
+                       SubQuery)
+
+from cubicweb import Unauthorized, QueryError, UnknownEid, typed_eid
+from cubicweb import server
+from cubicweb.rset import ResultSet
+
+from cubicweb.server.utils import cleanup_solutions
+from cubicweb.server.rqlannotation import SQLGenAnnotator, set_qdata
+from cubicweb.server.ssplanner import add_types_restriction
+
+def empty_rset(session, rql, args, rqlst=None):
+    """build an empty result set object for the given query"""
+    return ResultSet([], rql, args, rqlst=rqlst)
+
+def update_varmap(varmap, selected, table):
+    """update varmap in-place, mapping each selected term to the column of
+    the given table it will be stored in; raise on variable name conflict
+
+    (the original docstring claimed this returned a sql schema; it doesn't)
+    """
+    for i, term in enumerate(selected):
+        key = term.as_string()
+        value = '%s.C%s' % (table, i)
+        # a term already mapped to a different column is a conflict
+        if varmap.get(key, value) != value:
+            raise Exception('variable name conflict on %s' % key)
+        varmap[key] = value
+
+# permission utilities ########################################################
+
+def var_kwargs(restriction, args):
+    """return a dict mapping variable names to their eid for variables
+    constrained by an 'X eid %(x)s' relation in the given restriction
+    """
+    varkwargs = {}
+    for rel in restriction.iget_nodes(Relation):
+        cmp = rel.children[1]  # NOTE(review): shadows the `cmp` builtin
+        if rel.r_type == 'eid' and cmp.operator == '=' and \
+                isinstance(cmp.children[0], Constant) and \
+                cmp.children[0].type == 'Substitute':
+            varkwargs[rel.children[0].name] = typed_eid(cmp.children[0].eval(args))
+    return varkwargs
+
+def check_no_password_selected(rqlst):
+    """check that Password entities are not selected, raising Unauthorized
+    otherwise
+    """
+    for solution in rqlst.solutions:
+        # a solution maps variable names to entity types
+        if 'Password' in solution.itervalues():
+            raise Unauthorized('Password selection is not allowed')
+
+def check_read_access(schema, user, rqlst, solution):
+    """check that the given user has credentials to access data read by the
+    query
+
+    return a dict defining necessary local checks (due to use of rql expression
+    in the schema), keys are variable names and values associated rql expressions
+    for the associated variable with the given solution
+    """
+    if rqlst.where is not None:
+        for rel in rqlst.where.iget_nodes(Relation):
+            # XXX has_text may have specific perm ?
+            if rel.r_type in ('is', 'is_instance_of', 'has_text', 'identity', 'eid'):
+                continue
+            if not schema.rschema(rel.r_type).has_access(user, 'read'):
+                raise Unauthorized('read', rel.r_type)
+    localchecks = {}
+    # iterate on defined_vars and not on solutions to ignore column aliases
+    for varname in rqlst.defined_vars:
+        etype = solution[varname]
+        eschema = schema.eschema(etype)
+        if not eschema.has_access(user, 'read'):
+            # direct read access denied: rql expressions may still grant it,
+            # record them as local checks to insert into the query
+            erqlexprs = eschema.get_rqlexprs('read')
+            if not erqlexprs:
+                ex = Unauthorized('read', etype)
+                ex.var = varname
+                raise ex
+            #assert len(erqlexprs) == 1
+            localchecks[varname] = tuple(erqlexprs)
+    return localchecks
+                    
+def noinvariant_vars(restricted, select, nbtrees):
+    """yield variables among the `restricted` names which must not be
+    considered invariant by the sql generation
+
+    a variable can actually be invariant if it has not been restricted for
+    security reason or if security assertion hasn't modified the possible
+    solutions for the query
+    """
+    if nbtrees != 1:
+        for vname in restricted:
+            try:
+                yield select.defined_vars[vname]
+            except KeyError:
+                # this is an alias
+                continue
+    else:
+        for vname in restricted:
+            try:
+                var = select.defined_vars[vname]
+            except KeyError:
+                # this is an alias
+                continue
+            if len(var.stinfo['possibletypes']) != 1:
+                yield var
+
+def _expand_selection(terms, selected, aliases, select, newselect):
+    """ensure every variable used in the given terms is selected, adding a
+    matching column alias to the new (outer) select when necessary
+    """
+    for term in terms:
+        for vref in term.iget_nodes(VariableRef):
+            if not vref.name in selected:
+                select.append_selected(vref)
+                colalias = newselect.get_variable(vref.name, len(aliases))
+                aliases.append(VariableRef(colalias))
+                selected.add(vref.name)
+                
+# Plans #######################################################################
+
+class ExecutionPlan(object):
+    """the execution model of a rql query, composed of querier steps"""
+
+    def __init__(self, querier, rqlst, args, session):
+        # original rql syntax tree
+        self.rqlst = rqlst
+        self.args = args or {}
+        # session executing the query
+        self.session = session
+        # quick reference to the system source
+        self.syssource = session.pool.source('system')
+        # execution steps
+        self.steps = []
+        # index of temporary tables created during execution
+        self.temp_tables = {}
+        # various resource accessors
+        self.querier = querier
+        self.schema = querier.schema
+        self.rqlhelper = querier._rqlhelper
+        self.sqlannotate = querier.sqlgen_annotate
+
+    def annotate_rqlst(self):
+        """annotate the syntax tree if it has not been done yet"""
+        if not self.rqlst.annotated:
+            self.rqlhelper.annotate(self.rqlst)
+
+    def add_step(self, step):
+        """add a step to the plan"""
+        self.steps.append(step)
+
+    def clean(self):
+        """remove temporary tables"""
+        self.syssource.clean_temp_data(self.session, self.temp_tables)
+
+    def sqlexec(self, sql, args=None):
+        """execute sql on the system source"""
+        return self.syssource.sqlexec(self.session, sql, args)
+
+    def execute(self):
+        """execute a plan and return resulting rows"""
+        try:
+            for step in self.steps:
+                result = step.execute()
+            # the latest executed step contains the full query result
+            return result
+        finally:
+            # always remove temporary tables, even on error
+            self.clean()
+
+    def init_temp_table(self, table, selected, sol):
+        """initialize sql schema and variable map for a temporary table which
+        will be used to store result for the given rqlst
+        """
+        try:
+            # the table may already be known from a previous step
+            outputmap, sqlschema, _ = self.temp_tables[table]
+            update_varmap(outputmap, selected, table)
+        except KeyError:
+            sqlschema, outputmap = self.syssource.temp_table_def(selected, sol,
+                                                                 table)
+            # last flag tells whether the table has actually been created yet
+            self.temp_tables[table] = [outputmap, sqlschema, False]
+        return outputmap
+
+    def create_temp_table(self, table):
+        """create a temporary table to store result for the given rqlst"""
+        if not self.temp_tables[table][-1]:
+            sqlschema = self.temp_tables[table][1]
+            self.syssource.create_temp_table(self.session, table, sqlschema)
+            self.temp_tables[table][-1] = True
+
+    def preprocess(self, union, security=True):
+        """insert security when necessary then annotate rql st for sql generation
+
+        return rqlst to actually execute
+        """
+        noinvariant = set()
+        if security and not self.session.is_super_session:
+            self._insert_security(union, noinvariant)
+        self.rqlhelper.simplify(union)
+        self.sqlannotate(union)
+        set_qdata(union, noinvariant)
+        if union.has_text_query:
+            # full text queries' results can't be cached
+            self.cache_key = None
+
+    def _insert_security(self, union, noinvariant):
+        """recursively insert security checks (rql expressions from the
+        schema, rewritten into the tree) into each select of the union
+        """
+        rh = self.rqlhelper
+        for select in union.children[:]:
+            for subquery in select.with_:
+                self._insert_security(subquery.query, noinvariant)
+            localchecks, restricted = self._check_permissions(select)
+            if any(localchecks):
+                rewrite = self.session.rql_rewriter.rewrite
+                nbtrees = len(localchecks)
+                myunion = union
+                # transform in subquery when len(localchecks)>1 and groups
+                if nbtrees > 1 and (select.orderby or select.groupby or
+                                    select.having or select.has_aggregat or
+                                    select.limit or select.offset):
+                    newselect = Select()
+                    # only select variables in subqueries
+                    origselection = select.selection
+                    select.select_only_variables()
+                    select.has_aggregat = False
+                    # create subquery first so correct node are used on copy
+                    # (eg ColumnAlias instead of Variable)
+                    aliases = [VariableRef(newselect.get_variable(vref.name, i))
+                               for i, vref in enumerate(select.selection)]
+                    selected = set(vref.name for vref in aliases)
+                    # now copy original selection and groups
+                    for term in origselection:
+                        newselect.append_selected(term.copy(newselect))
+                    if select.orderby:
+                        newselect.set_orderby([s.copy(newselect) for s in select.orderby])
+                        _expand_selection(select.orderby, selected, aliases, select, newselect)
+                        select.orderby = () # XXX dereference?
+                    if select.groupby:
+                        newselect.set_groupby([g.copy(newselect) for g in select.groupby])
+                        _expand_selection(select.groupby, selected, aliases, select, newselect)
+                        select.groupby = () # XXX dereference?
+                    if select.having:
+                        newselect.set_having([g.copy(newselect) for g in select.having])
+                        _expand_selection(select.having, selected, aliases, select, newselect)
+                        select.having = () # XXX dereference?
+                    if select.limit:
+                        newselect.limit = select.limit
+                        select.limit = None
+                    if select.offset:
+                        newselect.offset = select.offset
+                        select.offset = 0
+                    myunion = Union()
+                    newselect.set_with([SubQuery(aliases, myunion)], check=False)
+                    solutions = [sol.copy() for sol in select.solutions]
+                    cleanup_solutions(newselect, solutions)
+                    newselect.set_possible_types(solutions)
+                    # if some solutions doesn't need rewriting, insert original
+                    # select as first union subquery
+                    if () in localchecks:
+                        myunion.append(select)
+                    # we're done, replace original select by the new select with
+                    # subqueries (more added in the loop below)
+                    union.replace(select, newselect)
+                elif not () in localchecks:
+                    # no solution is check-free: the original select itself is
+                    # replaced by the rewritten copies appended below
+                    union.remove(select)
+                for lcheckdef, lchecksolutions in localchecks.iteritems():
+                    if not lcheckdef:
+                        continue
+                    myrqlst = select.copy(solutions=lchecksolutions)
+                    myunion.append(myrqlst)
+                    # in-place rewrite + annotation / simplification
+                    rewrite(myrqlst, lcheckdef, lchecksolutions, self.args)
+                    noinvariant.update(noinvariant_vars(restricted, myrqlst, nbtrees))
+                if () in localchecks:
+                    select.set_possible_types(localchecks[()])
+                    add_types_restriction(self.schema, select)
+                    noinvariant.update(noinvariant_vars(restricted, select, nbtrees))
+
+    def _check_permissions(self, rqlst):
+        """return a dict defining "local checks", e.g. RQLExpression defined in
+        the schema that should be inserted in the original query
+
+        solutions where a variable has a type which the user definitely can't
+        read are removed, else if the user may read it (eg if an rql expression
+        is defined for the "read" permission of the related type), the local
+        checks dict for the solution is updated
+
+        return a dict with entries for each different local check necessary,
+        with associated solutions as value. A local check is defined by a list
+        of 2-uple, with variable name as first item and the necessary rql
+        expression as second item for each variable which has to be checked.
+        So solutions which don't require local checks will be associated to
+        the empty tuple key.
+
+        note: rqlst should not have been simplified at this point
+        """
+        assert not self.session.is_super_session
+        user = self.session.user
+        schema = self.schema
+        msgs = []
+        # dictionary of variables restricted for security reason
+        localchecks = {}
+        if rqlst.where is not None:
+            varkwargs = var_kwargs(rqlst.where, self.args)
+            neweids = self.session.query_data('neweids', ())
+        else:
+            varkwargs = None
+        restricted_vars = set()
+        newsolutions = []
+        for solution in rqlst.solutions:
+            try:
+                localcheck = check_read_access(schema, user, rqlst, solution)
+            except Unauthorized, ex:
+                msg = 'remove %s from solutions since %s has no %s access to %s'
+                msg %= (solution, user.login, ex.args[0], ex.args[1])
+                msgs.append(msg)
+                LOGGER.info(msg)
+            else:
+                newsolutions.append(solution)
+                if varkwargs:
+                    # try to benefit of rqlexpr.check cache for entities which
+                    # are specified by eid in query'args
+                    for varname, eid in varkwargs.iteritems():
+                        try:
+                            rqlexprs = localcheck.pop(varname)
+                        except KeyError:
+                            continue
+                        if eid in neweids:
+                            # entity created in this very transaction: skip
+                            continue
+                        for rqlexpr in rqlexprs:
+                            if rqlexpr.check(self.session, eid):
+                                break
+                        else:
+                            raise Unauthorized()
+                restricted_vars.update(localcheck)
+                localchecks.setdefault(tuple(localcheck.iteritems()), []).append(solution)
+        # raise Unauthorized exception if the user can't access any solution
+        if not newsolutions:
+            raise Unauthorized('\n'.join(msgs))
+        rqlst.set_possible_types(newsolutions)
+        return localchecks, restricted_vars
+
+    def finalize(self, select, solutions, insertedvars):
+        """build a union wrapping the given select, propagating solutions for
+        variables inserted during planning, then preprocess it for execution
+        """
+        rqlst = Union()
+        rqlst.append(select)
+        for mainvarname, rschema, newvarname in insertedvars:
+            nvartype = str(rschema.objects(solutions[0][mainvarname])[0])
+            for sol in solutions:
+                sol[newvarname] = nvartype
+        select.clean_solutions(solutions)
+        self.rqlhelper.annotate(rqlst)
+        # security has already been inserted at this point
+        self.preprocess(rqlst, security=False)
+        return rqlst
+       
+class InsertPlan(ExecutionPlan):
+    """an execution model specific to the INSERT rql query
+    """
+    
+    def __init__(self, querier, rqlst, args, session):
+        ExecutionPlan.__init__(self, querier, rqlst, args, session)
+        # save originally selected variables, we may modify this
+        # dictionary for substitution (query parameters)
+        self.selected = rqlst.selection
+        # list of rows of new or updated entity definitions (utils.Entity);
+        # starts with a single empty row, expanded by substitute_entity_def
+        self.e_defs = [[]]
+        # list of new relation definitions (3-tuple (from_eid, r_type, to_eid))
+        self.r_defs = []
+        # indexes to track entity definitions bound to relation definitions
+        # (keyed by entity def, as subject resp. object of the relation)
+        self._r_subj_index = {}
+        self._r_obj_index = {}
+        # relation definitions expanded once their entity defs are substituted
+        self._expanded_r_defs = {}
+
+    def relation_definitions(self, rqlst, to_build):
+        """add constant values to entity def, mark variables to be selected
+
+        `to_build` maps variable names to the entity definitions being built.
+        Return a dictionary mapping each entity definition to the list of
+        (rtype, variable, role) triples that must be resolved by selection.
+        """
+        to_select = {}
+        for relation in rqlst.main_relations:
+            lhs, rhs = relation.get_variable_parts()
+            rtype = relation.r_type
+            # meta relations can never be assigned through INSERT
+            if rtype in ('eid', 'has_text', 'is', 'is_instance_of', 'identity'):
+                raise QueryError("can't assign to %s" % rtype)
+            try:
+                edef = to_build[str(lhs)]
+            except KeyError:
+                # lhs var is not to build, should be selected and added as an
+                # object relation
+                edef = to_build[str(rhs)]
+                to_select.setdefault(edef, []).append((rtype, lhs, 1))
+            else:
+                if isinstance(rhs, Constant) and not rhs.uid:
+                    # add constant values to entity def
+                    value = rhs.eval(self.args)
+                    eschema = edef.e_schema
+                    attrtype = eschema.subject_relation(rtype).objects(eschema)[0]
+                    # passwords are stored encoded, not as unicode
+                    if attrtype == 'Password' and isinstance(value, unicode): 
+                        value = value.encode('UTF8')
+                    edef[rtype] = value
+                elif to_build.has_key(str(rhs)):
+                    # create a relation between two newly created variables
+                    self.add_relation_def((edef, rtype, to_build[rhs.name]))
+                else:
+                    to_select.setdefault(edef, []).append( (rtype, rhs, 0) )
+        return to_select
+
+        
+    def add_entity_def(self, edef):
+        """add an entity definition to build"""
+        edef.querier_pending_relations = {}
+        self.e_defs[-1].append(edef)
+        
+    def add_relation_def(self, rdef):
+        """add a relation definition to build and index it by its (non-eid)
+        subject and object entity definitions
+        """
+        self.r_defs.append(rdef)
+        if not isinstance(rdef[0], int):
+            self._r_subj_index.setdefault(rdef[0], []).append(rdef)
+        if not isinstance(rdef[2], int):
+            self._r_obj_index.setdefault(rdef[2], []).append(rdef)
+        
+    def substitute_entity_def(self, edef, edefs):
+        """substitute an incomplete entity definition by a list of complete
+        equivalents
+        
+        e.g. on queries such as ::
+          INSERT Personne X, Societe Y: X nom N, Y nom 'toto', X travaille Y
+          WHERE U login 'admin', U login N
+
+        X will be inserted as many times as U exists, and so the X travaille Y
+        relations have to be added as many times as X is inserted
+        """
+        if not edefs or not self.e_defs:
+            # no result, no entity will be created
+            self.e_defs = ()
+            return
+        # first remove the incomplete entity definition
+        colidx = self.e_defs[0].index(edef)
+        # duplicate each existing row once per complete definition: the first
+        # definition replaces edef in place, additional ones get copied rows
+        for i, row in enumerate(self.e_defs[:]):
+            self.e_defs[i][colidx] = edefs[0]
+            samplerow = self.e_defs[i]
+            for edef in edefs[1:]:
+                row = samplerow[:]
+                row[colidx] = edef
+                self.e_defs.append(row)
+        # now, see if this entity def is referenced as subject in some relation
+        # definition
+        if self._r_subj_index.has_key(edef):
+            for rdef in self._r_subj_index[edef]:
+                expanded = self._expanded(rdef)
+                result = []
+                for exp_rdef in expanded:
+                    for edef in edefs:
+                        result.append( (edef, exp_rdef[1], exp_rdef[2]) )
+                self._expanded_r_defs[rdef] = result
+        # and finally, see if this entity def is referenced as object in some
+        # relation definition
+        if self._r_obj_index.has_key(edef):
+            for rdef in self._r_obj_index[edef]:
+                expanded = self._expanded(rdef)
+                result = []
+                for exp_rdef in expanded:
+                    for edef in edefs:
+                        result.append( (exp_rdef[0], exp_rdef[1], edef) )
+                self._expanded_r_defs[rdef] = result
+        
+    def _expanded(self, rdef):
+        """return expanded value for the given relation definition
+
+        on first expansion, the original definition is moved from `r_defs`
+        into the expanded index so it isn't inserted twice
+        """
+        try:
+            return self._expanded_r_defs[rdef]
+        except KeyError:
+            self.r_defs.remove(rdef)
+            return [rdef]
+        
+    def relation_defs(self):
+        """return the list for relation definitions to insert"""
+        for rdefs in self._expanded_r_defs.values():
+            for rdef in rdefs:
+                yield rdef
+        for rdef in self.r_defs:
+            yield rdef
+            
+    def insert_entity_defs(self):
+        """return eids of inserted entities in a suitable form for the resulting
+        result set, e.g.:
+        
+        e.g. on queries such as ::
+          INSERT Personne X, Societe Y: X nom N, Y nom 'toto', X travaille Y
+          WHERE U login 'admin', U login N
+
+        if there are two entities matching U, the result set will look like
+        [(eidX1, eidY1), (eidX2, eidY2)]
+        """
+        session = self.session
+        repo = session.repo
+        results = []
+        for row in self.e_defs:
+            results.append([repo.glob_add_entity(session, edef)
+                            for edef in row])
+        return results
+        
+    def insert_relation_defs(self):
+        """insert the planned relation definitions, normalizing subjects and
+        objects to integer eids first; inlined relations are stored through an
+        entity update instead of a plain relation addition
+        """
+        session = self.session
+        repo = session.repo
+        for subj, rtype, obj in self.relation_defs():
+            # if a string is given into args instead of an int, we get it here
+            if isinstance(subj, basestring):
+                subj = typed_eid(subj)
+            elif not isinstance(subj, (int, long)):
+                subj = subj.eid
+            if isinstance(obj, basestring):
+                obj = typed_eid(obj)
+            elif not isinstance(obj, (int, long)):
+                obj = obj.eid
+            if repo.schema.rschema(rtype).inlined:
+                # inlined relation: set the attribute on the subject entity
+                entity = session.eid_rset(subj).get_entity(0, 0)
+                entity[rtype] = obj
+                repo.glob_update_entity(session, entity)
+            else:
+                repo.glob_add_relation(session, subj, rtype, obj)
+
+
+class QuerierHelper(object):
+    """helper class to execute rql queries, putting all things together"""
+    
+    def __init__(self, repo, schema):
+        # system info helper
+        self._repo = repo
+        # application schema
+        self.set_schema(schema)
+        
+    def set_schema(self, schema):
+        """(re)set the application schema, rebuilding the rql helper, the
+        parse cache, the planner and the sql annotator accordingly
+        """
+        self.schema = schema
+        # rql parsing / analysing helper
+        self._rqlhelper = RQLHelper(schema, special_relations={'eid': 'uid',
+                                                               'has_text': 'fti'})        
+        self._rql_cache = Cache(self._repo.config['rql-cache-size'])
+        self.cache_hit, self.cache_miss = 0, 0
+        # rql planner
+        # note: don't use repo.sources, may not be built yet, and also "admin"
+        #       isn't an actual source
+        if len([uri for uri in self._repo.config.sources() if uri != 'admin']) < 2:
+            # single source: use the simpler single-source planner
+            from cubicweb.server.ssplanner import SSPlanner
+            self._planner = SSPlanner(schema, self._rqlhelper)
+        else:
+            from cubicweb.server.msplanner import MSPlanner            
+            self._planner = MSPlanner(schema, self._rqlhelper)
+        # sql generation annotator
+        self.sqlgen_annotate = SQLGenAnnotator(schema).annotate
+        
+    def parse(self, rql, annotate=False):
+        """return a rql syntax tree for the given rql"""
+        try:
+            return self._rqlhelper.parse(unicode(rql), annotate=annotate)
+        except UnicodeError:
+            raise RQLSyntaxError(rql)
+
+    def solutions(self, session, rqlst, args):
+        """compute the possible solutions (variable -> type mappings) of
+        `rqlst`, resolving eids found in `args` through the repository
+        """
+        assert session is not None
+        def type_from_eid(eid, type_from_eid=self._repo.type_from_eid,
+                          session=session):
+            return type_from_eid(eid, session)
+        self._rqlhelper.compute_solutions(rqlst, {'eid': type_from_eid}, args)
+
+    def plan_factory(self, rqlst, args, session):
+        """create an execution plan for an INSERT RQL query"""
+        if rqlst.TYPE == 'insert':
+            return InsertPlan(self, rqlst, args, session)
+        return ExecutionPlan(self, rqlst, args, session)
+        
+    def execute(self, session, rql, args=None, eid_key=None, build_descr=True):
+        """execute a rql query, return resulting rows and their description in
+        a `ResultSet` object
+
+        * `rql` should be a unicode string or a plain ascii string
+        * `args` the optional parameters dictionary associated to the query
+        * `build_descr` is a boolean flag indicating if the description should
+          be built on select queries (if false, the description will be an empty
+          list)
+        * `eid_key` must be both a key in args and a substitution in the rql
+          query. It should be used to enhance cacheability of rql queries.
+          It may be a tuple for keys in args.
+          eid_key must be provided in case where an eid substitution is provided
+          and resolves some ambiguity in the possible solutions inferred for each
+          variable in the query.
+
+        on INSERT queries, there will be one row with the eid of each inserted
+        entity
+        
+        result for DELETE and SET queries is undefined yet
+
+        to maximize the rql parsing/analyzing cache performance, you should
+        always use substitute arguments in queries (eg avoid query such as
+        'Any X WHERE X eid 123'!)
+        """
+        if server.DEBUG:
+            print '*'*80
+            print rql
+        # parse the query and binds variables
+        if eid_key is not None:
+            if not isinstance(eid_key, (tuple, list)):
+                eid_key = (eid_key,)
+            # cache key includes the type of each eid argument so the cached
+            # tree's solutions remain valid
+            cachekey = [rql]
+            for key in eid_key:
+                try:
+                    etype = self._repo.type_from_eid(args[key], session)
+                except KeyError:
+                    raise QueryError('bad cache key %s (no value)' % key)
+                except TypeError:
+                    raise QueryError('bad cache key %s (value: %r)' % (key, args[key]))
+                except UnknownEid:
+                    # we want queries such as "Any X WHERE X eid 9999"
+                    # return an empty result instead of raising UnknownEid
+                    return empty_rset(session, rql, args)
+                cachekey.append(etype)
+            cachekey = tuple(cachekey)
+        else:
+            cachekey = rql
+        try:
+            rqlst = self._rql_cache[cachekey]
+            self.cache_hit += 1
+        except KeyError:
+            self.cache_miss += 1
+            rqlst = self.parse(rql)
+            try:
+                self.solutions(session, rqlst, args)
+            except UnknownEid:
+                # we want queries such as "Any X WHERE X eid 9999"
+                # return an empty result instead of raising UnknownEid
+                return empty_rset(session, rql, args, rqlst)
+            self._rql_cache[cachekey] = rqlst
+        orig_rqlst = rqlst
+        if not rqlst.TYPE == 'select':
+            if not session.is_super_session:
+                check_no_password_selected(rqlst)
+            # write query, ensure session's mode is 'write' so connections
+            # won't be released until commit/rollback
+            session.mode = 'write'
+            cachekey = None
+        else:
+            if not session.is_super_session:
+                for select in rqlst.children:
+                    check_no_password_selected(select)
+            # on select query, always copy the cached rqlst so we don't have to
+            # bother modifying it. This is not necessary on write queries since
+            # a new syntax tree is built from them.
+            rqlst = rqlst.copy()
+            self._rqlhelper.annotate(rqlst)
+        # make an execution plan
+        plan = self.plan_factory(rqlst, args, session)
+        plan.cache_key = cachekey
+        self._planner.build_plan(plan)
+        # execute the plan
+        try:
+            results = plan.execute()
+        except Unauthorized:
+            # XXX this could be done in security's after_add_relation hooks
+            # since it's actually realy only needed there (other relations
+            # security is done *before* actual changes, and add/update entity
+            # security is done after changes but in an operation, and exception
+            # generated in operation's events  properly generate a rollback on
+            # the session). Even though, this is done here for a better
+            # consistency: getting an Unauthorized exception means the
+            # transaction has been rollbacked
+            session.rollback()
+            raise
+        # build a description for the results if necessary
+        descr = ()
+        if build_descr:
+            if rqlst.TYPE == 'select':
+                # sample selection
+                descr = session.build_description(orig_rqlst, args, results)
+            elif rqlst.TYPE == 'insert':
+                # on insert plan, some entities may have been auto-casted,
+                # so compute description manually even if there is only
+                # one solution
+                basedescr = [None] * len(plan.selected)
+                todetermine = zip(xrange(len(plan.selected)), repeat(False))
+                descr = session._build_descr(results, basedescr, todetermine)
+            # FIXME: get number of affected entities / relations on non
+            # selection queries ?
+        # return a result set object
+        return ResultSet(results, rql, args, descr, eid_key, orig_rqlst)
+
+from logging import getLogger
+from cubicweb import set_log_methods
+LOGGER = getLogger('cubicweb.querier')
+# attach debug/info/warning/... logging methods to QuerierHelper
+set_log_methods(QuerierHelper, LOGGER)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/repository.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1114 @@
+"""Defines the central class for the CubicWeb RQL server: the repository.
+
+The repository is an abstraction allowing execution of rql queries against
+data sources. Most of the work is actually done in helper classes. The
+repository mainly:
+
+* brings these classes all together to provide a single access
+  point to a cubicweb application.
+* handles session management
+* provides method for pyro registration, to call if pyro is enabled
+
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+import sys
+import Queue
+from os.path import join, exists
+from time import time, localtime, strftime
+
+from mx.DateTime import now
+
+from logilab.common.decorators import cached
+
+from yams import BadSchemaDefinition
+from rql import RQLSyntaxError
+
+from cubicweb import (CW_SOFTWARE_ROOT, UnknownEid, AuthenticationError,
+                      ETypeNotSupportedBySources, RTypeNotSupportedBySources,
+                      BadConnectionId, Unauthorized, ValidationError,
+                      ExecutionError, typed_eid,
+                      CW_MIGRATION_MAP)
+from cubicweb.cwvreg import CubicWebRegistry
+from cubicweb.schema import CubicWebSchema
+
+from cubicweb.server.utils import RepoThread, LoopTask
+from cubicweb.server.pool import ConnectionsPool, LateOperation, SingleLastOperation
+from cubicweb.server.session import Session, InternalSession
+from cubicweb.server.querier import QuerierHelper
+from cubicweb.server.sources import get_source
+from cubicweb.server.hooksmanager import HooksManager
+from cubicweb.server.hookhelper import rproperty
+
+
+class CleanupEidTypeCacheOp(SingleLastOperation):
+    """on rollback of a insert query or commit of delete query, we have to
+    clear repository's cache from no more valid entries
+
+    NOTE: querier's rqlst/solutions cache may have been polluted too with
+    queries such as Any X WHERE X eid 32 if 32 has been rollbacked however
+    generated queries are unpredictable and analysing all the cache probably
+    too expensive. Notice that there is no pb when using args to specify eids
+    instead of giving them into the rql string.
+    """
+
+    def commit_event(self):
+        """the observed connections pool has been commited,
+        remove deleted eids from repository type/source cache
+        """
+        self.repo.clear_caches(self.session.query_data('pendingeids', ()))
+        
+    def rollback_event(self):
+        """the observed connections pool has been rollbacked,
+        remove inserted eid from repository type/source cache
+        """
+        self.repo.clear_caches(self.session.query_data('neweids', ()))
+
+
+class FTIndexEntityOp(LateOperation):
+    """operation to delay entity full text indexation to commit
+
+    since fti indexing may trigger discovery of other entities, it should be
+    triggered on precommit, not commit, and this should be done after other
+    precommit operation which may add relations to the entity
+    """
+
+    def precommit_event(self):
+        session = self.session
+        entity = self.entity
+        if entity.eid in session.query_data('pendingeids', ()):
+            return # entity added and deleted in the same transaction
+        # drop the stale index entry, then reindex every fti container
+        session.repo.system_source.fti_unindex_entity(session, entity.eid)
+        for container in entity.fti_containers():
+            session.repo.index_entity(session, container)
+            
+    def commit_event(self):
+        # all the work is done in precommit_event
+        pass
+
+def del_existing_rel_if_needed(session, eidfrom, rtype, eidto):
+    """delete existing relation when adding a new one if card is 1 or ?
+
+    have to be done once the new relation has been inserted to avoid having
+    an entity without a relation for some time
+
+    this kind of behaviour has to be done in the repository so we don't have
+    hooks order hazards
+    """
+    # skip delete queries (only?) if session is an internal session. This is
+    # hooks responsability to ensure they do not violate relation's cardinality
+    if session.is_super_session:
+        return
+    card = rproperty(session, rtype, eidfrom, eidto, 'cardinality')
+    # one may be tempted to check for neweids but this may cause more than one
+    # relation even with '1?'  cardinality if those relations are added in the
+    # same transaction where the entity is being created. This never occurs from
+    # the web interface but may occur during test or dbapi connection (though
+    # not expected for this).  So: don't do it, we pretend to ensure repository
+    # consistency.
+    # XXX should probably not use unsafe_execute!
+    if card[0] in '1?':
+        # subject cardinality is 1/?: drop other relations from the subject
+        rschema = session.repo.schema.rschema(rtype)
+        if not rschema.inlined:
+            session.unsafe_execute('DELETE X %s Y WHERE X eid %%(x)s, NOT Y eid %%(y)s' % rtype,
+                                   {'x': eidfrom, 'y': eidto}, 'x')
+    if card[1] in '1?':
+        # object cardinality is 1/?: drop other relations to the object
+        session.unsafe_execute('DELETE X %s Y WHERE NOT X eid %%(x)s, Y eid %%(y)s' % rtype,
+                               {'x': eidfrom, 'y': eidto}, 'y')
+
+    
+class Repository(object):
+    """a repository provides access to a set of persistent storages for
+    entities and relations
+
+    XXX protect pyro access
+    """
+    
+    def __init__(self, config, vreg=None, debug=False):
+        """initialize the repository: build schema, sources, hooks manager and
+        connections pools according to `config`
+        """
+        self.config = config
+        if vreg is None:
+            vreg = CubicWebRegistry(config, debug)
+        self.vreg = vreg
+        self.pyro_registered = False
+        self.info('starting repository from %s', self.config.apphome)
+        # dictionary of opened sessions
+        self._sessions = {}
+        # list of functions to be called at regular interval
+        self._looping_tasks = []
+        # list of running threads
+        self._running_threads = []
+        # initial schema, should be built or replaced later
+        self.schema = CubicWebSchema(config.appid)
+        # querier helper, need to be created after sources initialization
+        self.querier = QuerierHelper(self, self.schema)
+        # sources
+        self.sources = []
+        self.sources_by_uri = {}
+        # FIXME: store additional sources info in the system database ?
+        # FIXME: sources should be ordered (add_entity priority)
+        for uri, source_config in config.sources().items():
+            if uri == 'admin':
+                # not an actual source
+                continue 
+            source = self.get_source(uri, source_config)
+            self.sources_by_uri[uri] = source
+            self.sources.append(source)
+        self.system_source = self.sources_by_uri['system']
+        # ensure system source is the first one
+        self.sources.remove(self.system_source)
+        self.sources.insert(0, self.system_source)
+        # cache eid -> type / source
+        self._type_source_cache = {}
+        # cache (extid, source uri) -> eid
+        self._extid_cache = {}
+        # create the hooks manager
+        self.hm = HooksManager(self.schema)
+        # open some connections pools
+        self._available_pools = Queue.Queue()
+        self._available_pools.put_nowait(ConnectionsPool(self.sources))
+        if config.read_application_schema:
+            # normal start: load the application schema from the database
+            self.fill_schema()
+        elif config.bootstrap_schema:
+            # usually during repository creation
+            self.warning("set fs application'schema as bootstrap schema")
+            config.bootstrap_cubes()
+            self.set_bootstrap_schema(self.config.load_schema())
+            # need to load the Any and EUser entity types
+            self.vreg.schema = self.schema
+            etdirectory = join(CW_SOFTWARE_ROOT, 'entities')
+            self.vreg.load_file(etdirectory, '__init__.py')
+            self.vreg.load_file(etdirectory, 'authobjs.py')
+        else:
+            # test start: use the file system schema (quicker)
+            self.warning("set fs application'schema")
+            config.bootstrap_cubes()
+            self.set_schema(self.config.load_schema())
+        if not config.creating:
+            if 'EProperty' in self.schema:
+                self.vreg.init_properties(self.properties())
+            # call source's init method to complete their initialisation if
+            # needed (for instance looking for persistent configuration using an
+            # internal session, which is not possible until pools have been
+            # initialized)
+            for source in self.sources:
+                source.init()
+            # call application level initialisation hooks
+            self.hm.call_hooks('server_startup', repo=self)
+            # register a task to cleanup expired session
+            self.looping_task(self.config['session-time']/3.,
+                              self.clean_sessions)
+        else:
+            # call init_creating so for instance native source can configure
+            # tsearch according to postgres version
+            for source in self.sources:
+                source.init_creating()
+        # close initialization pool and reopen fresh ones for proper
+        # initialization now that we know cubes
+        self._get_pool().close(True) 
+        for i in xrange(config['connections-pool-size']):
+            self._available_pools.put_nowait(ConnectionsPool(self.sources))
+     
+    # internals ###############################################################
+
+    def get_source(self, uri, source_config):
+        """instantiate the source identified by `uri` from its configuration"""
+        source_config['uri'] = uri
+        return get_source(source_config, self.schema, self)
+        
+    def set_schema(self, schema, resetvreg=True):
+        """set the application schema and propagate it to the querier, the
+        sources, the vregistry (unless `resetvreg` is False) and the hooks
+        manager
+        """
+        schema.rebuild_infered_relations()
+        self.info('set schema %s %#x', schema.name, id(schema))
+        self.debug(', '.join(sorted(str(e) for e in schema.entities())))
+        self.querier.set_schema(schema)
+        for source in self.sources:
+            source.set_schema(schema)
+        self.schema = schema
+        if resetvreg:
+            # full reload of all appobjects
+            self.vreg.reset()
+            self.vreg.set_schema(schema)
+        self.hm.set_schema(schema)
+        self.hm.register_system_hooks(self.config)
+        # application specific hooks
+        if self.config.application_hooks:
+            self.info('loading application hooks')
+            self.hm.register_hooks(self.config.load_hooks(self.vreg))
+
+    def fill_schema(self):
+        """load schema from the repository"""
+        from cubicweb.server.schemaserial import deserialize_schema
+        self.info('loading schema from the repository')
+        appschema = CubicWebSchema(self.config.appid)
+        self.set_bootstrap_schema(self.config.load_bootstrap_schema())
+        self.debug('deserializing db schema into %s %#x', appschema.name, id(appschema))
+        session = self.internal_session()
+        try:
+            try:
+                deserialize_schema(appschema, session)
+            except BadSchemaDefinition:
+                raise
+            except Exception, ex:
+                # any other error most likely means the db isn't initialized;
+                # re-raise with the original traceback preserved
+                raise Exception('Is the database initialised ? (cause: %s)' % 
+                                (ex.args and ex.args[0].strip() or 'unknown')), \
+                                None, sys.exc_info()[-1]
+            self.info('set the actual schema')
+            # XXX have to do this since EProperty isn't in the bootstrap schema
+            #     it'll be redone in set_schema
+            self.set_bootstrap_schema(appschema)
+            # 2.49 migration
+            if exists(join(self.config.apphome, 'vc.conf')):
+                session.set_pool()
+                if not 'template' in file(join(self.config.apphome, 'vc.conf')).read():
+                    # remaining from cubicweb < 2.38...
+                    session.execute('DELETE EProperty X WHERE X pkey "system.version.template"')
+                    session.commit()
+        finally:
+            session.close()
+        self.config.init_cubes(self.get_cubes())
+        self.set_schema(appschema)
+        
+    def set_bootstrap_schema(self, schema):
+        """disable hooks when setting a bootstrap schema, but restore
+        the configuration for the next time
+        """
+        config = self.config
+        # XXX refactor
+        config.core_hooks = False
+        config.usergroup_hooks = False
+        config.schema_hooks = False
+        config.notification_hooks = False
+        config.application_hooks = False
+        self.set_schema(schema, resetvreg=False)
+        config.core_hooks = True
+        config.usergroup_hooks = True
+        config.schema_hooks = True
+        config.notification_hooks = True
+        config.application_hooks = True
+            
+    def start_looping_tasks(self):
+        """start every registered looping task in its own LoopTask thread;
+        may only be called once (freezes the task list into a tuple)
+        """
+        assert isinstance(self._looping_tasks, list), 'already started'
+        for i, (interval, func) in enumerate(self._looping_tasks):
+            self._looping_tasks[i] = task = LoopTask(interval, func)
+            self.info('starting task %s with interval %.2fs', task.name,
+                      interval)
+            task.start()
+        # ensure no tasks will be further added
+        self._looping_tasks = tuple(self._looping_tasks)
+
+    def looping_task(self, interval, func):
+        """register a function to be called every `interval` seconds.
+        
+        looping tasks can only be registered during repository initialization,
+        once done this method will fail.
+        """
+        try:
+            self._looping_tasks.append( (interval, func) )
+        except AttributeError:
+            # _looping_tasks is a tuple once start_looping_tasks has run
+            raise RuntimeError("can't add looping task once the repository is started")
+
+    def threaded_task(self, func):
+        """start function in a separated thread"""
+        t = RepoThread(func, self._running_threads)
+        t.start()
+        
+    #@locked
+    def _get_pool(self):
+        """return an available connections pool, waiting up to 5 seconds
+        before giving up
+        """
+        try:
+            return self._available_pools.get(True, timeout=5)
+        except Queue.Empty:
+            raise Exception('no pool available after 5 secs, probably either a '
+                            'bug in code (to many uncommited/rollbacked '
+                            'connections) or to much load on the server (in '
+                            'which case you can try to set a bigger '
+                            'connections pools size)')
+        
+    def _free_pool(self, pool):
+        """rollback any pending work and put the pool back in the queue"""
+        pool.rollback()
+        self._available_pools.put_nowait(pool)
+
+    def pinfo(self):
+        """return a debug string describing pools usage: available pool count,
+        logins of sessions currently holding a pool, and the current thread
+        """
+        # XXX: session.pool is accessed from a local storage, would be interesting
+        #      to see if there is a pool set in any thread specific data)
+        import threading
+        return '%s: %s (%s)' % (self._available_pools.qsize(),
+                                ','.join(session.user.login for session in self._sessions.values()
+                                         if session.pool),
+                                threading.currentThread())
+    def shutdown(self):
+        """called on server stop event to properly close opened sessions and
+        connections
+        """
+        if isinstance(self._looping_tasks, tuple): # if tasks have been started
+            for looptask in self._looping_tasks:
+                self.info('canceling task %s...', looptask.name)
+                looptask.cancel()
+                looptask.join()
+                self.info('task %s finished', looptask.name)
+        for thread in self._running_threads:
+            self.info('waiting thread %s...', thread.name)
+            thread.join()
+            self.info('thread %s finished', thread.name)
+        self.hm.call_hooks('server_shutdown', repo=self)
+        self.close_sessions()
+        while not self._available_pools.empty():
+            pool = self._available_pools.get_nowait()
+            try:
+                pool.close(True)
+            except:
+                self.exception('error while closing %s' % pool)
+                continue
+        if self.pyro_registered:
+            # NOTE(review): pyro_unregister is not imported in this module's
+            # visible import block -- confirm it is defined/imported elsewhere
+            pyro_unregister(self.config)
+        # log cache efficiency statistics; ZeroDivisionError is expected when
+        # no query was executed at all
+        hits, misses = self.querier.cache_hit, self.querier.cache_miss
+        try:
+            self.info('rqlt st cache hit/miss: %s/%s (%s%% hits)', hits, misses,
+                      (hits * 100) / (hits + misses))
+            hits, misses = self.system_source.cache_hit, self.system_source.cache_miss
+            self.info('sql cache hit/miss: %s/%s (%s%% hits)', hits, misses,
+                      (hits * 100) / (hits + misses))
+            nocache  = self.system_source.no_cache
+            self.info('sql cache usage: %s/%s (%s%%)', hits+ misses, nocache,
+                      ((hits + misses) * 100) / (hits + misses + nocache))
+        except ZeroDivisionError:
+            pass
+        
+    def authenticate_user(self, session, login, password):
+        """validate login / password, raise AuthenticationError on failure
+        return associated EUser instance on success
+        """
+        # try each source supporting EUser until one authenticates the user
+        for source in self.sources:
+            if source.support_entity('EUser'):
+                try:
+                    eid = source.authenticate(session, login, password)
+                    break
+                except AuthenticationError:
+                    continue
+        else:
+            raise AuthenticationError('authentication failed with all sources')
+        euser = self._build_user(session, eid)
+        if self.config.consider_user_state and \
+               not euser.state in euser.AUTHENTICABLE_STATES:
+            raise AuthenticationError('user is not in authenticable state')
+        return euser
+
+    def _build_user(self, session, eid):
+        """fetch and return the EUser entity instance for the given eid"""
+        cls = self.vreg.etype_class('EUser')
+        rql = cls.fetch_rql(session.user, ['X eid %(x)s'])
+        rset = session.execute(rql, {'x': eid}, 'x')
+        assert len(rset) == 1, rset
+        euser = rset.get_entity(0, 0)
+        # prefetch / cache euser's groups and properties. This is especially
+        # useful for internal sessions to avoid security insertions
+        euser.groups
+        euser.properties
+        return euser
+        
+    # public (dbapi) interface ################################################
+            
+    def get_schema(self):
+        """return the application schema. This is a public method, not
+        requiring a session id
+        """
+        try:
+            # necessary to support pickling used by pyro: switch the schema's
+            # hash mode while it is being serialized, then restore it
+            self.schema.__hashmode__ = 'pickle'
+            return self.schema
+        finally:
+            self.schema.__hashmode__ = None
+
+    def get_cubes(self):
+        """return the list of cubes used by this application. This is a
+        public method, not requiring a session id.
+        """
+        # skip version consistency checking while the application is being
+        # created, since versions are not yet stored in the database
+        versions = self.get_versions(not self.config.creating)
+        cubes = list(versions)
+        # 'cubicweb' itself is in the versions dict but is not a cube
+        cubes.remove('cubicweb')
+        return cubes
+
+    @cached
+    def get_versions(self, checkversions=False):
+        """return a dictionary containing cubes used by this application
+        as key with their version as value, including cubicweb version. This is a
+        public method, not requiring a session id.
+
+        If `checkversions` is true, raise ExecutionError when a cube's database
+        version is older than the version installed on the file system.
+        """
+        from logilab.common.changelog import Version
+        vcconf = {}
+        session = self.internal_session()
+        try:
+            # version numbers are stored as system.version.<cube> EProperty
+            # entities
+            for pk, version in session.execute(
+                'Any K,V WHERE P is EProperty, P value V, P pkey K, '
+                'P pkey ~="system.version.%"', build_descr=False):
+                cube = pk.split('.')[-1]
+                # XXX cubicweb migration
+                if cube in CW_MIGRATION_MAP:
+                    cube = CW_MIGRATION_MAP[cube]
+                version = Version(version)
+                vcconf[cube] = version
+                if checkversions:
+                    if cube != 'cubicweb':
+                        fsversion = self.config.cube_version(cube)
+                    else:
+                        fsversion = self.config.cubicweb_version()
+                    if version < fsversion:
+                        msg = ('application has %s version %s but %s '
+                               'is installed. Run "cubicweb-ctl upgrade".')
+                        raise ExecutionError(msg % (cube, version, fsversion))
+        finally:
+            session.close()
+        return vcconf
+    
+    @cached
+    def source_defs(self):
+        """return a dictionary of source definitions, stripped of manager and
+        sensitive information so it can be safely exposed to clients
+        """
+        sources = self.config.sources().copy()
+        # remove manager information
+        sources.pop('admin', None)
+        # remove sensitive information
+        for uri, sourcedef in sources.iteritems():
+            # copy before mutating so the configuration's own dict is untouched
+            sourcedef = sourcedef.copy()
+            self.sources_by_uri[uri].remove_sensitive_information(sourcedef)
+            sources[uri] = sourcedef
+        return sources
+
+    def properties(self):
+        """return a result set containing system wide properties
+        (i.e. EProperty entities not bound to a specific user)
+        """
+        session = self.internal_session()
+        try:
+            return session.execute('Any K,V WHERE P is EProperty,'
+                                   'P pkey K, P value V, NOT P for_user U',
+                                   build_descr=False)
+        finally:
+            session.close()
+
+    def register_user(self, login, password, **kwargs):
+        """check a user with the given login exists, if not create it with the
+        given password. This method is designed to be used for anonymous
+        registration on public web site.
+
+        Extra keyword arguments are used as initial attribute values for the
+        new EUser entity.
+        """
+        session = self.internal_session()
+        try:
+            # silently do nothing if the login is already taken
+            if session.execute('EUser X WHERE X login %(login)s', {'login': login}):
+                return
+            # we have to create the user
+            user = self.vreg.etype_class('EUser')(session, None)
+            if isinstance(password, unicode):
+                # password should *always* be utf8 encoded
+                password = password.encode('UTF8')
+            kwargs['login'] = login
+            kwargs['upassword'] = password
+            user.update(kwargs)
+            self.glob_add_entity(session, user)
+            # new users are automatically put in the "users" group
+            session.execute('SET X in_group G WHERE X eid %(x)s, G name "users"',
+                            {'x': user.eid})
+            session.commit()
+        finally:
+            session.close()
+        
+    def connect(self, login, password, cnxprops=None):
+        """open a connection for a given user and return the new session's id
+
+        base_url may be needed to send mails
+        cnxtype indicate if this is a pyro connection or a in-memory connection
+        
+        raise `AuthenticationError` if the authentication failed
+        raise `ConnectionError` if we can't open a connection
+        """
+        # use an internal connection
+        session = self.internal_session()
+        # try to get a user object
+        try:
+            user = self.authenticate_user(session, login, password)
+        finally:
+            session.close()
+        # the user entity was fetched through the internal session; rebind it
+        # to the new user session and drop caches tied to the old session
+        session = Session(user, self, cnxprops)
+        user.req = user.rset.req = session
+        user.clear_related_cache()
+        self._sessions[session.id] = session
+        self.info('opened %s', session)
+        self.hm.call_hooks('session_open', session=session)
+        # commit session at this point in case write operation has been done
+        # during `session_open` hooks
+        session.commit()
+        return session.id
+
+    def execute(self, sessionid, rqlstring, args=None, eid_key=None, build_descr=True):
+        """execute a RQL query
+
+        * rqlstring should be an unicode string or a plain ascii string
+        * args the optional parameters used in the query
+        * eid_key optional key(s) in `args` pointing to eids, used for query
+          cache handling
+        * build_descr is a flag indicating if the description should be
+          built on select queries
+        """
+        session = self._get_session(sessionid, setpool=True)
+        try:
+            try:
+                return self.querier.execute(session, rqlstring, args, eid_key,
+                                            build_descr)
+            except (Unauthorized, RQLSyntaxError):
+                # expected client-side errors, propagate as is
+                raise
+            except ValidationError, ex:
+                # need ValidationError normalization here so error may pass
+                # through pyro: replace the entity instance by its eid
+                if hasattr(ex.entity, 'eid'):
+                    ex.entity = ex.entity.eid # error raised by yams
+                    args = list(ex.args)
+                    args[0] = ex.entity
+                    ex.args = tuple(args)
+                raise
+            except:
+                # FIXME: check error to catch internal errors
+                self.exception('unexpected error')
+                raise
+        finally:
+            # always give the pool back, whatever happened
+            session.reset_pool()
+    
+    def describe(self, sessionid, eid):
+        """return a tuple (type, source, extid) for the entity with id <eid>"""
+        session = self._get_session(sessionid, setpool=True)
+        try:
+            return self.type_and_source_from_eid(eid, session)
+        finally:
+            session.reset_pool()
+
+    def check_session(self, sessionid):
+        """raise `BadSessionId` if the connection is no more valid"""
+        # only the lookup side effect matters, the session itself is unused
+        self._get_session(sessionid, setpool=False)
+
+    def get_shared_data(self, sessionid, key, default=None, pop=False):
+        """return the value associated to `key` in the session's shared data
+        dictionary, or `default`; if `pop` is true the entry is removed
+        """
+        session = self._get_session(sessionid, setpool=False)
+        return session.get_shared_data(key, default, pop)
+
+    def set_shared_data(self, sessionid, key, value, querydata=False):
+        """set value associated to `key` in shared data
+
+        if `querydata` is true, the value will be added to the repository
+        session's query data which are cleared on commit/rollback of the current
+        transaction, and won't be available through the connection, only on the
+        repository side.
+        """
+        session = self._get_session(sessionid, setpool=False)
+        session.set_shared_data(key, value, querydata)
+
+    def commit(self, sessionid):
+        """commit transaction for the session with the given id"""
+        self.debug('begin commit for session %s', sessionid)
+        try:
+            self._get_session(sessionid, setpool=True).commit()
+        except (ValidationError, Unauthorized): 
+            # expected client-side errors, propagate without logging
+            raise
+        except:
+            self.exception('unexpected error')
+            raise
+        
+    def rollback(self, sessionid):
+        """rollback transaction for the session with the given id"""
+        self.debug('begin rollback for session %s', sessionid)
+        try:
+            self._get_session(sessionid, setpool=True).rollback()
+        except:
+            self.exception('unexpected error')
+            raise
+
+    def close(self, sessionid):
+        """close the session with the given id"""
+        session = self._get_session(sessionid, setpool=True)
+        # operations uncommitted before close are rolled back before hook is called
+        session.rollback()
+        self.hm.call_hooks('session_close', session=session)
+        # commit session at this point in case write operation has been done
+        # during `session_close` hooks
+        session.commit()
+        session.close()
+        del self._sessions[sessionid]
+        self.info('closed session %s for user %s', sessionid, session.user.login)
+    
+    def user_info(self, sessionid, props=None):
+        """this method should be used by client to:
+        * check session id validity
+        * update user information on each user's request (i.e. groups and
+          custom properties)
+
+        return a (eid, login, groups, properties) tuple for the session's user
+        """
+        session = self._get_session(sessionid, setpool=False)
+        if props:
+            # update session properties
+            for prop, value in props.items():
+                session.change_property(prop, value)
+        user = session.user
+        return user.eid, user.login, user.groups, user.properties
+            
+    # public (inter-repository) interface #####################################
+    
+    def entities_modified_since(self, etypes, mtime):
+        """function designed to be called from an external repository which
+        is using this one as a rql source for synchronization, and return a
+        3-uple containing :
+        * the local date
+        * list of (etype, eid) of entities of the given types which have been
+          modified since the given timestamp (actually entities whose full text
+          index content has changed)
+        * list of (etype, eid) of entities of the given types which have been
+          deleted since the given timestamp
+        """
+        session = self.internal_session()
+        # snapshot the local time *before* querying so the caller can safely
+        # use it as the next synchronization timestamp
+        updatetime = now()
+        try:
+            modentities, delentities = self.system_source.modified_entities(
+                session, etypes, mtime)
+            return updatetime, modentities, delentities
+        finally:
+            session.close()
+
+    # session handling ########################################################
+        
+    def close_sessions(self):
+        """close every opened sessions"""
+        # iterate over a copy of the keys since self.close() mutates the dict
+        for sessionid in self._sessions.keys():
+            try:
+                self.close(sessionid)
+            except:
+                # best effort: log and keep closing the remaining sessions
+                self.exception('error while closing session %s' % sessionid)
+
+    def clean_sessions(self):
+        """close sessions not used since an amount of time specified in the
+        configuration
+
+        return the number of sessions actually closed
+        """
+        mintime = time() - self.config['session-time']
+        self.debug('cleaning session unused since %s',
+                   strftime('%T', localtime(mintime)))
+        nbclosed = 0
+        for session in self._sessions.values():
+            if session.timestamp < mintime:
+                self.close(session.id)
+                nbclosed += 1
+        return nbclosed
+    
+    def internal_session(self, cnxprops=None):
+        """return a dbapi like connection/cursor using internal user which
+        have every rights on the repository. You'll *have to* commit/rollback
+        or close (rollback implicitly) the session once the job's done, else
+        you'll leak connections pool up to the time where no more pool is
+        available, causing irremediable freeze...
+        """
+        session = InternalSession(self, cnxprops)
+        # grab a pool immediately so the session is ready to execute queries
+        session.set_pool()
+        return session
+            
+    def _get_session(self, sessionid, setpool=False):
+        """return the session associated to the given session identifier,
+        raising BadConnectionId if it doesn't exist; if `setpool` is true a
+        connections pool is attached to the session before returning it
+        """
+        try:
+            session = self._sessions[sessionid]
+        except KeyError:
+            raise BadConnectionId('No such session %s' % sessionid)
+        if setpool:
+            session.set_pool()
+        return session
+
+    # data sources handling ###################################################
+    # * correspondance between eid and (type, source)
+    # * correspondance between eid and local id (i.e. specific to a given source)
+    # * searchable text indexes
+    
+    def type_and_source_from_eid(self, eid, session=None):
+        """return a tuple (type, source, extid) for the entity with id <eid>
+
+        results are cached in self._type_source_cache; when no session is
+        given an internal one is used and its pool released afterwards
+        """
+        try:
+            eid = typed_eid(eid)
+        except ValueError:
+            raise UnknownEid(eid)
+        try:
+            return self._type_source_cache[eid]
+        except KeyError:
+            if session is None:
+                session = self.internal_session()
+                reset_pool = True
+            else:
+                reset_pool = False
+            try:
+                etype, uri, extid = self.system_source.eid_type_source(session,
+                                                                       eid)
+            finally:
+                if reset_pool:
+                    session.reset_pool()
+        self._type_source_cache[eid] = (etype, uri, extid)
+        # also cache the reverse extid -> eid mapping for external sources
+        if uri != 'system':
+            self._extid_cache[(extid, uri)] = eid
+        return etype, uri, extid
+
+    def clear_caches(self, eids):
+        """remove all cached information (type/source, extid, rql cache and
+        per-source eid caches) about the given eids
+        """
+        etcache = self._type_source_cache
+        extidcache = self._extid_cache
+        rqlcache = self.querier._rql_cache
+        for eid in eids:
+            try:
+                etype, uri, extid = etcache.pop(typed_eid(eid)) # may be a string in some cases
+                rqlcache.pop('%s X WHERE X eid %s' % (etype, eid), None)
+                extidcache.pop((extid, uri), None)
+            except KeyError:
+                # not in the type/source cache; still clear the other caches
+                etype = None
+            rqlcache.pop('Any X WHERE X eid %s' % eid, None)
+            for source in self.sources:
+                source.clear_eid_cache(eid, etype)
+                
+    def type_from_eid(self, eid, session=None):
+        """return the type of the entity with id <eid>"""
+        # convenience accessor over type_and_source_from_eid
+        return self.type_and_source_from_eid(eid, session)[0]
+    
+    def source_from_eid(self, eid, session=None):
+        """return the source object for the given entity's eid"""
+        # the [1] item is the source uri, mapped back to the source object
+        return self.sources_by_uri[self.type_and_source_from_eid(eid, session)[1]]
+        
+    def eid2extid(self, source, eid, session=None):
+        """get local id from an eid, raising UnknownEid if the eid doesn't
+        belong to the given source
+        """
+        etype, uri, extid = self.type_and_source_from_eid(eid, session)
+        if source.uri != uri:
+            # eid not from the given source
+            raise UnknownEid(eid)
+        return extid
+
+    def extid2eid(self, source, lid, etype, session=None, insert=True):
+        """get eid from a local id. An eid is attributed if no record is found
+        (unless `insert` is false, in which case None is returned)
+        """
+        cachekey = (str(lid), source.uri)
+        try:
+            return self._extid_cache[cachekey]
+        except KeyError:
+            pass
+        reset_pool = False
+        if session is None:
+            session = self.internal_session()
+            reset_pool = True
+        eid = self.system_source.extid2eid(session, source, lid)
+        if eid is not None:
+            # known entity: cache the mapping both ways and return
+            self._extid_cache[cachekey] = eid
+            self._type_source_cache[eid] = (etype, source.uri, lid)
+            if reset_pool:
+                session.reset_pool()
+            return eid
+        if not insert:
+            return
+        # no link between lid and eid, create one using an internal session
+        # since the current session user may not have required permissions to
+        # do necessary stuff and we don't want to commit user session.
+        #
+        # Moreover, even if session is already an internal session but is
+        # processing a commit, we have to use another one
+        if not session.is_internal_session:
+            session = self.internal_session()
+            reset_pool = True
+        try:
+            eid = self.system_source.create_eid(session)
+            self._extid_cache[cachekey] = eid
+            self._type_source_cache[eid] = (etype, source.uri, lid)
+            entity = source.before_entity_insertion(session, lid, etype, eid)
+            if source.should_call_hooks:
+                self.hm.call_hooks('before_add_entity', etype, session, entity)
+            self.add_info(session, entity, source, lid)
+            source.after_entity_insertion(session, lid, entity)
+            if source.should_call_hooks:
+                self.hm.call_hooks('after_add_entity', etype, session, entity)
+            else:
+                # minimal meta-data
+                session.execute('SET X is E WHERE X eid %(x)s, E name %(name)s',
+                                {'x': entity.eid, 'name': entity.id}, 'x')
+            session.commit(reset_pool)
+            return eid
+        except:
+            # creation failed: undo everything and propagate
+            session.rollback(reset_pool)
+            raise
+        
+    def add_info(self, session, entity, source, extid=None, complete=True):
+        """add type and source info for an eid into the system table,
+        and index the entity with the full text index
+
+        if `complete` is true, indexable attributes are fetched first so the
+        full text index gets the whole content
+        """
+        # begin by inserting eid/type/source/extid into the entities table
+        self.system_source.add_info(session, entity, source, extid)
+        if complete:
+            entity.complete(entity.e_schema.indexable_attributes())
+        session.add_query_data('neweids', entity.eid)
+        # now we can update the full text index
+        FTIndexEntityOp(session, entity=entity)
+        CleanupEidTypeCacheOp(session)
+        
+    def delete_info(self, session, eid):
+        """remove system information (fti, entities table record) for the
+        entity with the given eid
+        """
+        self._prepare_delete_info(session, eid)
+        self._delete_info(session, eid)
+        
+    def _prepare_delete_info(self, session, eid):
+        """prepare the repository for deletion of an entity:
+        * update the fti
+        * mark eid as being deleted in session info
+        * setup cache update operation
+        """
+        self.system_source.fti_unindex_entity(session, eid)
+        pending = session.query_data('pendingeids', set(), setdefault=True)
+        pending.add(eid)
+        # caches will be cleared on commit
+        CleanupEidTypeCacheOp(session)
+        
+    def _delete_info(self, session, eid):
+        """delete system information on deletion of an entity:
+        * delete all relations on this entity
+        * transfer record from the entities table to the deleted_entities table
+        """
+        etype, uri, extid = self.type_and_source_from_eid(eid, session)
+        self._clear_eid_relations(session, etype, eid)
+        self.system_source.delete_info(session, eid, etype, uri, extid)
+        
+    def _clear_eid_relations(self, session, etype, eid):
+        """when a entity is deleted, build and execute rql query to delete all
+        its relations
+        """
+        rql = []
+        eschema = self.schema.eschema(etype)
+        for rschema, targetschemas, x in eschema.relation_definitions():
+            rtype = rschema.type
+            # 'identity' is a virtual relation, nothing to delete
+            if rtype == 'identity':
+                continue
+            # build a distinct variable name per (relation, role) pair
+            var = '%s%s' % (rtype.upper(), x.upper())
+            if x == 'subject':
+                # don't skip inlined relation so they are regularly
+                # deleted and so hooks are correctly called
+                rql.append('X %s %s' % (rtype, var))
+            else:
+                rql.append('%s %s X' % (var, rtype))
+        rql = 'DELETE %s WHERE X eid %%(x)s' % ','.join(rql)
+        # unsafe_execute since we suppose that if user can delete the entity,
+        # he can delete all its relations without security checking
+        session.unsafe_execute(rql, {'x': eid}, 'x', build_descr=False)
+
+    def index_entity(self, session, entity):
+        """full text index a modified entity"""
+        # 'indexedeids' query data prevents reindexing the same entity twice
+        # within one transaction
+        alreadydone = session.query_data('indexedeids', set(), setdefault=True)
+        if entity.eid in alreadydone:
+            self.info('skipping reindexation of %s, already done', entity.eid)
+            return
+        alreadydone.add(entity.eid)
+        self.system_source.fti_index_entity(session, entity)
+        
+    def locate_relation_source(self, session, subject, rtype, object):
+        """return the source where a relation between the given subject and
+        object eids should be stored: the entities' common source when both
+        live in the same source supporting the relation for writing, else the
+        system source; raise RTypeNotSupportedBySources when neither can
+        """
+        subjsource = self.source_from_eid(subject, session)
+        objsource = self.source_from_eid(object, session)
+        if not (subjsource is objsource and subjsource.support_relation(rtype, 1)):
+            source = self.system_source
+            if not source.support_relation(rtype, 1):
+                raise RTypeNotSupportedBySources(rtype)
+        else:
+            source = subjsource
+        return source
+    
+    def locate_etype_source(self, etype):
+        """return the first source supporting write access for the given
+        entity type, raising ETypeNotSupportedBySources when there is none
+        """
+        for source in self.sources:
+            if source.support_entity(etype, 1):
+                return source
+        else:
+            raise ETypeNotSupportedBySources(etype)
+        
+    def glob_add_entity(self, session, entity):
+        """add an entity to the repository
+        
+        the entity eid should originaly be None and a unique eid is assigned to
+        the entity instance; return the new eid
+        """
+        entity = entity.pre_add_hook()
+        eschema = entity.e_schema
+        etype = str(eschema)
+        source = self.locate_etype_source(etype)
+        # attribute an eid to the entity before calling hooks
+        entity.set_eid(self.system_source.create_eid(session))
+        entity._is_saved = False # entity has an eid but is not yet saved
+        relations = []
+        # if inlined relations are specified, fill entity's related cache to
+        # avoid unnecessary queries
+        for attr in entity.keys():
+            rschema = eschema.subject_relation(attr)
+            if not rschema.is_final(): # inlined relation
+                entity.set_related_cache(attr, 'subject',
+                                         entity.req.eid_rset(entity[attr]))
+                relations.append((attr, entity[attr]))
+        if source.should_call_hooks:
+            self.hm.call_hooks('before_add_entity', etype, session, entity)
+        entity.set_defaults()
+        entity.check(creation=True)
+        source.add_entity(session, entity)
+        if source.uri != 'system':
+            # remember the extid <-> eid mapping for external sources
+            extid = source.get_extid(entity)
+            self._extid_cache[(str(extid), source.uri)] = entity.eid
+        else:
+            extid = None
+        self.add_info(session, entity, source, extid, complete=False)
+        entity._is_saved = True # entity has an eid and is saved
+        #print 'added', entity#, entity.items()
+        # trigger after_add_entity after after_add_relation
+        if source.should_call_hooks:
+            self.hm.call_hooks('after_add_entity', etype, session, entity)
+            # call hooks for inlined relations
+            for attr, value in relations:
+                self.hm.call_hooks('before_add_relation', attr, session,
+                                    entity.eid, attr, value)
+                self.hm.call_hooks('after_add_relation', attr, session,
+                                    entity.eid, attr, value)
+        return entity.eid
+        
+    def glob_update_entity(self, session, entity):
+        """replace an entity in the repository
+        the type and the eid of an entity must not be changed
+        """
+        #print 'update', entity
+        entity.check()
+        etype = str(entity.e_schema)
+        eschema = entity.e_schema
+        # track whether only inlined relations are touched (then entity update
+        # hooks can be skipped) and whether fti reindexing is needed
+        only_inline_rels, need_fti_update = True, False
+        relations = []
+        for attr in entity.keys():
+            if attr == 'eid':
+                continue
+            rschema = eschema.subject_relation(attr)
+            if rschema.is_final():
+                if eschema.rproperty(attr, 'fulltextindexed'):
+                    need_fti_update = True
+                only_inline_rels = False
+            else:
+                # inlined relation
+                previous_value = entity.related(attr)
+                if previous_value:
+                    previous_value = previous_value[0][0] # got a result set
+                    self.hm.call_hooks('before_delete_relation', attr, session,
+                                       entity.eid, attr, previous_value)
+                entity.set_related_cache(attr, 'subject',
+                                         entity.req.eid_rset(entity[attr]))
+                relations.append((attr, entity[attr], previous_value))
+        source = self.source_from_eid(entity.eid, session)
+        if source.should_call_hooks:
+            # call hooks for inlined relations
+            for attr, value, _ in relations:
+                self.hm.call_hooks('before_add_relation', attr, session,
+                                    entity.eid, attr, value)
+            if not only_inline_rels:
+                self.hm.call_hooks('before_update_entity', etype, session,
+                                    entity)
+        source.update_entity(session, entity)
+        if not only_inline_rels:
+            if need_fti_update:
+                # reindex the entity only if this query is updating at least
+                # one indexable attribute
+                FTIndexEntityOp(session, entity=entity)
+            if source.should_call_hooks:
+                self.hm.call_hooks('after_update_entity', etype, session,
+                                    entity)
+        if source.should_call_hooks:
+            # 'after' hooks for the inlined relations changed above
+            for attr, value, prevvalue in relations:
+                if prevvalue:
+                    self.hm.call_hooks('after_delete_relation', attr, session,
+                                       entity.eid, attr, prevvalue)
+                del_existing_rel_if_needed(session, entity.eid, attr, value)
+                self.hm.call_hooks('after_add_relation', attr, session,
+                                    entity.eid, attr, value)
+
+    def glob_delete_entity(self, session, eid):
+        """delete an entity and all related entities from the repository"""
+        #print 'deleting', eid
+        # call delete_info before hooks
+        self._prepare_delete_info(session, eid)
+        etype, uri, extid = self.type_and_source_from_eid(eid, session)
+        source = self.sources_by_uri[uri]
+        if source.should_call_hooks:
+            self.hm.call_hooks('before_delete_entity', etype, session, eid)
+        self._delete_info(session, eid)
+        source.delete_entity(session, etype, eid)
+        if source.should_call_hooks:
+            self.hm.call_hooks('after_delete_entity', etype, session, eid)
+        # don't clear cache here this is done in a hook on commit
+        
+    def glob_add_relation(self, session, subject, rtype, object):
+        """add a relation to the repository"""
+        assert subject is not None
+        assert rtype
+        assert object is not None
+        source = self.locate_relation_source(session, subject, rtype, object)
+        #print 'adding', subject, rtype, object, 'to', source
+        if source.should_call_hooks:
+            # ensure cardinality constraints by removing an existing relation
+            # first when needed
+            del_existing_rel_if_needed(session, subject, rtype, object)
+            self.hm.call_hooks('before_add_relation', rtype, session,
+                               subject, rtype, object)
+        source.add_relation(session, subject, rtype, object)
+        if source.should_call_hooks:
+            self.hm.call_hooks('after_add_relation', rtype, session,
+                               subject, rtype, object)
+
+    def glob_delete_relation(self, session, subject, rtype, object):
+        """delete a relation from the repository"""
+        assert subject is not None
+        assert rtype
+        assert object is not None
+        source = self.locate_relation_source(session, subject, rtype, object)
+        #print 'delete rel', subject, rtype, object
+        if source.should_call_hooks:
+            self.hm.call_hooks('before_delete_relation', rtype, session,
+                               subject, rtype, object)
+        source.delete_relation(session, subject, rtype, object)
+        if self.schema.rschema(rtype).symetric:
+            # on symetric relation, we can't know in which sense it's
+            # stored so try to delete both
+            source.delete_relation(session, object, rtype, subject)
+        if source.should_call_hooks:
+            self.hm.call_hooks('after_delete_relation', rtype, session,
+                               subject, rtype, object)
+
+
+    # pyro handling ###########################################################
+    
+    def pyro_register(self, host=''):
+        """register the repository as a pyro object and return the pyro
+        daemon serving it
+        """
+        from Pyro import core
+        port = self.config['pyro-port']
+        nshost, nsgroup = self.config['pyro-ns-host'], self.config['pyro-ns-group']
+        # pyro group names are prefixed with ':'
+        nsgroup = ':' + nsgroup
+        core.initServer(banner=0)
+        daemon = core.Daemon(host=host, port=port)
+        daemon.useNameServer(self.pyro_nameserver(nshost, nsgroup))
+        # use Delegation approach
+        impl = core.ObjBase()
+        impl.delegateTo(self)
+        nsid = self.config['pyro-id'] or self.config.appid
+        daemon.connect(impl, '%s.%s' % (nsgroup, nsid))
+        msg = 'repository registered as a pyro object using group %s and id %s'
+        self.info(msg, nsgroup, nsid)
+        self.pyro_registered = True
+        return daemon
+    
+    def pyro_nameserver(self, host=None, group=None):
+        """locate and bind the name server to the daemon"""
+        from Pyro import naming, errors
+        # locate the name server
+        nameserver = naming.NameServerLocator().getNS(host)
+        if group is not None:
+            # make sure our namespace group exists
+            try:
+                nameserver.createGroup(group)
+            except errors.NamingError:
+                # group already exists, nothing to do
+                pass
+        return nameserver
+
+
+def pyro_unregister(config):
+    """unregister the repository from the pyro name server
+
+    failures (unreachable name server, already unregistered) are logged on
+    the configuration object but never raised
+    """
+    nshost, nsgroup = config['pyro-ns-host'], config['pyro-ns-group']
+    appid = config['pyro-id'] or config.appid
+    from Pyro import core, naming, errors
+    core.initClient(banner=False)
+    try:
+        nameserver = naming.NameServerLocator().getNS(nshost)
+    except errors.PyroError, ex:
+        # name server not responding
+        config.error('can\'t locate pyro name server: %s', ex)
+        return
+    try:
+        nameserver.unregister(':%s.%s' % (nsgroup, appid))
+        config.info('%s unregistered from pyro name server', appid)
+    except errors.NamingError:
+        config.warning('%s already unregistered from pyro name server', appid)
+
+
+from logging import getLogger
+from cubicweb import set_log_methods
+set_log_methods(Repository, getLogger('cubicweb.repository'))
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/rqlannotation.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,374 @@
+"""Functions to add additional annotations on a rql syntax tree to ease later
+code generation.
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from logilab.common.compat import any
+
+from rql.nodes import Relation, Exists, VariableRef, Constant, Variable, Or
+from rql.utils import common_parent
+
+from cubicweb import server
+
+def _annotate_select(annotator, rqlst):
+    """annotate a Select node of a rql syntax tree: tag each variable as
+    invariant or not (setting stinfo['invariant'] and stinfo['principal']),
+    and set the statement's `need_distinct` flag.
+
+    :param annotator: the SQLGenAnnotator instance driving the annotation
+    :param rqlst: the Select node to annotate
+    :return: True if the select contains a full-text query (has_text relation)
+    """
+    # recurse into subqueries first
+    for subquery in rqlst.with_:
+        annotator._annotate_union(subquery.query)
+    #if server.DEBUG:
+    #    print '-------- sql annotate', repr(rqlst)
+    getrschema = annotator.schema.rschema
+    has_text_query = False
+    need_distinct = rqlst.distinct
+    # first pass: detect constructs which may introduce duplicates in the
+    # generated sql, requiring a DISTINCT
+    for rel in rqlst.iget_nodes(Relation):
+        if rel.neged(strict=True):
+            if rel.is_types_restriction():
+                need_distinct = True
+            else:
+                rschema = getrschema(rel.r_type)
+                if rschema.inlined:
+                    try:
+                        var = rel.children[1].children[0].variable
+                    except AttributeError:
+                        pass # rewritten variable
+                    else:
+                        if not var.stinfo['constnode']:
+                            need_distinct = True
+        elif getrschema(rel.r_type).symetric:
+            # a symetric relation is looked up in both directions, which may
+            # yield the same row twice
+            for vref in rel.iget_nodes(VariableRef):
+                stinfo = vref.variable.stinfo
+                if not stinfo['constnode'] and stinfo['selected']:
+                    need_distinct = True
+                    # XXX could mark as not invariant
+                    break
+    # second pass: tag each variable as invariant or not
+    for name, var in rqlst.defined_vars.items():
+        stinfo = var.stinfo
+        if stinfo.get('ftirels'):
+            has_text_query = True
+        if stinfo['attrvar']:
+            # attribute variable: never invariant, pick a main relation among
+            # the relations where it's used as rhs
+            stinfo['invariant'] = False
+            stinfo['principal'] = _select_main_var(stinfo['rhsrelations'])
+            continue
+        if not stinfo['relations'] and not stinfo['typerels']:
+            # Any X, Any MAX(X)...
+            # those particular queries should be executed using the system
+            # entities table unless there is some type restriction
+            stinfo['invariant'] = True
+            stinfo['principal'] = None
+            continue
+        if any(rel for rel in stinfo['relations'] if rel.r_type == 'eid' and rel.operator() != '=') and \
+               not any(r for r in var.stinfo['relations'] - var.stinfo['rhsrelations']
+                       if r.r_type != 'eid' and (getrschema(r.r_type).inlined or getrschema(r.r_type).final)):
+            # Any X WHERE X eid > 2
+            # those particular queries should be executed using the system entities table
+            stinfo['invariant'] = True
+            stinfo['principal'] = None
+            continue
+        if stinfo['selected'] and var.valuable_references() == 1+bool(stinfo['constnode']):
+            # "Any X", "Any X, Y WHERE X attr Y"
+            stinfo['invariant'] = False
+            continue
+        # examine each relation the variable is involved in; `break` as soon
+        # as one of them forbids invariance (hence the for/else below)
+        joins = set()
+        invariant = False
+        for ref in var.references():
+            rel = ref.relation()
+            if rel is None or rel.is_types_restriction():
+                continue
+            lhs, rhs = rel.get_parts()
+            onlhs = ref is lhs
+            if rel.r_type == 'eid':
+                if not (onlhs and len(stinfo['relations']) > 1):
+                    break
+                if not stinfo['constnode']:
+                    joins.add(rel)
+                continue
+            elif rel.r_type == 'identity':
+                # identity can't be used as principal, so check other relation are used
+                # XXX explain rhs.operator == '='
+                if rhs.operator != '=' or len(stinfo['relations']) <= 1: #(stinfo['constnode'] and rhs.operator == '='):
+                    break
+                joins.add(rel)
+                continue
+            rschema = getrschema(rel.r_type)
+            if rel.optional:
+                if rel in stinfo['optrelations']:
+                    # optional variable can't be invariant if this is the lhs
+                    # variable of an inlined relation
+                    if not rel in stinfo['rhsrelations'] and rschema.inlined:
+                        break
+                else:
+                    # variable used as main variable of an optional relation
+                    # can't be invariant
+                    break
+            if rschema.final or (onlhs and rschema.inlined):
+                if rschema.type != 'has_text':
+                    # need join anyway if the variable appears in a final or
+                    # inlined relation
+                    break
+                joins.add(rel)
+                continue
+            if not stinfo['constnode']:
+                if rschema.inlined and rel.neged(strict=True):
+                    # if relation is inlined, can't be invariant if that
+                    # variable is used anywhere else.
+                    # see 'Any P WHERE NOT N ecrit_par P, N eid 512':
+                    # sql for 'NOT N ecrit_par P' is 'N.ecrit_par is NULL' so P
+                    # can use N.ecrit_par as principal
+                    if (stinfo['selected'] or len(stinfo['relations']) > 1):
+                        break
+                elif rschema.symetric and stinfo['selected']:
+                    break
+            joins.add(rel)
+        else:
+            # if there is at least one ambigous relation and no other to
+            # restrict types, can't be invariant since we need to filter out
+            # other types
+            if not annotator.is_ambiguous(var):
+                invariant = True
+        stinfo['invariant'] = invariant
+        if invariant and joins:
+            # remember rqlst/solutions analyze information
+            # we have to select a kindof "main" relation which will "extrajoins"
+            # the other
+            # priority should be given to relation which are not in inner queries
+            # (eg exists)
+            try:
+                stinfo['principal'] = _select_principal(var.sqlscope, joins)
+            except CantSelectPrincipal:
+                # no usable relation, give up on invariance for this variable
+                stinfo['invariant'] = False
+    rqlst.need_distinct = need_distinct
+    return has_text_query
+
+
+
+class CantSelectPrincipal(Exception): pass
+
+def _select_principal(sqlscope, relations):
+    """given a list of rqlst relations, select one which will be used to
+    represent an invariant variable (e.g. using on extremity of the relation
+    instead of the variable's type table
+    """
+    diffscope_rels = {}
+    has_same_scope_rel = False
+    ored_rels = set()
+    diffscope_rels = set()
+    for rel in relations:
+        # note: only eid and has_text among all final relations may be there
+        if rel.r_type in ('eid', 'identity'):
+            has_same_scope_rel = rel.sqlscope is sqlscope
+            continue
+        if rel.ored(traverse_scope=True):
+            ored_rels.add(rel)
+        elif rel.sqlscope is sqlscope:
+            return rel
+        elif not rel.neged(traverse_scope=True):
+            diffscope_rels.add(rel)
+    if len(ored_rels) > 1:
+        ored_rels_copy = tuple(ored_rels)
+        for rel1 in ored_rels_copy:
+            for rel2 in ored_rels_copy:
+                if rel1 is rel2:
+                    continue
+                if isinstance(common_parent(rel1, rel2), Or):
+                    ored_rels.discard(rel1)
+                    ored_rels.discard(rel2)
+    for rel in ored_rels:
+        if rel.sqlscope is sqlscope:
+            return rel
+        diffscope_rels.add(rel)
+    # if DISTINCT query, can use variable from a different scope as principal
+    # since introduced duplicates will be removed
+    if sqlscope.stmt.distinct and diffscope_rels:
+        return iter(diffscope_rels).next()
+    # XXX  could use a relation for a different scope if it can't generate
+    # duplicates, so we would have to check cardinality
+    raise CantSelectPrincipal()
+    
+
+def _select_main_var(relations):
+    """given a list of rqlst relations, select one which will be used as main
+    relation for the rhs variable
+    """
+    for rel in relations:
+        if rel.sqlscope is rel.stmt:
+            return rel
+        principal = rel
+    return principal
+
+
+def set_qdata(union, noinvariant):
+    """recursive function to set querier data on variables in the syntax tree
+    """
+    for select in union.children:
+        for subquery in select.with_:
+            set_qdata(subquery.query, noinvariant)
+        for var in select.defined_vars.itervalues():
+            if var.stinfo['invariant']:
+                if var in noinvariant and not var.stinfo['principal'].r_type == 'has_text':
+                    var._q_invariant = False
+                else:
+                    var._q_invariant = True
+            else:
+                var._q_invariant = False
+
+
+class SQLGenAnnotator(object):
+    def __init__(self, schema):
+        self.schema = schema
+        self.nfdomain = frozenset(eschema.type for eschema in schema.entities()
+                                  if not eschema.is_final())
+
+    def annotate(self, rqlst):
+        """add information to the rql syntax tree to help sources to do their
+        job (read sql generation)
+
+        a variable is tagged as invariant if:
+        * it's a non final variable
+        * it's not used as lhs in any final or inlined relation
+        * there is no type restriction on this variable (either explicit in the
+          syntax tree or because a solution for this variable has been removed
+          due to security filtering)
+        """
+        assert rqlst.TYPE == 'select', rqlst
+        rqlst.has_text_query = self._annotate_union(rqlst)
+
+    def _annotate_union(self, union):
+        has_text_query = False
+        for select in union.children:
+            htq = _annotate_select(self, select)
+            if htq:
+                has_text_query = True
+        return has_text_query
+
+
+    def is_ambiguous(self, var):
+        # ignore has_text relation
+        if len([rel for rel in var.stinfo['relations']
+                if rel.sqlscope is var.sqlscope and rel.r_type == 'has_text']) == 1:
+            return False
+        try:
+            data = var.stmt._deamb_data
+        except AttributeError: 
+            data = var.stmt._deamb_data = IsAmbData(self.schema, self.nfdomain)
+            data.compute(var.stmt)
+        return data.is_ambiguous(var)
+
+        
+class IsAmbData(object):
+    """helper computing which variables of a rql statement are ambiguous,
+    i.e. whose type can't be deduced from the query structure and so can't be
+    invariant
+    """
+    def __init__(self, schema, nfdomain):
+        self.schema = schema
+        # shortcuts
+        self.rschema = schema.rschema
+        self.eschema = schema.eschema
+        # domain for non final variables
+        self.nfdomain = nfdomain
+        # {var: possible solutions set}
+        self.varsols = {}
+        # set of ambiguous variables
+        self.ambiguousvars = set()
+        # remember if a variable has been deambiguified by another to avoid
+        # doing the opposite
+        self.deambification_map = {}
+        # not invariant variables (access to final.inlined relation)
+        self.not_invariants = set()
+        
+    def is_ambiguous(self, var):
+        """return True if the variable is (still) ambiguous"""
+        return var in self.ambiguousvars
+
+    def restrict(self, var, restricted_domain):
+        """restrict the variable's possible solutions; if they now match the
+        statically computed possible types, the variable is deambiguified
+        """
+        self.varsols[var] &= restricted_domain
+        if var in self.ambiguousvars and self.varsols[var] == var.stinfo['possibletypes']:
+            self.ambiguousvars.remove(var)
+    
+    def compute(self, rqlst):
+        """compute ambiguous variables for the given statement, iterating
+        until a fixed point is reached
+        """
+        # set domains for each variable
+        for varname, var in rqlst.defined_vars.iteritems():
+            if var.stinfo['uidrels'] or \
+                   self.eschema(rqlst.solutions[0][varname]).is_final():
+                # uid-restricted or final variable: types are already known
+                ptypes = var.stinfo['possibletypes']
+            else:
+                ptypes = set(self.nfdomain)
+                self.ambiguousvars.add(var)
+            self.varsols[var] = ptypes
+        if not self.ambiguousvars:
+            return
+        # apply relation restriction
+        self.maydeambrels = maydeambrels = {}
+        for rel in rqlst.iget_nodes(Relation):
+            if rel.is_types_restriction() or rel.r_type == 'eid':
+                continue
+            lhs, rhs = rel.get_variable_parts()
+            if isinstance(lhs, VariableRef) or isinstance(rhs, VariableRef):
+                rschema = self.rschema(rel.r_type)
+                if rschema.inlined or rschema.is_final():
+                    # the subject of a final or inlined relation can't be
+                    # invariant
+                    self.not_invariants.add(lhs.variable)
+                self.set_rel_constraint(lhs, rel, rschema.subjects)
+                self.set_rel_constraint(rhs, rel, rschema.objects)
+        # try to deambiguify more variables by considering other variables'type
+        modified = True
+        while modified and self.ambiguousvars:
+            modified = False
+            for var in self.ambiguousvars.copy():
+                try:
+                    for rel in (var.stinfo['relations'] & maydeambrels[var]):
+                        if self.deambiguifying_relation(var, rel):
+                            modified = True
+                            break
+                except KeyError:
+                    # no relation to deambiguify
+                    continue
+
+    def _debug_print(self):
+        # development helper, not called in production code
+        print 'varsols', dict((x, sorted(str(v) for v in values))
+                               for x, values in self.varsols.iteritems())
+        print 'ambiguous vars', sorted(self.ambiguousvars)
+
+    def set_rel_constraint(self, term, rel, etypes_func):
+        """restrict an ambiguous variable's domain using the given relation's
+        subject/object types (`etypes_func`), and remember the relation as a
+        potential deambiguifier
+        """
+        if isinstance(term, VariableRef) and self.is_ambiguous(term.variable):
+            var = term.variable
+            if len(var.stinfo['relations'] - var.stinfo['typerels']) == 1 \
+                   or rel.sqlscope is var.sqlscope:
+                self.restrict(var, frozenset(etypes_func()))
+                try:
+                    self.maydeambrels[var].add(rel)
+                except KeyError:
+                    self.maydeambrels[var] = set((rel,))
+        
+    def deambiguifying_relation(self, var, rel):
+        """try to deambiguify `var` using `rel` and the variable or constant on
+        the other side of it; return True on success
+        """
+        lhs, rhs = rel.get_variable_parts()
+        onlhs = var is getattr(lhs, 'variable', None)
+        other = onlhs and rhs or lhs
+        otheretypes = None
+        # XXX isinstance(other.variable, Variable) to skip column alias
+        if isinstance(other, VariableRef) and isinstance(other.variable, Variable):
+            deambiguifier = other.variable
+            if not var is self.deambification_map.get(deambiguifier):
+                # avoid deambiguifying a variable with one it deambiguified
+                if not var.stinfo['typerels']:
+                    otheretypes = deambiguifier.stinfo['possibletypes']
+                elif not self.is_ambiguous(deambiguifier):
+                    otheretypes = self.varsols[deambiguifier]
+                elif deambiguifier in self.not_invariants:
+                    # we know variable won't be invariant, try to use
+                    # it to deambguify the current variable
+                    otheretypes = self.varsols[deambiguifier]
+        elif isinstance(other, Constant) and other.uidtype:
+            otheretypes = (other.uidtype,)
+            deambiguifier = None
+        if otheretypes is not None:
+            # unless types for variable are already non-ambigous, check
+            # if this relation has some type ambiguity
+            rschema = self.rschema(rel.r_type)
+            if onlhs:
+                rtypefunc = rschema.subjects
+            else:
+                rtypefunc = rschema.objects
+            # NOTE(review): only the first otheretype whose relation types
+            # match possibletypes is used (return inside the loop) -- confirm
+            # this is intended
+            for otheretype in otheretypes:
+                reltypes = frozenset(rtypefunc(otheretype))
+                if var.stinfo['possibletypes'] != reltypes:
+                    break
+                self.restrict(var, reltypes)
+                self.deambification_map[var] = deambiguifier
+                return True
+        return False
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/rqlrewrite.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,395 @@
+"""RQL rewriting utilities, used for read security checking
+
+:organization: Logilab
+:copyright: 2007-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+
+from rql import nodes, stmts, TypeResolverException
+from cubicweb import Unauthorized, server, typed_eid
+from cubicweb.server.ssplanner import add_types_restriction
+
+def remove_solutions(origsolutions, solutions, defined):
+    """when a rqlst has been generated from another by introducing security
+    assertions, this method returns solutions which are contained in orig
+    solutions
+
+    :param origsolutions: solutions of the original syntax tree
+    :param solutions: solutions of the rewritten tree; kept solutions are
+      removed from this list in place
+    :param defined: the rewritten tree's defined variables dictionary
+    """
+    newsolutions = []
+    for origsol in origsolutions:
+        for newsol in solutions[:]:
+            for var, etype in origsol.items():
+                try:
+                    if newsol[var] != etype:
+                        try:
+                            # discarded type, drop it from the variable's
+                            # possible types as well
+                            defined[var].stinfo['possibletypes'].remove(newsol[var])
+                        except KeyError:
+                            pass
+                        break
+                except KeyError,ex:
+                    # variable has been rewritten
+                    continue
+            else:
+                # newsol agrees with origsol on every shared variable, keep it
+                newsolutions.append(newsol)
+                solutions.remove(newsol)
+    return newsolutions
+
+class Unsupported(Exception): pass
+        
+class RQLRewriter(object):
+    """insert some rql snippets into another rql syntax tree"""
+    def __init__(self, querier, session):
+        self.session = session
+        self.annotate = querier._rqlhelper.annotate
+        self._compute_solutions = querier.solutions
+        self.schema = querier.schema
+
+    def compute_solutions(self):
+        self.annotate(self.select)
+        try:
+            self._compute_solutions(self.session, self.select, self.kwargs)
+        except TypeResolverException:
+            raise Unsupported()
+        if len(self.select.solutions) < len(self.solutions):
+            raise Unsupported()
+        
+    def rewrite(self, select, snippets, solutions, kwargs):
+        if server.DEBUG:
+            print '---- rewrite', select, snippets, solutions
+        self.select = select
+        self.solutions = solutions
+        self.kwargs = kwargs
+        self.u_varname = None
+        self.removing_ambiguity = False
+        self.exists_snippet = {}
+        # we have to annotate the rqlst before inserting snippets, even though
+        # we'll have to redo it latter
+        self.annotate(select)
+        self.insert_snippets(snippets)
+        if not self.exists_snippet and self.u_varname:
+            # U has been inserted than cancelled, cleanup
+            select.undefine_variable(select.defined_vars[self.u_varname])
+        # clean solutions according to initial solutions
+        newsolutions = remove_solutions(solutions, select.solutions,
+                                        select.defined_vars)
+        assert len(newsolutions) >= len(solutions), \
+               'rewritten rql %s has lost some solutions, there is probably something '\
+               'wrong in your schema permission (for instance using a '\
+              'RQLExpression which insert a relation which doesn\'t exists in '\
+               'the schema)\nOrig solutions: %s\nnew solutions: %s' % (
+            select, solutions, newsolutions)
+        if len(newsolutions) > len(solutions):
+            # the snippet has introduced some ambiguities, we have to resolve them
+            # "manually"
+            variantes = self.build_variantes(newsolutions)
+            # insert "is" where necessary
+            varexistsmap = {}
+            self.removing_ambiguity = True
+            for (erqlexpr, mainvar, oldvarname), etype in variantes[0].iteritems():
+                varname = self.rewritten[(erqlexpr, mainvar, oldvarname)]
+                var = select.defined_vars[varname]
+                exists = var.references()[0].scope
+                exists.add_constant_restriction(var, 'is', etype, 'etype')
+                varexistsmap[mainvar] = exists
+            # insert ORED exists where necessary
+            for variante in variantes[1:]:
+                self.insert_snippets(snippets, varexistsmap)
+                for (erqlexpr, mainvar, oldvarname), etype in variante.iteritems():
+                    varname = self.rewritten[(erqlexpr, mainvar, oldvarname)]
+                    try:
+                        var = select.defined_vars[varname]
+                    except KeyError:
+                        # not a newly inserted variable
+                        continue
+                    exists = var.references()[0].scope
+                    exists.add_constant_restriction(var, 'is', etype, 'etype')
+            # recompute solutions
+            #select.annotated = False # avoid assertion error
+            self.compute_solutions()
+            # clean solutions according to initial solutions
+            newsolutions = remove_solutions(solutions, select.solutions,
+                                            select.defined_vars)
+        select.solutions = newsolutions
+        add_types_restriction(self.schema, select)
+        if server.DEBUG:
+            print '---- rewriten', select
+            
+    def build_variantes(self, newsolutions):
+        variantes = set()
+        for sol in newsolutions:
+            variante = []
+            for (erqlexpr, mainvar, oldvar), newvar in self.rewritten.iteritems():
+                variante.append( ((erqlexpr, mainvar, oldvar), sol[newvar]) )
+            variantes.add(tuple(variante))
+        # rebuild variantes as dict
+        variantes = [dict(variante) for variante in variantes]
+        # remove variable which have always the same type
+        for erqlexpr, mainvar, oldvar in self.rewritten:
+            it = iter(variantes)
+            etype = it.next()[(erqlexpr, mainvar, oldvar)]
+            for variante in it:
+                if variante[(erqlexpr, mainvar, oldvar)] != etype:
+                    break
+            else:
+                for variante in variantes:
+                    del variante[(erqlexpr, mainvar, oldvar)]
+        return variantes
+    
+    def insert_snippets(self, snippets, varexistsmap=None):
+        self.rewritten = {}
+        for varname, erqlexprs in snippets:
+            if varexistsmap is not None and not varname in varexistsmap:
+                continue
+            try:
+                self.const = typed_eid(varname)
+                self.varname = self.const
+                self.rhs_rels = self.lhs_rels = {}
+            except ValueError:
+                self.varname = varname
+                self.const = None
+                self.varstinfo = stinfo = self.select.defined_vars[varname].stinfo
+                if varexistsmap is None:
+                    self.rhs_rels = dict( (rel.r_type, rel) for rel in stinfo['rhsrelations'])
+                    self.lhs_rels = dict( (rel.r_type, rel) for rel in stinfo['relations']
+                                                  if not rel in stinfo['rhsrelations'])
+                else:
+                    self.rhs_rels = self.lhs_rels = {}
+            parent = None
+            inserted = False
+            for erqlexpr in erqlexprs:
+                self.current_expr = erqlexpr
+                if varexistsmap is None:
+                    try:
+                        new = self.insert_snippet(varname, erqlexpr.snippet_rqlst, parent)
+                    except Unsupported:
+                        continue
+                    inserted = True
+                    if new is not None:
+                        self.exists_snippet[erqlexpr] = new
+                    parent = parent or new
+                else:
+                    # called to reintroduce snippet due to ambiguity creation,
+                    # so skip snippets which are not introducing this ambiguity
+                    exists = varexistsmap[varname]
+                    if self.exists_snippet[erqlexpr] is exists:
+                        self.insert_snippet(varname, erqlexpr.snippet_rqlst, exists)
+            if varexistsmap is None and not inserted:
+                # no rql expression found matching rql solutions. User has no access right
+                raise Unauthorized()
+            
+    def insert_snippet(self, varname, snippetrqlst, parent=None):
+        new = snippetrqlst.where.accept(self)
+        if new is not None:
+            try:
+                var = self.select.defined_vars[varname]
+            except KeyError:
+                # not a variable
+                pass
+            else:
+                if var.stinfo['optrelations']:
+                    # use a subquery
+                    subselect = stmts.Select()
+                    subselect.append_selected(nodes.VariableRef(subselect.get_variable(varname)))
+                    subselect.add_restriction(new.copy(subselect))
+                    aliases = [varname]
+                    for rel in var.stinfo['relations']:
+                        rschema = self.schema.rschema(rel.r_type)
+                        if rschema.is_final() or (rschema.inlined and not rel in var.stinfo['rhsrelations']):
+                            self.select.remove_node(rel)
+                            rel.children[0].name = varname
+                            subselect.add_restriction(rel.copy(subselect))
+                            for vref in rel.children[1].iget_nodes(nodes.VariableRef):
+                                subselect.append_selected(vref.copy(subselect))
+                                aliases.append(vref.name)
+                    if self.u_varname:
+                        # generate an identifier for the substitution
+                        argname = subselect.allocate_varname()
+                        while argname in self.kwargs:
+                            argname = subselect.allocate_varname()
+                        subselect.add_constant_restriction(subselect.get_variable(self.u_varname),
+                                                        'eid', unicode(argname), 'Substitute')
+                        self.kwargs[argname] = self.session.user.eid
+                    add_types_restriction(self.schema, subselect, subselect, solutions=self.solutions)
+                    assert parent is None
+                    myunion = stmts.Union()
+                    myunion.append(subselect)
+                    aliases = [nodes.VariableRef(self.select.get_variable(name, i))
+                               for i, name in enumerate(aliases)]
+                    self.select.add_subquery(nodes.SubQuery(aliases, myunion), check=False)
+                    self._cleanup_inserted(new)
+                    try:
+                        self.compute_solutions()
+                    except Unsupported:
+                        # some solutions have been lost, can't apply this rql expr
+                        self.select.remove_subquery(new, undefine=True)
+                        raise
+                    return
+            new = nodes.Exists(new)
+            if parent is None:
+                self.select.add_restriction(new)
+            else:
+                grandpa = parent.parent
+                or_ = nodes.Or(parent, new)
+                grandpa.replace(parent, or_)
+            if not self.removing_ambiguity:
+                try:
+                    self.compute_solutions()
+                except Unsupported:
+                    # some solutions have been lost, can't apply this rql expr
+                    if parent is None:
+                        self.select.remove_node(new, undefine=True)
+                    else:
+                        parent.parent.replace(or_, or_.children[0])
+                        self._cleanup_inserted(new)
+                    raise 
+            return new
+
+    def _cleanup_inserted(self, node):
+        # cleanup inserted variable references
+        for vref in node.iget_nodes(nodes.VariableRef):
+            vref.unregister_reference()
+            if not vref.variable.stinfo['references']:
+                # no more references, undefine the variable
+                del self.select.defined_vars[vref.name]
+        
+    def _visit_binary(self, node, cls):
+        newnode = cls()
+        for c in node.children:
+            new = c.accept(self)
+            if new is None:
+                continue
+            newnode.append(new)
+        if len(newnode.children) == 0:
+            return None
+        if len(newnode.children) == 1:
+            return newnode.children[0]
+        return newnode
+
+    def _visit_unary(self, node, cls):
+        newc = node.children[0].accept(self)
+        if newc is None:
+            return None
+        newnode = cls()
+        newnode.append(newc)
+        return newnode 
+        
+    def visit_and(self, et):
+        return self._visit_binary(et, nodes.And)
+
+    def visit_or(self, ou):
+        return self._visit_binary(ou, nodes.Or)
+        
+    def visit_not(self, node):
+        return self._visit_unary(node, nodes.Not)
+
+    def visit_exists(self, node):
+        return self._visit_unary(node, nodes.Exists)
+   
+    def visit_relation(self, relation):
+        lhs, rhs = relation.get_variable_parts()
+        if lhs.name == 'X':
+            # on lhs
+            # see if we can reuse this relation
+            if relation.r_type in self.lhs_rels and isinstance(rhs, nodes.VariableRef) and rhs.name != 'U':
+                if self._may_be_shared(relation, 'object'):
+                    # ok, can share variable
+                    term = self.lhs_rels[relation.r_type].children[1].children[0]
+                    self._use_outer_term(rhs.name, term)
+                    return
+        elif isinstance(rhs, nodes.VariableRef) and rhs.name == 'X' and lhs.name != 'U':
+            # on rhs
+            # see if we can reuse this relation
+            if relation.r_type in self.rhs_rels and self._may_be_shared(relation, 'subject'):
+                # ok, can share variable
+                term = self.rhs_rels[relation.r_type].children[0]
+                self._use_outer_term(lhs.name, term)            
+                return
+        rel = nodes.Relation(relation.r_type, relation.optional)
+        for c in relation.children:
+            rel.append(c.accept(self))
+        return rel
+
+    def visit_comparison(self, cmp):
+        cmp_ = nodes.Comparison(cmp.operator)
+        for c in cmp.children:
+            cmp_.append(c.accept(self))
+        return cmp_
+
+    def visit_mathexpression(self, mexpr):
+        cmp_ = nodes.MathExpression(cmp.operator)
+        for c in cmp.children:
+            cmp_.append(c.accept(self))
+        return cmp_
+        
+    def visit_function(self, function):
+        """generate filter name for a function"""
+        function_ = nodes.Function(function.name)
+        for c in function.children:
+            function_.append(c.accept(self))
+        return function_
+
+    def visit_constant(self, constant):
+        """generate filter name for a constant"""
+        return nodes.Constant(constant.value, constant.type)
+
+    def visit_variableref(self, vref):
+        """get the sql name for a variable reference"""
+        if vref.name == 'X':
+            if self.const is not None:
+                return nodes.Constant(self.const, 'Int')
+            return nodes.VariableRef(self.select.get_variable(self.varname))
+        vname_or_term = self._get_varname_or_term(vref.name)
+        if isinstance(vname_or_term, basestring):
+            return nodes.VariableRef(self.select.get_variable(vname_or_term))
+        # shared term
+        return vname_or_term.copy(self.select)
+
+    def _may_be_shared(self, relation, target):
+        """return True if the snippet relation can be skipped to use a relation
+        from the original query
+        """
+        # if cardinality is in '?1', we can ignore the relation and use variable
+        # from the original query
+        rschema = self.schema.rschema(relation.r_type)
+        if target == 'object':
+            cardindex = 0
+            ttypes_func = rschema.objects
+            rprop = rschema.rproperty
+        else: # target == 'subject':
+            cardindex = 1
+            ttypes_func = rschema.subjects
+            rprop = lambda x,y,z: rschema.rproperty(y, x, z)
+        for etype in self.varstinfo['possibletypes']:
+            for ttype in ttypes_func(etype):
+                if rprop(etype, ttype, 'cardinality')[cardindex] in '+*':
+                    return False
+        return True
+
+    def _use_outer_term(self, snippet_varname, term):
+        key = (self.current_expr, self.varname, snippet_varname)
+        if key in self.rewritten:
+            insertedvar = self.select.defined_vars.pop(self.rewritten[key])
+            for inserted_vref in insertedvar.references():
+                inserted_vref.parent.replace(inserted_vref, term.copy(self.select))
+        self.rewritten[key] = term
+        
+    def _get_varname_or_term(self, vname):
+        if vname == 'U':
+            if self.u_varname is None:
+                select = self.select
+                self.u_varname = select.allocate_varname()
+                # generate an identifier for the substitution
+                argname = select.allocate_varname()
+                while argname in self.kwargs:
+                    argname = select.allocate_varname()
+                # insert "U eid %(u)s"
+                var = select.get_variable(self.u_varname)
+                select.add_constant_restriction(select.get_variable(self.u_varname),
+                                                'eid', unicode(argname), 'Substitute')
+                self.kwargs[argname] = self.session.user.eid
+            return self.u_varname
+        key = (self.current_expr, self.varname, vname)
+        try:
+            return self.rewritten[key]
+        except KeyError:
+            self.rewritten[key] = newvname = self.select.allocate_varname()
+            return newvname
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/schemahooks.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,920 @@
+"""schema hooks:
+
+- synchronize the living schema object with the persistent schema
+- perform physical update on the source when necessary
+
+checking for schema consistency is done in hooks.py
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from yams.schema import BASE_TYPES
+from yams.buildobjs import EntityType, RelationType, RelationDefinition
+from yams.schema2sql import eschema2sql, rschema2sql, _type_from_constraints
+
+from cubicweb import ValidationError, RepositoryError
+from cubicweb.server import schemaserial as ss
+from cubicweb.server.pool import Operation, SingleLastOperation, PreCommitOperation
+from cubicweb.server.hookhelper import (entity_attr, entity_name,
+                                     check_internal_entity)
+    
+# core entity and relation types which can't be removed
+CORE_ETYPES = list(BASE_TYPES) + ['EEType', 'ERType', 'EUser', 'EGroup',
+                                  'EConstraint', 'EFRDef', 'ENFRDef']
+CORE_RTYPES = ['eid', 'creation_date', 'modification_date',
+               'login', 'upassword', 'name',
+               'is', 'instanceof', 'owned_by', 'created_by', 'in_group',
+               'relation_type', 'from_entity', 'to_entity',
+               'constrainted_by',
+               'read_permission', 'add_permission',
+               'delete_permission', 'updated_permission',
+               ]
+
+def get_constraints(session, entity):
+    constraints = []
+    for cstreid in session.query_data(entity.eid, ()):
+        cstrent = session.entity(cstreid)
+        cstr = CONSTRAINTS[cstrent.type].deserialize(cstrent.value)
+        cstr.eid = cstreid
+        constraints.append(cstr)
+    return constraints
+
+def add_inline_relation_column(session, etype, rtype):
+    """add necessary column and index for an inlined relation"""
+    try:
+        session.system_sql(str('ALTER TABLE %s ADD COLUMN %s integer'
+                               % (etype, rtype)))
+        session.info('added column %s to table %s', rtype, etype)
+    except:
+        # silent exception here, if this error has not been raised because the 
+        # column already exists, index creation will fail anyway
+        session.exception('error while adding column %s to table %s', etype, rtype)
+    # create index before the ALTER TABLE, which may be expected to fail during tests
+    # (sqlite) while index creation should never fail (test for index existence
+    # is done by the dbhelper)
+    session.pool.source('system').create_index(session, etype, rtype)
+    session.info('added index on %s(%s)', etype, rtype)
+    session.add_query_data('createdattrs', '%s.%s' % (etype, rtype))
+
+
+class SchemaOperation(Operation):
+    """base class for schema operations"""
+    def __init__(self, session, kobj=None, **kwargs):
+        self.schema = session.repo.schema
+        self.kobj = kobj
+        # once Operation.__init__ has been called, event may be triggered, so
+        # do this last !
+        Operation.__init__(self, session, **kwargs)
+        # every schema operation is triggering a schema update
+        UpdateSchemaOp(session)
+        
+class EarlySchemaOperation(SchemaOperation):
+    def insert_index(self):
+        """schema operations which are inserted at the beginning of the queue
+        (typically to add/remove entity or relation types)
+        """
+        i = -1
+        for i, op in enumerate(self.session.pending_operations):
+            if not isinstance(op, EarlySchemaOperation):
+                return i
+        return i + 1
+    
+class UpdateSchemaOp(SingleLastOperation):
+    """the update schema operation:
+
+    special operation which should be called once and after all other schema
+    operations. It will trigger internal structures rebuilding to consider
+    schema changes
+    """
+    
+    def __init__(self, session):
+        self.repo = session.repo
+        SingleLastOperation.__init__(self, session)
+        
+    def commit_event(self):
+        self.repo.set_schema(self.repo.schema)
+
+        
+class DropTableOp(PreCommitOperation):
+    """actually remove a database table from the application's schema"""
+    def precommit_event(self):
+        dropped = self.session.query_data('droppedtables',
+                                          default=set(), setdefault=True)
+        if self.table in dropped:
+            return # already processed
+        dropped.add(self.table)
+        self.session.system_sql('DROP TABLE %s' % self.table)
+        self.info('dropped table %s', self.table)
+        
+class DropColumnOp(PreCommitOperation):
+    """actually remove the attribute's column from entity table in the system
+    database
+    """
+    def precommit_event(self):
+        session, table, column = self.session, self.table, self.column
+        # drop index if any
+        session.pool.source('system').drop_index(session, table, column)
+        try:
+            session.system_sql('ALTER TABLE %s DROP COLUMN %s'
+                               % (table, column))
+            self.info('dropped column %s from table %s', column, table)
+        except Exception, ex:
+            # not supported by sqlite for instance
+            self.error('error while altering table %s: %s', table, ex)
+            
+
+# deletion ####################################################################
+
+class DeleteEETypeOp(SchemaOperation):
+    """actually remove the entity type from the application's schema"""    
+    def commit_event(self):
+        try:
+            # del_entity_type also removes entity's relations
+            self.schema.del_entity_type(self.kobj)
+        except KeyError:
+            # the entity type may have already been deleted
+            pass
+
+def before_del_eetype(session, eid):
+    """before deleting a EEType entity:
+    * check that we don't remove a core entity type
+    * cascade to delete related EFRDef and ENFRDef entities
+    * instantiate an operation to delete the entity type on commit
+    """
+    # final entities can't be deleted, don't care about that
+    name = check_internal_entity(session, eid, CORE_ETYPES)
+    # delete all entities of this type
+    session.unsafe_execute('DELETE %s X' % name)
+    DropTableOp(session, table=name)
+    DeleteEETypeOp(session, name)
+
+def after_del_eetype(session, eid):
+    # workflow cleanup
+    session.execute('DELETE State X WHERE NOT X state_of Y')
+    session.execute('DELETE Transition X WHERE NOT X transition_of Y')
+
+        
+class DeleteERTypeOp(SchemaOperation):
+    """actually remove the relation type from the application's schema"""    
+    def commit_event(self):
+        try:
+            self.schema.del_relation_type(self.kobj)
+        except KeyError:
+            # the relation type may have already been deleted
+            pass
+
+def before_del_ertype(session, eid):
+    """before deleting a ERType entity:
+    * check that we don't remove a core relation type
+    * cascade to delete related EFRDef and ENFRDef entities
+    * instantiate an operation to delete the relation type on commit
+    """
+    name = check_internal_entity(session, eid, CORE_RTYPES)
+    # delete relation definitions using this relation type
+    session.execute('DELETE EFRDef X WHERE X relation_type Y, Y eid %(x)s',
+                    {'x': eid})
+    session.execute('DELETE ENFRDef X WHERE X relation_type Y, Y eid %(x)s',
+                    {'x': eid})
+    DeleteERTypeOp(session, name)
+
+    
+class DelErdefOp(SchemaOperation):
+    """actually remove the relation definition from the application's schema"""
+    def commit_event(self):
+        subjtype, rtype, objtype = self.kobj
+        try:
+            self.schema.del_relation_def(subjtype, rtype, objtype)
+        except KeyError:
+            # relation type may have been already deleted
+            pass
+        
+def after_del_relation_type(session, rdefeid, rtype, rteid):
+    """after deleting a EFRDef or ENFRDef entity:
+    * if this is a final or inlined relation definition, instantiate an
+      operation to drop necessary column, else if this is the last instance
+      of a non final relation, instantiate an operation to drop necessary
+      table
+    * instantiate an operation to delete the relation definition on commit
+    * delete the associated relation type when necessary
+    """
+    subjschema, rschema, objschema = session.repo.schema.schema_by_eid(rdefeid)
+    pendings = session.query_data('pendingeids', ())
+    # first delete existing relation if necessary
+    if rschema.is_final():
+        rdeftype = 'EFRDef'
+    else:
+        rdeftype = 'ENFRDef'
+        if not (subjschema.eid in pendings or objschema.eid in pendings):
+            session.execute('DELETE X %s Y WHERE X is %s, Y is %s'
+                            % (rschema, subjschema, objschema))
+    execute = session.unsafe_execute
+    rset = execute('Any COUNT(X) WHERE X is %s, X relation_type R,'
+                   'R eid %%(x)s' % rdeftype, {'x': rteid})
+    lastrel = rset[0][0] == 0
+    # we have to update physical schema systematically for final and inlined
+    # relations, but only if it's the last instance for this relation type
+    # for other relations
+    
+    if (rschema.is_final() or rschema.inlined):
+        rset = execute('Any COUNT(X) WHERE X is %s, X relation_type R, '
+                       'R eid %%(x)s, X from_entity E, E name %%(name)s'
+                       % rdeftype, {'x': rteid, 'name': str(subjschema)})
+        if rset[0][0] == 0 and not subjschema.eid in pendings:
+            DropColumnOp(session, table=subjschema.type, column=rschema.type)
+    elif lastrel:
+        DropTableOp(session, table='%s_relation' % rschema.type)
+    # if this is the last instance, drop associated relation type
+    if lastrel and not rteid in pendings:
+        execute('DELETE ERType X WHERE X eid %(x)s', {'x': rteid}, 'x')
+    DelErdefOp(session, (subjschema, rschema, objschema))
+
+        
+# addition ####################################################################
+
+class AddEETypeOp(EarlySchemaOperation):
+    """actually add the entity type to the application's schema"""    
+    def commit_event(self):
+        eschema = self.schema.add_entity_type(self.kobj)
+        eschema.eid = self.eid
+        
+def before_add_eetype(session, entity):
+    """before adding a EEType entity:
+    * check that we are not using an existing entity type,
+    """
+    name = entity['name']
+    schema = session.repo.schema
+    if name in schema and schema[name].eid is not None:
+        raise RepositoryError('an entity type %s already exists' % name)
+
+def after_add_eetype(session, entity):
+    """after adding a EEType entity:
+    * create the necessary table
+    * set creation_date and modification_date by creating the necessary
+      EFRDef entities
+    * add owned_by relation by creating the necessary ENFRDef entity
+    * register an operation to add the entity type to the application's
+      schema on commit
+    """
+    if entity.get('final'):
+        return
+    schema = session.repo.schema
+    name = entity['name']
+    etype = EntityType(name=name, description=entity.get('description'),
+                       meta=entity.get('meta')) # don't care about final
+    # fake we add it to the schema now to get a correctly initialized schema
+    # but remove it before doing anything more dangerous...
+    schema = session.repo.schema
+    eschema = schema.add_entity_type(etype)
+    eschema.set_default_groups()
+    # generate table sql and rql to add metadata
+    tablesql = eschema2sql(session.pool.source('system').dbhelper, eschema)
+    relrqls = []
+    for rtype in ('is', 'is_instance_of', 'creation_date', 'modification_date',
+                  'created_by', 'owned_by'):
+        rschema = schema[rtype]
+        sampletype = rschema.subjects()[0]
+        desttype = rschema.objects()[0]
+        props = rschema.rproperties(sampletype, desttype)
+        relrqls += list(ss.rdef2rql(rschema, name, desttype, props))
+    # now remove it !
+    schema.del_entity_type(name)
+    # create the necessary table
+    for sql in tablesql.split(';'):
+        if sql.strip():
+            session.system_sql(sql)
+    # register operation to modify the schema on commit
+    # this has to be done before adding other relation definitions
+    # or permission settings
+    AddEETypeOp(session, etype, eid=entity.eid)
+    # add meta creation_date, modification_date and owned_by relations
+    for rql, kwargs in relrqls:
+        session.execute(rql, kwargs)
+
+
+class AddERTypeOp(EarlySchemaOperation):
+    """actually add the relation type to the application's schema"""    
+    def commit_event(self):
+        rschema = self.schema.add_relation_type(self.kobj)
+        rschema.set_default_groups()
+        rschema.eid = self.eid
+        
+def before_add_ertype(session, entity):
+    """before adding a ERType entity:
+    * check that we are not using an existing relation type,
+    * register an operation to add the relation type to the application's
+      schema on commit
+      
+    We don't know yet at this point whether a table is necessary
+    """
+    name = entity['name']
+    if name in session.repo.schema.relations():
+        raise RepositoryError('a relation type %s already exists' % name)
+    
+def after_add_ertype(session, entity):
+    """after a ERType entity has been added:
+    * register an operation to add the relation type to the application's
+      schema on commit
+    We don't know yet at this point whether a table is necessary
+    """
+    AddERTypeOp(session, RelationType(name=entity['name'],
+                                      description=entity.get('description'),
+                                      meta=entity.get('meta', False),
+                                      inlined=entity.get('inlined', False),
+                                      symetric=entity.get('symetric', False)),
+                eid=entity.eid)
+
+
+class AddErdefOp(EarlySchemaOperation):
+    """actually add the attribute relation definition to the application's
+    schema
+    """    
+    def commit_event(self):
+        self.schema.add_relation_def(self.kobj)
+
+TYPE_CONVERTER = {
+    'Boolean': bool,
+    'Int': int,
+    'Float': float,
+    'Password': str,
+    'String': unicode,
+    'Date' : unicode, 
+    'Datetime' : unicode,
+    'Time' : unicode,
+    }
+
+
+class AddEFRDefPreCommitOp(PreCommitOperation):
+    """an attribute relation (EFRDef) has been added:
+    * add the necessary column
+    * set default on this column if any and possible
+    * register an operation to add the relation definition to the
+      application's schema on commit
+      
+    constraints are handled by specific hooks
+    """
+    def precommit_event(self):
+        session = self.session
+        entity = self.entity
+        fromentity = entity.from_entity[0]
+        relationtype = entity.relation_type[0]
+        session.execute('SET X ordernum Y+1 WHERE X from_entity SE, SE eid %(se)s, X ordernum Y, X ordernum >= %(order)s, NOT X eid %(x)s',
+                        {'x': entity.eid, 'se': fromentity.eid, 'order': entity.ordernum or 0})
+        subj, rtype = str(fromentity.name), str(relationtype.name)
+        obj = str(entity.to_entity[0].name)
+        # at this point default is a string or None, but we need a correctly
+        # typed value
+        default = entity.defaultval
+        if default is not None:
+            default = TYPE_CONVERTER[obj](default)
+        constraints = get_constraints(session, entity)
+        rdef = RelationDefinition(subj, rtype, obj,
+                                  cardinality=entity.cardinality,
+                                  order=entity.ordernum,
+                                  description=entity.description,
+                                  default=default,
+                                  indexed=entity.indexed,
+                                  fulltextindexed=entity.fulltextindexed,
+                                  internationalizable=entity.internationalizable,
+                                  constraints=constraints,
+                                  eid=entity.eid)
+        sysource = session.pool.source('system')
+        attrtype = _type_from_constraints(sysource.dbhelper, rdef.object,
+                                          constraints)
+        # XXX should be moved somehow into lgc.adbh: sqlite doesn't support to
+        # add a new column with UNIQUE, it should be added after the ALTER TABLE
+        # using ADD INDEX
+        if sysource.dbdriver == 'sqlite' and 'UNIQUE' in attrtype:
+            extra_unique_index = True
+            attrtype = attrtype.replace(' UNIQUE', '')
+        else:
+            extra_unique_index = False
+        # added some str() wrapping query since some backend (eg psycopg) don't
+        # allow unicode queries
+        try:
+            session.system_sql(str('ALTER TABLE %s ADD COLUMN %s %s'
+                                   % (subj, rtype, attrtype)))
+            self.info('added column %s to table %s', rtype, subj)
+        except Exception, ex:
+            # the column probably already exists. this occurs when
+            # the entity's type has just been added or if the column
+            # has not been previously dropped
+            self.error('error while altering table %s: %s', subj, ex)
+        if extra_unique_index or entity.indexed:
+            try:
+                sysource.create_index(session, subj, rtype,
+                                      unique=extra_unique_index)
+            except Exception, ex:
+                self.error('error while creating index for %s.%s: %s',
+                           subj, rtype, ex)
+        # postgres doesn't implement, so do it in two times
+        # ALTER TABLE %s ADD COLUMN %s %s SET DEFAULT %s
+        if default is not None:
+            if isinstance(default, unicode):
+                default = default.encode(sysource.encoding)
+            try:
+                session.system_sql('ALTER TABLE %s ALTER COLUMN %s SET DEFAULT '
+                                   '%%(default)s' % (subj, rtype),
+                                   {'default': default})
+            except Exception, ex:
+                # not supported by sqlite for instance
+                self.error('error while altering table %s: %s', subj, ex)
+            session.system_sql('UPDATE %s SET %s=%%(default)s' % (subj, rtype),
+                               {'default': default})
+        AddErdefOp(session, rdef)
+
+def after_add_efrdef(session, entity):
+    AddEFRDefPreCommitOp(session, entity=entity)
+
+
+class AddENFRDefPreCommitOp(PreCommitOperation):
+    """an actual relation has been added:
+    * if this is an inlined relation, add the necessary column
+      else if it's the first instance of this relation type, add the
+      necessary table and set default permissions
+    * register an operation to add the relation definition to the
+      application's schema on commit
+
+    constraints are handled by specific hooks
+    """
+    def precommit_event(self):
+        session = self.session
+        entity = self.entity
+        fromentity = entity.from_entity[0]
+        relationtype = entity.relation_type[0] 
+        session.execute('SET X ordernum Y+1 WHERE X from_entity SE, SE eid %(se)s, X ordernum Y, X ordernum >= %(order)s, NOT X eid %(x)s',
+                        {'x': entity.eid, 'se': fromentity.eid, 'order': entity.ordernum or 0})
+        subj, rtype = str(fromentity.name), str(relationtype.name)
+        obj = str(entity.to_entity[0].name)
+        card = entity.get('cardinality')
+        rdef = RelationDefinition(subj, rtype, obj,
+                                  cardinality=card,
+                                  order=entity.ordernum,
+                                  composite=entity.composite,
+                                  description=entity.description,
+                                  constraints=get_constraints(session, entity),
+                                  eid=entity.eid)
+        schema = session.repo.schema
+        rschema = schema.rschema(rtype)
+        # this has to be done before setting permissions
+        AddErdefOp(session, rdef)
+        if rschema.inlined:
+            # need to add a column if the relation is inlined and if this is the
+            # first occurrence of "Subject relation Something" whatever Something
+            # and if it has not been added during other event of the same
+            # transaction
+            key = '%s.%s' % (subj, rtype)
+            try:
+                alreadythere = bool(rschema.objects(subj))
+            except KeyError:
+                alreadythere = False
+            if not (alreadythere or
+                    key in session.query_data('createdattrs', ())):
+                add_inline_relation_column(session, subj, rtype)
+        else:
+            # need to create the relation if no relation definition in the
+            # schema and if it has not been added during other event of the same
+            # transaction
+            if not (rschema.subjects() or
+                    rtype in session.query_data('createdtables', ())):
+                try:
+                    rschema = schema[rtype]
+                    tablesql = rschema2sql(rschema)
+                except KeyError:
+                    # fake we add it to the schema now to get a correctly
+                    # initialized schema but remove it before doing anything
+                    # more dangerous...
+                    rschema = schema.add_relation_type(rdef)
+                    tablesql = rschema2sql(rschema)
+                    schema.del_relation_type(rtype)
+                # create the necessary table
+                for sql in tablesql.split(';'):
+                    if sql.strip():
+                        self.session.system_sql(sql)
+                session.add_query_data('createdtables', rtype)
+                
+def after_add_enfrdef(session, entity):
+    AddENFRDefPreCommitOp(session, entity=entity)
+
+
+# update ######################################################################
+
+def check_valid_changes(session, entity, ro_attrs=('name', 'final')):
+    errors = {}
+    # don't use getattr(entity, attr), we would get the modified value if any
+    for attr in ro_attrs:
+        origval = entity_attr(session, entity.eid, attr)
+        if entity.get(attr, origval) != origval:
+            errors[attr] = session._("can't change the %s attribute") % \
+                           display_name(session, attr)
+    if errors:
+        raise ValidationError(entity.eid, errors)
+
+def before_update_eetype(session, entity):
+    """check name change, handle final"""
+    check_valid_changes(session, entity, ro_attrs=('final',))
+    # don't use getattr(entity, attr), we would get the modified value if any
+    oldname = entity_attr(session, entity.eid, 'name')
+    newname = entity.get('name', oldname)
+    if newname.lower() != oldname.lower():
+        eschema = session.repo.schema[oldname]
+        UpdateEntityTypeName(session, eschema=eschema,
+                             oldname=oldname, newname=newname)
+
+def before_update_ertype(session, entity):
+    """check name change, handle final"""
+    check_valid_changes(session, entity)
+
+
+class UpdateEntityTypeName(SchemaOperation):
+    """this operation updates physical storage accordingly"""
+
+    def precommit_event(self):
+        # we need sql to operate physical changes on the system database
+        sqlexec = self.session.system_sql
+        sqlexec('ALTER TABLE %s RENAME TO %s' % (self.oldname, self.newname))
+        self.info('renamed table %s to %s', self.oldname, self.newname)
+        sqlexec('UPDATE entities SET type=%s WHERE type=%s',
+                (self.newname, self.oldname))
+        sqlexec('UPDATE deleted_entities SET type=%s WHERE type=%s',
+                (self.newname, self.oldname))
+        
+    def commit_event(self):
+        self.session.repo.schema.rename_entity_type(self.oldname, self.newname)
+
+
+class UpdateRdefOp(SchemaOperation):
+    """actually update some properties of a relation definition"""
+
+    def precommit_event(self):
+        if 'indexed' in self.values:
+            sysource = self.session.pool.source('system')
+            table, column = self.kobj[0], self.rschema.type
+            if self.values['indexed']:
+                sysource.create_index(self.session, table, column)
+            else:
+                sysource.drop_index(self.session, table, column)
+                
+    def commit_event(self):
+        # structure should be clean, no need to remove entity's relations
+        # at this point
+        self.rschema._rproperties[self.kobj].update(self.values)
+    
+def after_update_erdef(session, entity):
+    desttype = entity.to_entity[0].name
+    rschema = session.repo.schema[entity.relation_type[0].name]
+    newvalues = {}
+    for prop in rschema.rproperty_defs(desttype):
+        if prop == 'constraints':
+            continue
+        if prop == 'order':
+            prop = 'ordernum'
+        if prop in entity:
+            newvalues[prop] = entity[prop]
+    if newvalues:
+        subjtype = entity.from_entity[0].name
+        UpdateRdefOp(session, (subjtype, desttype), rschema=rschema,
+                     values=newvalues)
+
+
+class UpdateRtypeOp(SchemaOperation):
+    """actually update some properties of a relation type"""
+    def precommit_event(self):
+        session = self.session
+        rschema = self.rschema
+        if rschema.is_final() or not 'inlined' in self.values:
+            return # nothing to do
+        inlined = self.values['inlined']
+        entity = self.entity
+        if not entity.inlined_changed(inlined): # check in-lining is necessary/possible
+            return # nothing to do
+        # inlined changed, make necessary physical changes!
+        sqlexec = self.session.system_sql
+        rtype = rschema.type
+        if not inlined:
+            # need to create the relation if it has not been already done by another
+            # event of the same transaction
+            if not rschema.type in session.query_data('createdtables', ()):
+                tablesql = rschema2sql(rschema)
+                # create the necessary table
+                for sql in tablesql.split(';'):
+                    if sql.strip():
+                        sqlexec(sql)
+                session.add_query_data('createdtables', rschema.type)
+            # copy existing data
+            for etype in rschema.subjects():
+                sqlexec('INSERT INTO %s_relation SELECT eid, %s FROM %s WHERE NOT %s IS NULL'
+                        % (rtype, rtype, etype, rtype))
+            # drop existing columns
+            for etype in rschema.subjects():
+                DropColumnOp(session, table=str(etype), column=rtype)
+        else:
+            for etype in rschema.subjects():
+                try:
+                    add_inline_relation_column(session, str(etype), rtype)                    
+                except Exception, ex:
+                    # the column probably already exists. this occurs when
+                    # the entity's type has just been added or if the column
+                    # has not been previously dropped
+                    self.error('error while altering table %s: %s', etype, ex)
+                # copy existing data.
+                # XXX don't use, it's not supported by sqlite (at least when I tried it)
+                #sqlexec('UPDATE %(etype)s SET %(rtype)s=eid_to '
+                #        'FROM %(rtype)s_relation '
+                #        'WHERE %(etype)s.eid=%(rtype)s_relation.eid_from'
+                #        % locals())
+                cursor = sqlexec('SELECT eid_from, eid_to FROM %(etype)s, '
+                                 '%(rtype)s_relation WHERE %(etype)s.eid='
+                                 '%(rtype)s_relation.eid_from' % locals())
+                args = [{'val': eid_to, 'x': eid} for eid, eid_to in cursor.fetchall()]
+                if args:
+                    cursor.executemany('UPDATE %s SET %s=%%(val)s WHERE eid=%%(x)s'
+                                       % (etype, rtype), args)
+                # drop existing table
+                DropTableOp(session, table='%s_relation' % rtype)
+
+    def commit_event(self):
+        # structure should be clean, no need to remove entity's relations
+        # at this point
+        self.rschema.__dict__.update(self.values)
+    
+def after_update_ertype(session, entity):
+    rschema = session.repo.schema.rschema(entity.name)
+    newvalues = {}
+    for prop in ('meta', 'symetric', 'inlined'):
+        if prop in entity:
+            newvalues[prop] = entity[prop]
+    if newvalues:
+        UpdateRtypeOp(session, entity=entity, rschema=rschema, values=newvalues)
+
+# constraints synchronization #################################################
+
+from cubicweb.schema import CONSTRAINTS
+
+class ConstraintOp(SchemaOperation):
+    """actually update constraint of a relation definition"""
+    def prepare_constraints(self, rtype, subjtype, objtype):
+        constraints = rtype.rproperty(subjtype, objtype, 'constraints')
+        self.constraints = list(constraints)
+        rtype.set_rproperty(subjtype, objtype, 'constraints', self.constraints)
+        return self.constraints
+    
+    def precommit_event(self):
+        rdef = self.entity.reverse_constrained_by[0]
+        session = self.session
+        # when the relation is added in the same transaction, the constraint object
+        # is created by AddEN?FRDefPreCommitOp, there is nothing to do here
+        if rdef.eid in session.query_data('neweids', ()):
+            self.cancelled = True
+            return 
+        self.cancelled = False
+        schema = session.repo.schema
+        subjtype, rtype, objtype = schema.schema_by_eid(rdef.eid)
+        self.prepare_constraints(rtype, subjtype, objtype)
+        cstrtype = self.entity.type
+        self.cstr = rtype.constraint_by_type(subjtype, objtype, cstrtype)
+        self._cstr = CONSTRAINTS[cstrtype].deserialize(self.entity.value)
+        self._cstr.eid = self.entity.eid
+        # alter the physical schema on size constraint changes
+        if self._cstr.type() == 'SizeConstraint' and (
+            self.cstr is None or self.cstr.max != self._cstr.max):
+            try:
+                session.system_sql('ALTER TABLE %s ALTER COLUMN %s TYPE VARCHAR(%s)'
+                                   % (subjtype, rtype, self._cstr.max))
+                self.info('altered column %s of table %s: now VARCHAR(%s)',
+                          rtype, subjtype, self._cstr.max)
+            except Exception, ex:
+                # not supported by sqlite for instance
+                self.error('error while altering table %s: %s', subjtype, ex)
+        elif cstrtype == 'UniqueConstraint':
+            session.pool.source('system').create_index(
+                self.session, str(subjtype), str(rtype), unique=True)
+        
+    def commit_event(self):
+        if self.cancelled:
+            return
+        # in-place removing
+        if not self.cstr is None:
+            self.constraints.remove(self.cstr)
+        self.constraints.append(self._cstr)
+
+def after_add_econstraint(session, entity):
+    ConstraintOp(session, entity=entity)
+
+def after_update_econstraint(session, entity):
+    ConstraintOp(session, entity=entity)
+
+class DelConstraintOp(ConstraintOp):
+    """actually remove a constraint of a relation definition"""
+    
+    def precommit_event(self):
+        # expects subjtype/rtype/objtype/cstr given as operation kwargs
+        self.prepare_constraints(self.rtype, self.subjtype, self.objtype)
+        cstrtype = self.cstr.type()
+        # alter the physical schema on size/unique constraint changes
+        if cstrtype == 'SizeConstraint':
+            try:
+                self.session.system_sql('ALTER TABLE %s ALTER COLUMN %s TYPE TEXT'
+                                        % (self.subjtype, self.rtype))
+                self.info('altered column %s of table %s: now TEXT', 
+                          self.rtype,  self.subjtype)
+            except Exception, ex:
+                # not supported by sqlite for instance
+                self.error('error while altering table %s: %s', 
+                           self.subjtype, ex)
+        elif cstrtype == 'UniqueConstraint':
+            self.session.pool.source('system').drop_index(
+                self.session, str(self.subjtype), str(self.rtype), unique=True)
+                
+    def commit_event(self):
+        """drop the constraint from the in-memory constraint list"""
+        self.constraints.remove(self.cstr)
+
+
+def before_delete_constrained_by(session, fromeid, rtype, toeid):
+    """hook: a constrained_by relation is being removed; schedule removal of
+    the matching constraint unless the relation definition itself is being
+    deleted in this transaction
+    """
+    if not fromeid in session.query_data('pendingeids', ()):
+        schema = session.repo.schema
+        entity = session.eid_rset(toeid).get_entity(0, 0)
+        # NOTE(review): this rebinds the `rtype` parameter (a relation name)
+        # with the relation schema object returned by schema_by_eid
+        subjtype, rtype, objtype = schema.schema_by_eid(fromeid)
+        try:
+            cstr = rtype.constraint_by_type(subjtype, objtype, entity.cstrtype[0].name)
+            DelConstraintOp(session, subjtype=subjtype, rtype=rtype, objtype=objtype,
+                            cstr=cstr)
+        except IndexError:
+            session.critical('constraint type no more accessible')
+
+
+def after_add_constrained_by(session, fromeid, rtype, toeid):
+    """hook: record constraints added to a relation definition created in
+    this very transaction (processed later with the new rdef)
+    """
+    if fromeid in session.query_data('neweids', ()):
+        session.add_query_data(fromeid, toeid)
+
+    
+# schema permissions synchronization ##########################################
+
+class PermissionOp(Operation):
+    """base class to synchronize schema permission definitions"""
+    def __init__(self, session, perm, etype_eid):
+        self.perm = perm
+        try:
+            self.name = entity_name(session, etype_eid)
+        except IndexError:
+            # target type no longer exists: log and do NOT register the
+            # operation (Operation.__init__ is skipped on purpose)
+            self.error('changing permission of a no more existant type #%s',
+                etype_eid)
+        else:
+            Operation.__init__(self, session)
+
+class AddGroupPermissionOp(PermissionOp):
+    """synchronize schema when a *_permission relation has been added on a group
+    """
+    def __init__(self, session, perm, etype_eid, group_eid):
+        # resolve the group's name while the transaction is still open
+        self.group = entity_name(session, group_eid)
+        PermissionOp.__init__(self, session, perm, etype_eid)
+        
+    def commit_event(self):
+        """the observed connections pool has been commited"""
+        try:
+            erschema = self.schema[self.name]
+        except KeyError:
+            # duh, schema not found, log error and skip operation
+            self.error('no schema for %s', self.name)
+            return
+        groups = list(erschema.get_groups(self.perm))
+        try:            
+            groups.index(self.group)
+            # group already granted: warn instead of adding a duplicate
+            self.warning('group %s already have permission %s on %s',
+                         self.group, self.perm, erschema.type)
+        except ValueError:
+            groups.append(self.group)
+            erschema.set_groups(self.perm, groups)
+
+class AddRQLExpressionPermissionOp(PermissionOp):
+    """synchronize schema when a *_permission relation has been added on a rql
+    expression
+    """
+    def __init__(self, session, perm, etype_eid, expression):
+        # expression is the raw rql string of the RQLExpression entity
+        self.expr = expression
+        PermissionOp.__init__(self, session, perm, etype_eid)
+        
+    def commit_event(self):
+        """the observed connections pool has been commited"""
+        try:
+            erschema = self.schema[self.name]
+        except KeyError:
+            # duh, schema not found, log error and skip operation
+            self.error('no schema for %s', self.name)
+            return
+        exprs = list(erschema.get_rqlexprs(self.perm))
+        exprs.append(erschema.rql_expression(self.expr))
+        erschema.set_rqlexprs(self.perm, exprs)
+
+def after_add_permission(session, subject, rtype, object):
+    """added entity/relation *_permission, need to update schema"""
+    # rtype is one of [read|add|delete|update]_permission: extract the action
+    perm = rtype.split('_', 1)[0]
+    if session.describe(object)[0] == 'EGroup':
+        AddGroupPermissionOp(session, perm, subject, object)
+    else: # RQLExpression
+        expr = session.execute('Any EXPR WHERE X eid %(x)s, X expression EXPR',
+                               {'x': object}, 'x')[0][0]
+        AddRQLExpressionPermissionOp(session, perm, subject, expr)
+    
+
+        
+class DelGroupPermissionOp(AddGroupPermissionOp):
+    """synchronize schema when a *_permission relation has been deleted from a group"""
+        
+    def commit_event(self):
+        """the observed connections pool has been commited"""
+        try:
+            erschema = self.schema[self.name]
+        except KeyError:
+            # duh, schema not found, log error and skip operation
+            self.error('no schema for %s', self.name)
+            return
+        groups = list(erschema.get_groups(self.perm))
+        try:            
+            groups.remove(self.group)
+            erschema.set_groups(self.perm, groups)
+        except ValueError:
+            # group wasn't in the in-memory permissions: log and go on
+            self.error('can\'t remove permission %s on %s to group %s',
+                self.perm, erschema.type, self.group)
+
+        
+class DelRQLExpressionPermissionOp(AddRQLExpressionPermissionOp):
+    """synchronize schema when a *_permission relation has been deleted from an rql expression"""
+        
+    def commit_event(self):
+        """the observed connections pool has been commited"""
+        try:
+            erschema = self.schema[self.name]
+        except KeyError:
+            # duh, schema not found, log error and skip operation
+            self.error('no schema for %s', self.name)
+            return
+        rqlexprs = list(erschema.get_rqlexprs(self.perm))
+        # match on expression text since expression instances differ
+        for i, rqlexpr in enumerate(rqlexprs):
+            if rqlexpr.expression == self.expr:
+                rqlexprs.pop(i)
+                break
+        else:
+            self.error('can\'t remove permission %s on %s for expression %s',
+                self.perm, erschema.type, self.expr)
+            return
+        erschema.set_rqlexprs(self.perm, rqlexprs)
+
+                
+def before_del_permission(session, subject, rtype, object):
+    """delete entity/relation *_permission, need to update schema
+
+    skip the operation if the related type is being deleted
+    """
+    if subject in session.query_data('pendingeids', ()):
+        return
+    # rtype is one of [read|add|delete|update]_permission: extract the action
+    perm = rtype.split('_', 1)[0]
+    if session.describe(object)[0] == 'EGroup':
+        DelGroupPermissionOp(session, perm, subject, object)
+    else: # RQLExpression
+        expr = session.execute('Any EXPR WHERE X eid %(x)s, X expression EXPR',
+                               {'x': object}, 'x')[0][0]
+        DelRQLExpressionPermissionOp(session, perm, subject, expr)
+
+
+def rebuild_infered_relations(session, subject, rtype, object):
+    """hook: a `specializes` relation changed, schedule a rebuild of the
+    infered relation definitions
+    """
+    # registering a schema operation will trigger a call to
+    # repo.set_schema() on commit which will in turn rebuild
+    # infered relation definitions
+    UpdateSchemaOp(session)
+
+
+def _register_schema_hooks(hm):
+    """register schema related hooks on the hooks manager"""
+    # schema synchronisation #####################
+    # before/after add
+    hm.register_hook(before_add_eetype, 'before_add_entity', 'EEType')
+    hm.register_hook(before_add_ertype, 'before_add_entity', 'ERType')
+    hm.register_hook(after_add_eetype, 'after_add_entity', 'EEType')
+    hm.register_hook(after_add_ertype, 'after_add_entity', 'ERType')
+    hm.register_hook(after_add_efrdef, 'after_add_entity', 'EFRDef')
+    hm.register_hook(after_add_enfrdef, 'after_add_entity', 'ENFRDef')
+    # before/after update
+    hm.register_hook(before_update_eetype, 'before_update_entity', 'EEType')
+    hm.register_hook(before_update_ertype, 'before_update_entity', 'ERType')
+    hm.register_hook(after_update_ertype, 'after_update_entity', 'ERType')
+    # after_update_erdef handles both final and non final rdef entities
+    hm.register_hook(after_update_erdef, 'after_update_entity', 'EFRDef')
+    hm.register_hook(after_update_erdef, 'after_update_entity', 'ENFRDef')
+    # before/after delete
+    hm.register_hook(before_del_eetype, 'before_delete_entity', 'EEType')
+    hm.register_hook(after_del_eetype, 'after_delete_entity', 'EEType')
+    hm.register_hook(before_del_ertype, 'before_delete_entity', 'ERType')
+    hm.register_hook(after_del_relation_type, 'after_delete_relation', 'relation_type')
+    # specialization changes require infered relations to be rebuilt
+    hm.register_hook(rebuild_infered_relations, 'after_add_relation', 'specializes')
+    hm.register_hook(rebuild_infered_relations, 'after_delete_relation', 'specializes')    
+    # constraints synchronization hooks
+    hm.register_hook(after_add_econstraint, 'after_add_entity', 'EConstraint')
+    hm.register_hook(after_update_econstraint, 'after_update_entity', 'EConstraint')
+    hm.register_hook(before_delete_constrained_by, 'before_delete_relation', 'constrained_by')
+    hm.register_hook(after_add_constrained_by, 'after_add_relation', 'constrained_by')
+    # permissions synchronisation ################
+    for perm in ('read_permission', 'add_permission',
+                 'delete_permission', 'update_permission'):
+        hm.register_hook(after_add_permission, 'after_add_relation', perm)
+        hm.register_hook(before_del_permission, 'before_delete_relation', perm)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/schemaserial.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,491 @@
+"""functions for schema / permissions (de)serialization using RQL
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from itertools import chain
+
+from logilab.common.shellutils import ProgressBar
+
+from yams import schema as schemamod, buildobjs as ybo
+
+from cubicweb.schema import CONSTRAINTS, ETYPE_NAME_MAP
+
+def group_mapping(cursor, interactive=True):
+    """create a group mapping from an rql cursor
+
+    A group mapping has standard group names as key (managers, owners at least)
+    and the actual EGroup entity's eid as associated value.
+    In interactive mode (the default), missing groups'eid will be prompted
+    from the user.
+    """
+    res = {}
+    for eid, name in cursor.execute('Any G, N WHERE G is EGroup, G name N'):
+        res[name] = eid
+    if not interactive:
+        return res
+    missing = [g for g in ('owners', 'managers', 'users', 'guests') if not g in res]
+    if missing:
+        print 'some native groups are missing but the following groups have been found:'
+        print '\n'.join('* %s (%s)' % (n, eid) for n, eid in res.items())
+        print 
+        print 'enter the eid of a to group to map to each missing native group'
+        print 'or just type enter to skip permissions granted to a group'
+        for group in missing:
+            while True:
+                value = raw_input('eid for group %s: ' % group).strip()
+                if not value:
+                    continue
+                try:
+                    res[group] = int(value)
+                except ValueError:
+                    print 'eid should be an integer'
+                    continue
+    return res
+
+# schema / perms deserialization ##############################################
+
+def deserialize_schema(schema, session):
+    """return a schema according to information stored in an rql database
+    as ERType and EEType entities
+    """
+    # print 'reading schema from the database...'
+    index = {}
+    permsdict = deserialize_ertype_permissions(session)
+    # flag read by schema objects to relax checks while loading
+    schema.reading_from_database = True
+    for eid, etype, desc, meta in session.execute('Any X, N, D, M WHERE '
+                                                  'X is EEType, X name N, '
+                                                  'X description D, X meta M',
+                                                  build_descr=False):
+        # base types are already in the schema, skip them
+        if etype in schemamod.BASE_TYPES:
+            # just set the eid
+            eschema = schema.eschema(etype)
+            eschema.eid = eid
+            index[eid] = eschema
+            continue
+        if etype in ETYPE_NAME_MAP: # XXX <2.45 bw compat
+            print 'fixing etype name from %s to %s' % (etype, ETYPE_NAME_MAP[etype])
+            # can't use write rql queries at this point, use raw sql
+            session.system_sql('UPDATE EEType SET name=%(n)s WHERE eid=%(x)s',
+                               {'x': eid, 'n': ETYPE_NAME_MAP[etype]})
+            session.system_sql('UPDATE entities SET type=%(n)s WHERE type=%(x)s',
+                               {'x': etype, 'n': ETYPE_NAME_MAP[etype]})
+            session.commit(False)
+            try:
+                session.system_sql('UPDATE deleted_entities SET type=%(n)s WHERE type=%(x)s',
+                                   {'x': etype, 'n': ETYPE_NAME_MAP[etype]})
+            except:
+                # best effort: presumably the table may be missing in old
+                # databases -- TODO narrow this bare except
+                pass
+            # drop stale entries from the repository type/source cache
+            tocleanup = [eid]
+            tocleanup += (eid for eid, (eidetype, uri, extid) in session.repo._type_source_cache.items()
+                          if etype == eidetype)
+            session.repo.clear_caches(tocleanup)
+            session.commit(False)
+            etype = ETYPE_NAME_MAP[etype]
+        etype = ybo.EntityType(name=etype, description=desc, meta=meta, eid=eid)
+        eschema = schema.add_entity_type(etype)
+        index[eid] = eschema
+        set_perms(eschema, permsdict.get(eid, {}))
+    try:
+        rset = session.execute('Any XN, ETN WHERE X is EEType, X name XN, '
+                               'X specializes ET, ET name ETN')
+    except: # `specializes` relation not available for versions prior to 2.50
+        session.rollback(False)
+    else:
+        for etype, stype in rset:
+            eschema = schema.eschema(etype)
+            seschema = schema.eschema(stype)
+            eschema._specialized_type = stype
+            seschema._specialized_by.append(etype)
+    for eid, rtype, desc, meta, sym, il in session.execute(
+        'Any X,N,D,M,S,I WHERE X is ERType, X name N, X description D, '
+        'X meta M, X symetric S, X inlined I', build_descr=False):
+        try:
+            # bw compat: fulltext_container added in 2.47
+            ft_container = session.execute('Any FTC WHERE X eid %(x)s, X fulltext_container FTC',
+                                           {'x': eid}).rows[0][0]
+        except:
+            # relation missing or unset: fall back to no container
+            ft_container = None
+            session.rollback(False)
+        rtype = ybo.RelationType(name=rtype, description=desc, meta=bool(meta),
+                                 symetric=bool(sym), inlined=bool(il),
+                                 fulltext_container=ft_container, eid=eid)
+        rschema = schema.add_relation_type(rtype)
+        index[eid] = rschema
+        set_perms(rschema, permsdict.get(eid, {}))        
+    cstrsdict = deserialize_rdef_constraints(session)
+    for values in session.execute(
+        'Any X,SE,RT,OE,CARD,ORD,DESC,IDX,FTIDX,I18N,DFLT WHERE X is EFRDef,'
+        'X relation_type RT, X cardinality CARD, X ordernum ORD, X indexed IDX,'
+        'X description DESC, X internationalizable I18N, X defaultval DFLT,'
+        'X fulltextindexed FTIDX, X from_entity SE, X to_entity OE',
+        build_descr=False):
+        rdefeid, seid, reid, teid, card, ord, desc, idx, ftidx, i18n, default = values
+        constraints = cstrsdict.get(rdefeid, ())
+        # resolve eids to schema type names through the index built above
+        frometype = index[seid].type
+        rtype = index[reid].type
+        toetype = index[teid].type
+        rdef = ybo.RelationDefinition(frometype, rtype, toetype, cardinality=card,
+                                  order=ord, description=desc, 
+                                  constraints=constraints,
+                                  indexed=idx, fulltextindexed=ftidx,
+                                  internationalizable=i18n,
+                                  default=default, eid=rdefeid)
+        schema.add_relation_def(rdef)
+    for values in session.execute(
+        'Any X,SE,RT,OE,CARD,ORD,DESC,C WHERE X is ENFRDef, X relation_type RT,'
+        'X cardinality CARD, X ordernum ORD, X description DESC, '
+        'X from_entity SE, X to_entity OE, X composite C', build_descr=False):
+        rdefeid, seid, reid, teid, card, ord, desc, c = values
+        frometype = index[seid].type
+        rtype = index[reid].type
+        toetype = index[teid].type
+        constraints = cstrsdict.get(rdefeid, ())
+        rdef = ybo.RelationDefinition(frometype, rtype, toetype, cardinality=card,
+                                  order=ord, description=desc, 
+                                  composite=c, constraints=constraints,
+                                  eid=rdefeid)
+        schema.add_relation_def(rdef)
+    schema.infer_specialization_rules()
+    session.commit()
+    schema.reading_from_database = False
+
+
+def deserialize_ertype_permissions(session):
+    """return a dict mapping entity or relation schema eids to their
+    action:groups associations, according to schema's permissions stored
+    in the database as [read|add|delete|update]_permission relations
+    between EEType/ERType and EGroup entities
+    """
+    res = {}
+    for action in ('read', 'add', 'update', 'delete'):
+        rql = 'Any E,N WHERE G is EGroup, G name N, E %s_permission G' % action
+        for eid, gname in session.execute(rql, build_descr=False):
+            res.setdefault(eid, {}).setdefault(action, []).append(gname)
+        rql = ('Any E,X,EXPR,V WHERE X is RQLExpression, X expression EXPR, '
+               'E %s_permission X, X mainvars V' % action)
+        for eid, expreid, expr, mainvars in session.execute(rql, build_descr=False):
+            # we don't know yet if it's a rql expr for an entity or a relation,
+            # so append a tuple to differentiate from groups and so we'll be
+            # able to instantiate it later
+            res.setdefault(eid, {}).setdefault(action, []).append( (expr, mainvars, expreid) )
+    return res
+
+def set_perms(erschema, permsdict):
+    """set permissions on the given erschema according to the permission
+    definition dictionary as built by deserialize_ertype_permissions for a
+    given erschema's eid
+    """
+    for action in erschema.ACTIONS:
+        actperms = []
+        for something in permsdict.get(action, ()):
+            if isinstance(something, tuple):
+                # (expression, mainvars, eid) tuple: build an rql expression
+                actperms.append(erschema.rql_expression(*something))
+            else: # group name
+                actperms.append(something)
+        erschema.set_permissions(action, actperms)            
+
+
+def deserialize_rdef_constraints(session):
+    """return the list of relation definition's constraints as instances,
+    keyed by relation definition eid
+    """
+    res = {}
+    for rdefeid, ceid, ct, val in session.execute(
+        'Any E, X,TN,V WHERE E constrained_by X, X is EConstraint, '
+        'X cstrtype T, T name TN, X value V', build_descr=False):
+        cstr = CONSTRAINTS[ct].deserialize(val)
+        cstr.eid = ceid
+        res.setdefault(rdefeid, []).append(cstr)
+    return res
+        
+        
+# schema / perms serialization ################################################
+
+def serialize_schema(cursor, schema, verbose=False):
+    """synchronize schema and permissions in the database according to
+    current schema
+    """
+    print 'serializing the schema, this may take some time'
+    eschemas = schema.entities()
+    aller = eschemas + schema.relations()
+    if not verbose:
+        pb_size = len(aller) + len(CONSTRAINTS) + len([x for x in eschemas if x.specializes()])
+        pb = ProgressBar(pb_size)
+    for cstrtype in CONSTRAINTS:
+        rql = 'INSERT EConstraintType X: X name "%s"' % cstrtype
+        if verbose:
+            print rql
+        cursor.execute(rql)
+        if not verbose:
+            pb.update()
+    groupmap = group_mapping(cursor, interactive=False)
+    for ertype in aller:
+        # skip eid and has_text relations
+        if ertype in ('eid', 'identity', 'has_text',):
+            pb.update()
+            continue
+        for rql, kwargs in erschema2rql(schema[ertype]):
+            if verbose:
+                print rql % kwargs
+            cursor.execute(rql, kwargs)
+        for rql, kwargs in erperms2rql(schema[ertype], groupmap):
+            if verbose:
+                print rql
+            cursor.execute(rql, kwargs)
+        if not verbose:
+            pb.update()
+    for rql, kwargs in specialize2rql(schema):
+        if verbose:
+            print rql % kwargs
+        cursor.execute(rql, kwargs)
+        if not verbose:
+            pb.update()
+    print
+
+
+def _ervalues(erschema):
+    """return the base attribute values shared by EEType and ERType rows"""
+    try:
+        type_ = unicode(erschema.type)
+    except UnicodeDecodeError, e:
+        raise Exception("can't decode %s [was %s]" % (erschema.type, e))
+    try:
+        desc = unicode(erschema.description) or u''
+    except UnicodeDecodeError, e:
+        raise Exception("can't decode %s [was %s]" % (erschema.description, e))
+    return {
+        'name': type_,
+        'meta': erschema.meta,
+        'final': erschema.is_final(),
+        'description': desc,
+        }
+
+def eschema_relations_values(eschema):
+    """return ('X attr %(attr)s' fragments, values) for an entity schema"""
+    values = _ervalues(eschema)
+    relations = ['X %s %%(%s)s' % (attr, attr) for attr in sorted(values)]
+    return relations, values
+
+# XXX 2.47 migration
+HAS_FULLTEXT_CONTAINER = True
+
+def rschema_relations_values(rschema):
+    """return ('X attr %(attr)s' fragments, values) for a relation schema"""
+    values = _ervalues(rschema)
+    values['final'] = rschema.is_final()
+    values['symetric'] = rschema.symetric
+    values['inlined'] = rschema.inlined
+    if HAS_FULLTEXT_CONTAINER:
+        if isinstance(rschema.fulltext_container, str):
+            # rql query parameters must be unicode
+            values['fulltext_container'] = unicode(rschema.fulltext_container)
+        else:
+            values['fulltext_container'] = rschema.fulltext_container
+    relations = ['X %s %%(%s)s' % (attr, attr) for attr in sorted(values)]
+    return relations, values
+
+def _rdef_values(rschema, objtype, props):
+    """normalize relation definition properties into db-ready values"""
+    amap = {'order': 'ordernum'}
+    values = {}
+    for prop, default in rschema.rproperty_defs(objtype).iteritems():
+        # these properties are handled elsewhere or not serialized at all
+        if prop in ('eid', 'constraints', 'uid', 'infered'):
+            continue
+        value = props.get(prop, default)
+        if prop in ('indexed', 'fulltextindexed', 'internationalizable'):
+            value = bool(value)
+        elif prop == 'ordernum':
+            value = int(value)
+        elif isinstance(value, str):
+            value = unicode(value)
+        values[amap.get(prop, prop)] = value
+    return values
+    
+def nfrdef_relations_values(rschema, objtype, props):
+    """rql fragments / values for a non final relation definition"""
+    values = _rdef_values(rschema, objtype, props)
+    relations = ['X %s %%(%s)s' % (attr, attr) for attr in sorted(values)]
+    return relations, values
+    
+def frdef_relations_values(rschema, objtype, props):
+    """rql fragments / values for a final (attribute) relation definition"""
+    values = _rdef_values(rschema, objtype, props)
+    default = values['default']
+    del values['default']
+    if default is not None:
+        if default is False:
+            # NOTE(review): False is serialized as an empty string --
+            # confirm this is the intended sentinel for boolean defaults
+            default = u''
+        elif not isinstance(default, unicode):
+            default = unicode(default)
+    values['defaultval'] = default
+    relations = ['X %s %%(%s)s' % (attr, attr) for attr in sorted(values)]
+    return relations, values
+
+    
+def __rdef2rql(genmap, rschema, subjtype=None, objtype=None, props=None):
+    """generate (rql, values) pairs for rschema's relation definitions,
+    dispatching on finality through the generators given in genmap
+    """
+    if subjtype is None:
+        # no explicit target: walk every relation definition of rschema
+        assert objtype is None
+        assert props is None
+        targets = rschema.iter_rdefs()
+    else:
+        assert not objtype is None
+        targets = [(subjtype, objtype)]
+    for subjtype, objtype in targets:
+        if props is None:
+            _props = rschema.rproperties(subjtype, objtype)
+        else:
+            _props = props
+        # don't serialize infered relations
+        if _props.get('infered'):
+            continue
+        gen = genmap[rschema.is_final()]
+        for rql, values in gen(rschema, subjtype, objtype, _props):
+            yield rql, values
+
+
+def schema2rql(schema, skip=None, allow=None):
+    """return a list of rql insert statements to enter the schema in the
+    database as ERType and EEType entities
+
+    `skip` and `allow` are mutually exclusive type-name filters
+    """
+    assert not (skip is not None and allow is not None), \
+           'can\'t use both skip and allow'
+    all = schema.entities() + schema.relations()
+    if skip is not None:
+        return chain(*[erschema2rql(schema[t]) for t in all if not t in skip])
+    elif allow is not None:
+        return chain(*[erschema2rql(schema[t]) for t in all if t in allow])
+    return chain(*[erschema2rql(schema[t]) for t in all])
+        
+def erschema2rql(erschema):
+    """dispatch to eschema2rql or rschema2rql according to schema kind"""
+    if isinstance(erschema, schemamod.EntitySchema):
+        return eschema2rql(erschema)
+    return rschema2rql(erschema)
+
+def eschema2rql(eschema):
+    """return a list of rql insert statements to enter an entity schema
+    in the database as an EEType entity
+    """
+    relations, values = eschema_relations_values(eschema)
+    # NOTE: 'specializes' relation can't be inserted here since there's no
+    # way to make sure the parent type is inserted before the child type
+    yield 'INSERT EEType X: %s' % ','.join(relations) , values
+
+def specialize2rql(schema):
+    """yield the rql statements linking entity types to their parent type"""
+    for eschema in schema.entities():
+        for rql, kwargs in eschemaspecialize2rql(eschema):
+            yield rql, kwargs
+
+def eschemaspecialize2rql(eschema):
+    """yield the 'specializes' statement for eschema, if it has a parent"""
+    specialized_type = eschema.specializes()
+    if specialized_type:
+        values = {'x': eschema.type, 'et': specialized_type.type}
+        yield 'SET X specializes ET WHERE X name %(x)s, ET name %(et)s', values
+
+def rschema2rql(rschema, addrdef=True):
+    """return a list of rql insert statements to enter a relation schema
+    in the database as an ERType entity
+
+    when `addrdef` is true, statements for its relation definitions are
+    generated as well
+    """
+    if rschema.type == 'has_text':
+        # has_text is never serialized
+        return
+    relations, values = rschema_relations_values(rschema)
+    yield 'INSERT ERType X: %s' % ','.join(relations), values
+    if addrdef:
+        for rql, values in rdef2rql(rschema):
+            yield rql, values
+            
+def rdef2rql(rschema, subjtype=None, objtype=None, props=None):
+    """return a generator of (rql, values) inserting relation definitions"""
+    genmap = {True: frdef2rql, False: nfrdef2rql}
+    return __rdef2rql(genmap, rschema, subjtype, objtype, props)
+
+
+# rql fragments shared by the rdef insert/update statements below
+_LOCATE_RDEF_RQL0 = 'X relation_type ER,X from_entity SE,X to_entity OE'
+_LOCATE_RDEF_RQL1 = 'SE name %(se)s,ER name %(rt)s,OE name %(oe)s'
+
+def frdef2rql(rschema, subjtype, objtype, props):
+    """yield (rql, values) inserting a final relation definition (EFRDef)
+    followed by its constraints
+    """
+    relations, values = frdef_relations_values(rschema, objtype, props)
+    relations.append(_LOCATE_RDEF_RQL0)
+    values.update({'se': str(subjtype), 'rt': str(rschema), 'oe': str(objtype)})
+    yield 'INSERT EFRDef X: %s WHERE %s' % (','.join(relations), _LOCATE_RDEF_RQL1), values
+    for rql, values in rdefrelations2rql(rschema, subjtype, objtype, props):
+        yield rql + ', EDEF is EFRDef', values
+            
+def nfrdef2rql(rschema, subjtype, objtype, props):
+    """yield (rql, values) inserting a non final relation definition
+    (ENFRDef) followed by its constraints
+    """
+    relations, values = nfrdef_relations_values(rschema, objtype, props)
+    relations.append(_LOCATE_RDEF_RQL0)
+    values.update({'se': str(subjtype), 'rt': str(rschema), 'oe': str(objtype)})
+    yield 'INSERT ENFRDef X: %s WHERE %s' % (','.join(relations), _LOCATE_RDEF_RQL1), values
+    for rql, values in rdefrelations2rql(rschema, subjtype, objtype, props):
+        yield rql + ', EDEF is ENFRDef', values
+                
+def rdefrelations2rql(rschema, subjtype, objtype, props):
+    """chain the constraint insertion statements of a relation definition"""
+    iterators = []
+    for constraint in props['constraints']:
+        iterators.append(constraint2rql(rschema, subjtype, objtype, constraint))
+    return chain(*iterators)
+
+def constraint2rql(rschema, subjtype, objtype, constraint):
+    """yield the (rql, values) inserting an EConstraint tied to its rdef"""
+    values = {'ctname': unicode(constraint.type()),
+              'value': unicode(constraint.serialize()),
+              'rt': str(rschema), 'se': str(subjtype), 'oe': str(objtype)}
+    yield 'INSERT EConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE \
+CT name %(ctname)s, EDEF relation_type ER, EDEF from_entity SE, EDEF to_entity OE, \
+ER name %(rt)s, SE name %(se)s, OE name %(oe)s', values
+
+def perms2rql(schema, groupmapping):
+    """return rql insert statements to enter the schema's permissions in
+    the database as [read|add|delete|update]_permission relations between
+    EEType/ERType and EGroup entities
+
+    groupmapping is a dictionary mapping standard group names to
+    eids
+    """
+    for etype in sorted(schema.entities()):
+        yield erperms2rql(schema[etype], groupmapping)
+    for rtype in sorted(schema.relations()):
+        yield erperms2rql(schema[rtype], groupmapping)
+
+def erperms2rql(erschema, groupmapping):
+    """return rql insert statements to enter the entity or relation
+    schema's permissions in the database as
+    [read|add|delete|update]_permission relations between EEType/ERType
+    and EGroup entities
+    """
+    etype = isinstance(erschema, schemamod.EntitySchema) and 'EEType' or 'ERType'
+    for action in erschema.ACTIONS:
+        for group in sorted(erschema.get_groups(action)):
+            try:
+                yield ('SET X %s_permission Y WHERE X is %s, X name "%s", Y eid %s'
+                       % (action, etype, erschema, groupmapping[group]), None)
+            except KeyError:
+                # group not in the mapping: skip that permission silently
+                continue
+        for rqlexpr in sorted(erschema.get_rqlexprs(action)):
+            yield ('INSERT RQLExpression E: E expression %%(e)s, E exprtype %%(t)s, '
+                   'E mainvars %%(v)s, X %s_permission E '
+                   'WHERE X is %s, X name "%s"' % (action, etype, erschema),
+                   {'e': unicode(rqlexpr.expression), 'v': unicode(rqlexpr.mainvars),
+                    't': unicode(rqlexpr.__class__.__name__)})
+
+
+def updateeschema2rql(eschema):
+    """yield the rql statement updating an EEType entity from its schema"""
+    relations, values = eschema_relations_values(eschema)
+    values['et'] = eschema.type
+    yield 'SET %s WHERE X is EEType, X name %%(et)s' % ','.join(relations), values
+
+def updaterschema2rql(rschema):
+    """yield the rql statement updating an ERType entity from its schema"""
+    relations, values = rschema_relations_values(rschema)
+    values['rt'] = rschema.type
+    yield 'SET %s WHERE X is ERType, X name %%(rt)s' % ','.join(relations), values
+            
+def updaterdef2rql(rschema, subjtype=None, objtype=None, props=None):
+    """return a generator of (rql, values) updating relation definitions"""
+    genmap = {True: updatefrdef2rql, False: updatenfrdef2rql}
+    return __rdef2rql(genmap, rschema, subjtype, objtype, props)
+
+def updatefrdef2rql(rschema, subjtype, objtype, props):
+    """yield the rql statement updating a final relation definition"""
+    relations, values = frdef_relations_values(rschema, objtype, props)
+    values.update({'se': subjtype, 'rt': str(rschema), 'oe': objtype})
+    yield 'SET %s WHERE %s, %s, X is EFRDef' % (','.join(relations),
+                                                 _LOCATE_RDEF_RQL0,
+                                                 _LOCATE_RDEF_RQL1), values
+            
+def updatenfrdef2rql(rschema, subjtype, objtype, props):
+    """yield the rql statement updating a non final relation definition"""
+    relations, values = nfrdef_relations_values(rschema, objtype, props)
+    values.update({'se': subjtype, 'rt': str(rschema), 'oe': objtype})
+    yield 'SET %s WHERE %s, %s, X is ENFRDef' % (','.join(relations),
+                                                 _LOCATE_RDEF_RQL0,
+                                                 _LOCATE_RDEF_RQL1), values
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/securityhooks.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,91 @@
+"""Security hooks: check permissions to add/delete/update entities according to
+the user connected to a session
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from cubicweb import Unauthorized
+from cubicweb.server.pool import LateOperation
+from cubicweb.server import BEFORE_ADD_RELATIONS, ON_COMMIT_ADD_RELATIONS
+
+def check_entity_attributes(session, entity):
+    """check the session's user has 'add' permission on every attribute
+    explicitly set on `entity`; attributes only holding a schema default
+    value are skipped
+
+    raises Unauthorized when a permission check fails (see the try/except
+    around this call in after_update_entity below)
+    """
+    eid = entity.eid
+    eschema = entity.e_schema
+    # ._default_set is only there on entity creation to indicate unspecified
+    # attributes which have been set to a default value defined in the schema
+    defaults = getattr(entity, '_default_set', ())
+    for attr in entity.keys():
+        if attr in defaults:
+            continue
+        rschema = eschema.subject_relation(attr)
+        if rschema.is_final(): # non-final relations are checked by other hooks
+            # add/delete should be equivalent (XXX: unify them into 'update' ?)
+            rschema.check_perm(session, 'add', eid)
+            
+    
+class CheckEntityPermissionOp(LateOperation):
+    """late operation deferring the entity-level 'add'/'update' permission
+    check to commit (precommit) time; instantiated with entity= and action=
+    keyword arguments by the hooks below
+    """
+    def precommit_event(self):
+        #print 'CheckEntityPermissionOp', self.session.user, self.entity, self.action
+        self.entity.check_perm(self.action)
+        check_entity_attributes(self.session, self.entity)
+
+    def commit_event(self):
+        # nothing left to do once the precommit check has passed
+        pass
+            
+    
+class CheckRelationPermissionOp(LateOperation):
+    """late operation deferring a relation permission check to commit
+    (precommit) time; instantiated with action=, rschema=, fromeid= and
+    toeid= keyword arguments
+    """
+    def precommit_event(self):
+        self.rschema.check_perm(self.session, self.action, self.fromeid, self.toeid)
+
+    def commit_event(self):
+        # nothing left to do once the precommit check has passed
+        pass
+    
+def after_add_entity(session, entity):
+    """'after_add_entity' hook: defer the 'add' permission check to commit
+    time (internal/super sessions bypass security)
+    """
+    if not session.is_super_session:
+        CheckEntityPermissionOp(session, entity=entity, action='add')
+
+def after_update_entity(session, entity):
+    """'after_update_entity' hook: check the 'update' permission immediately
+    when possible, else defer it to commit time
+    """
+    if not session.is_super_session:
+        try:
+            # check user has permission right now, if not retry at commit time
+            entity.check_perm('update')
+            check_entity_attributes(session, entity)
+        except Unauthorized:
+            CheckEntityPermissionOp(session, entity=entity, action='update')
+        
+def before_del_entity(session, eid):
+    """'before_delete_entity' hook: check 'delete' permission before the
+    entity is removed (its type is still available via session.describe)
+    """
+    if not session.is_super_session:
+        # describe(eid)[0] is the entity's type name, used to get its schema
+        eschema = session.repo.schema[session.describe(eid)[0]]
+        eschema.check_perm(session, 'delete', eid)
+
+
+def before_add_relation(session, fromeid, rtype, toeid):
+    """'before_add_relation' hook: only relation types listed in
+    BEFORE_ADD_RELATIONS are checked here; others are checked after addition
+    (see after_add_relation)
+    """
+    if rtype in BEFORE_ADD_RELATIONS and not session.is_super_session:
+        rschema = session.repo.schema[rtype]
+        rschema.check_perm(session, 'add', fromeid, toeid)
+        
+def after_add_relation(session, fromeid, rtype, toeid):
+    """'after_add_relation' hook: check 'add' permission for relation types
+    not already handled by before_add_relation; types listed in
+    ON_COMMIT_ADD_RELATIONS have the check deferred to commit time
+    """
+    if not rtype in BEFORE_ADD_RELATIONS and not session.is_super_session:
+        rschema = session.repo.schema[rtype]
+        if rtype in ON_COMMIT_ADD_RELATIONS:
+            CheckRelationPermissionOp(session, action='add', rschema=rschema,
+                                      fromeid=fromeid, toeid=toeid)
+        else:
+            rschema.check_perm(session, 'add', fromeid, toeid)
+
+def before_del_relation(session, fromeid, rtype, toeid):
+    """'before_delete_relation' hook: check 'delete' permission on the
+    relation before it is removed
+    """
+    if not session.is_super_session:
+        session.repo.schema[rtype].check_perm(session, 'delete', fromeid, toeid)
+
+def register_security_hooks(hm):
+    """register security related hooks on the hooks manager"""
+    hm.register_hook(after_add_entity, 'after_add_entity', '')
+    hm.register_hook(after_update_entity, 'after_update_entity', '')
+    hm.register_hook(before_del_entity, 'before_delete_entity', '')
+    hm.register_hook(before_add_relation, 'before_add_relation', '')
+    hm.register_hook(after_add_relation, 'after_add_relation', '')
+    hm.register_hook(before_del_relation, 'before_delete_relation', '')
+    
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/server.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,149 @@
+"""Pyro RQL server
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+import os
+import sys
+import select
+import warnings
+from time import localtime, mktime
+
+from cubicweb.cwconfig import CubicWebConfiguration
+from cubicweb.server.repository import Repository
+
+class Finished(Exception):
+    """raised to remove an event from the event loop"""
+
+class TimeEvent:
+    """base class for scheduled events; dates are local time tuples as
+    returned by time.localtime()
+    """
+    # timefunc = staticmethod(localtime)
+    timefunc = localtime
+
+    def __init__(self, absolute=None, period=None):
+        # local time tuple giving the date the event should fire
+        if absolute is None:
+            absolute = self.timefunc()
+        self.absolute = absolute
+        # optional period in seconds; None means a one-shot event
+        self.period = period
+
+    def is_ready(self):
+        """return  true if the event is ready to be fired"""
+        now = self.timefunc()
+        # time tuples compare field by field (year, month, day, ...)
+        if self.absolute < now:
+            return True
+        return False
+
+    def fire(self, server):
+        """fire the event
+        must be overridden by concrete events
+        """
+        raise NotImplementedError()
+
+    def update(self):
+        """update the absolute date for the event or raise a finished exception
+        """
+        if self.period is None:
+            raise Finished
+        # NOTE(review): uses module-level localtime here instead of
+        # self.timefunc -- intentional? confirm
+        self.absolute = localtime(mktime(self.absolute) + self.period)
+
+
+class QuitEvent(TimeEvent):
+    """stop the server: shut the repository down and flag the service loop
+    (see RepositoryServer.run) to exit
+    """
+    def fire(self, server):
+        server.repo.shutdown()
+        server.quiting = True
+        
+
+class RepositoryServer(object):
+    """expose an in-memory Repository over Pyro and run its service loop,
+    firing time-based events between request handling rounds
+    """
+
+    def __init__(self, config, debug=False):
+        """make the repository available as a PyRO object"""
+        self.config = config
+        self.repo = Repository(config, debug=debug)
+        self.ns = None
+        # None while running; set to True by QuitEvent to stop the loop
+        self.quiting = None
+        # event queue
+        self.events = []
+        # start repository looping tasks
+
+    def add_event(self, event):
+        """add an event to the loop"""
+        self.info('adding event %s', event)
+        self.events.append(event)
+
+    def trigger_events(self):
+        """trigger ready events"""
+        # iterate over a copy since finished events are removed while looping
+        for event in self.events[:]:
+            if event.is_ready():
+                self.info('starting event %s', event)
+                event.fire(self)
+                try:
+                    event.update()
+                except Finished:
+                    self.events.remove(event)
+
+    def run(self, req_timeout=5.0):
+        """enter the service loop"""
+        while self.quiting is None:
+            try:
+                self.daemon.handleRequests(req_timeout)
+            except select.error:
+                # interrupted system call (e.g. by a signal): just retry
+                continue
+            self.trigger_events()
+
+    def quit(self):
+        """stop the server"""
+        self.add_event(QuitEvent())
+
+    def connect(self, host='', port=0):
+        """the connect method on the repository only register to pyro if
+        necessary
+        """
+        self.daemon = self.repo.pyro_register(host)
+
+    # server utilities ########################################################
+
+    def install_sig_handlers(self):
+        """install signal handlers"""
+        import signal
+        self.info('installing signal handlers')
+        # schedule a clean shutdown on INT/TERM rather than dying abruptly
+        signal.signal(signal.SIGINT, lambda x, y, s=self: s.quit())
+        signal.signal(signal.SIGTERM, lambda x, y, s=self: s.quit())
+
+    def daemonize(self, pid_file=None):
+        """daemonize the process"""
+        # fork so the parent can exit
+        if (os.fork()):
+            return -1
+        # disconnect from tty and create a new session
+        os.setsid()
+        # fork again so the parent, (the session group leader), can exit.
+        # as a non-session group leader, we can never regain a controlling
+        # terminal.
+        if (os.fork()):
+            return -1
+        # move to the root to avoid mount problems
+        os.chdir('/')
+        # set paranoid umask (077 is a Python 2 octal literal, i.e. 0o77)
+        os.umask(077)
+        if pid_file is not None:
+            # write pid in a file
+            f = open(pid_file, 'w')
+            f.write(str(os.getpid()))
+            f.close()
+        # filter warnings
+        warnings.filterwarnings('ignore')
+        # close standard descriptors
+        sys.stdin.close()
+        sys.stdout.close()
+        sys.stderr.close()
+
+from logging import getLogger
+from cubicweb import set_log_methods
+LOGGER = getLogger('cubicweb.reposerver')
+# NOTE(review): this attaches log methods (info/warning/...) to
+# CubicWebConfiguration, yet RepositoryServer above calls self.info();
+# shouldn't this be set_log_methods(RepositoryServer, LOGGER)? -- confirm
+set_log_methods(CubicWebConfiguration, LOGGER)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/serverconfig.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,275 @@
+"""server.serverconfig definition
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+import os
+from os.path import join, exists
+
+from logilab.common.configuration import Method
+from logilab.common.decorators import wproperty, cached, clear_cache
+
+from cubicweb import CW_SOFTWARE_ROOT, RegistryNotFound
+from cubicweb.toolsutils import env_path, read_config
+from cubicweb.cwconfig import CubicWebConfiguration, merge_options
+
+
+class ServerConfiguration(CubicWebConfiguration):
+    """standalone RQL server configuration"""
+    name = 'repository'
+    # resource directories depend on the run mode (apycot test env, dev, prod)
+    # NOTE(review): BACKUP_DIR is not defined in the APYCOT_ROOT branch, so
+    # backup_dir() would fail there -- confirm this is intended
+    if os.environ.get('APYCOT_ROOT'):
+        root = os.environ['APYCOT_ROOT']
+        SCHEMAS_LIB_DIR = '%s/local/share/cubicweb/schemas/' % root
+    elif CubicWebConfiguration.mode == 'dev':
+        SCHEMAS_LIB_DIR = join(CW_SOFTWARE_ROOT, 'schemas')
+        BACKUP_DIR = CubicWebConfiguration.RUNTIME_DIR
+    else:
+        SCHEMAS_LIB_DIR = '/usr/share/cubicweb/schemas/'
+        BACKUP_DIR = '/var/lib/cubicweb/backup/'
+
+    # additional subdirectories searched for application objects
+    cubicweb_vobject_path = CubicWebConfiguration.cubicweb_vobject_path | set(['sobjects'])
+    cube_vobject_path = CubicWebConfiguration.cube_vobject_path | set(['sobjects', 'hooks'])
+
+    options = merge_options((
+        # ctl configuration
+        ('host',
+         {'type' : 'string',
+          'default': None,
+          'help': 'host name if not correctly detectable through gethostname',
+          'group': 'main', 'inputlevel': 1,
+          }),
+        ('pid-file',
+         {'type' : 'string',
+          'default': Method('default_pid_file'),
+          'help': 'repository\'s pid file',
+          'group': 'main', 'inputlevel': 2,
+          }),
+        ('uid',
+         {'type' : 'string',
+          'default': None,
+          'help': 'if this option is set, use the specified user to start \
+the repository rather than the user running the command',
+          'group': 'main', 'inputlevel': 0,
+          }),
+        ('session-time',
+         {'type' : 'int',
+          'default': 30*60,
+          'help': 'session expiration time, default to 30 minutes',
+          'group': 'main', 'inputlevel': 1,
+          }),
+        ('connections-pool-size',
+         {'type' : 'int',
+          'default': 4,
+          'help': 'size of the connections pools. Each source supporting multiple \
+connections will have this number of opened connections.',
+          'group': 'main', 'inputlevel': 1,
+          }),
+        ('rql-cache-size',
+         {'type' : 'int',
+          'default': 300,
+          'help': 'size of the parsed rql cache size.',
+          'group': 'main', 'inputlevel': 1,
+          }),
+        # email configuration
+        ('default-recipients-mode',
+         {'type' : 'choice',
+          'choices' : ('default-dest-addrs', 'users', 'none'),
+          'default': 'default-dest-addrs',
+          'help': 'when a notification should be sent with no specific rules \
+to find recipients, recipients will be found according to this mode. Available \
+modes are "default-dest-addrs" (emails specified in the configuration \
+variable with the same name), "users" (every users which has activated \
+account with an email set), "none" (no notification).',
+          'group': 'email', 'inputlevel': 1,
+          }),
+        ('default-dest-addrs',
+         {'type' : 'csv',
+          'default': (),
+          'help': 'comma separated list of email addresses that will be used \
+as default recipient when an email is sent and the notification has no \
+specific recipient rules.',
+          'group': 'email', 'inputlevel': 1,
+          }),
+        ('supervising-addrs',
+         {'type' : 'csv',
+          'default': (),
+          'help': 'comma separated list of email addresses that will be \
+notified of every changes.',
+          'group': 'email', 'inputlevel': 2,
+          }),
+        # pyro server.serverconfig
+        ('pyro-port',
+         {'type' : 'int',
+          'default': None,
+          'help': 'Pyro server port. If not set, it will be choosen randomly',
+          'group': 'pyro-server', 'inputlevel': 2,
+          }),
+        ('pyro-id', # XXX reuse pyro-application-id
+         {'type' : 'string',
+          'default': None,
+          'help': 'identifier of the repository in the pyro name server',
+          'group': 'pyro-server', 'inputlevel': 2,
+          }),
+        ) + CubicWebConfiguration.options)
+        
+    # read the schema from the database
+    read_application_schema = True
+    bootstrap_schema = True
+
+    # check user's state at login time
+    consider_user_state = True
+
+    # hooks registration configuration
+    # all hooks should be activated during normal execution
+    core_hooks = True
+    usergroup_hooks = True
+    schema_hooks = True
+    notification_hooks = True
+    security_hooks = True
+    application_hooks = True
+
+    # should some hooks be deactivated during [pre|post]create script execution
+    free_wheel = False
+    
+    # list of enabled sources when sources restriction is necessary
+    # (eg repository initialization at least)
+    _enabled_sources = None
+    @wproperty
+    def enabled_sources(self, sourceuris=None):
+        """write-only property restricting the repository to the given source
+        uris; invalidates the cached sources() dictionary
+        """
+        self._enabled_sources = sourceuris
+        clear_cache(self, 'sources')
+        
+    @classmethod
+    def schemas_lib_dir(cls):
+        """application schema directory (overridable via CW_SCHEMA_LIB)"""
+        return env_path('CW_SCHEMA_LIB', cls.SCHEMAS_LIB_DIR, 'schemas')
+
+    @classmethod
+    def backup_dir(cls):
+        """backup directory where db backups are stored before migration
+        (overridable via CW_BACKUP)
+        """
+        return env_path('CW_BACKUP', cls.BACKUP_DIR, 'run time')
+
+    def bootstrap_cubes(self):
+        """initialize used cubes from the 'bootstrap_cubes' file written at
+        creation time; only the first non-comment line is used
+        """
+        from logilab.common.textutils import get_csv
+        for line in file(join(self.apphome, 'bootstrap_cubes')):
+            line = line.strip()
+            if not line or line.startswith('#'):
+                continue
+            self.init_cubes(self.expand_cubes(get_csv(line)))
+            break
+        else:
+            # no cubes
+            self.init_cubes(())
+
+    def write_bootstrap_cubes_file(self, cubes):
+        """write the 'bootstrap_cubes' file listing cubes used by the
+        application (read back by bootstrap_cubes above)
+        """
+        stream = file(join(self.apphome, 'bootstrap_cubes'), 'w')
+        stream.write('# this is a generated file only used for bootstraping\n')
+        stream.write('# you should not have to edit this\n')
+        stream.write('%s\n' % ','.join(cubes))
+        stream.close()
+        
+    def sources_file(self):
+        """return the path of the application's sources configuration file"""
+        return join(self.apphome, 'sources')
+
+    # this method has to be cached since when the server is running using a
+    # restricted user, this user usually don't have access to the sources
+    # configuration file (#16102)
+    @cached
+    def sources(self):
+        """return a dictionary containing sources definitions indexed by
+        sources'uri
+        """
+        allsources = read_config(self.sources_file())
+        if self._enabled_sources is None:
+            return allsources
+        # 'admin' is always kept since it holds the manager account
+        return dict((uri, config) for uri, config in allsources.items()
+                    if uri in self._enabled_sources or uri == 'admin')
+    
+    def pyro_enabled(self):
+        """pyro is always enabled in standalone repository configuration"""
+        return True
+
+    def load_hooks(self, vreg):
+        """return a {event: {ertype: [callbacks]}} mapping, merging hooks
+        from deprecated application_hooks.py files with hooks registered in
+        the vobject registry
+        """
+        hooks = {}
+        for path in reversed([self.apphome] + self.cubes_path()):
+            hooksfile = join(path, 'application_hooks.py')
+            if exists(hooksfile):
+                self.warning('application_hooks.py is deprecated, use dynamic '
+                             'objects to register hooks (%s)', hooksfile)
+                context = {}
+                # Use execfile rather than `load_module_from_name` because 
+                # the latter gets fooled by the `sys.modules` cache when 
+                # loading different configurations one after the other
+                # (another fix would have been to do :
+                #    sys.modules.pop('applications_hooks')
+                #  or to modify load_module_from_name so that it provides
+                #  a use_cache optional parameter
+                execfile(hooksfile, context, context)
+                for event, hooksdef in context['HOOKS'].items():
+                    for ertype, hookcbs in hooksdef.items():
+                        hooks.setdefault(event, {}).setdefault(ertype, []).extend(hookcbs)
+        try:
+            apphookdefs = vreg.registry_objects('hooks')
+        except RegistryNotFound:
+            return hooks
+        for hookdef in apphookdefs:
+            for event, ertype in hookdef.register_to():
+                if ertype == 'Any':
+                    # '' is the "any type" key in the hooks mapping
+                    ertype = ''
+                cb = hookdef.make_callback(event)
+                hooks.setdefault(event, {}).setdefault(ertype, []).append(cb)
+        return hooks
+    
+    def load_schema(self, expand_cubes=False):
+        """load and return the application schema from the filesystem
+
+        if `expand_cubes`, cube dependencies are temporarily expanded so the
+        full filesystem schema is read, then the original cubes are restored
+        """
+        from cubicweb.schema import CubicWebSchemaLoader
+        if expand_cubes:
+            # in case some new dependencies have been introduced, we have to
+            # reinitialize cubes so the full filesystem schema is read
+            origcubes = self.cubes()
+            self._cubes = None
+            self.init_cubes(self.expand_cubes(origcubes))
+        schema = CubicWebSchemaLoader().load(self)
+        if expand_cubes:
+            # restore original value
+            self._cubes = origcubes
+        return schema
+
+    def load_bootstrap_schema(self):
+        """load and return the minimal bootstrap schema"""
+        from cubicweb.schema import BootstrapSchemaLoader
+        schema = BootstrapSchemaLoader().load(self)
+        schema.name = 'bootstrap'
+        return schema
+    
+    def set_sources_mode(self, sources):
+        """restrict enabled sources according to `sources`, which is either
+        ['migration'], ['all'], or an explicit list of known source uris
+        """
+        if 'migration' in sources:
+            from cubicweb.server.sources import source_adapter
+            assert len(sources) == 1
+            enabled_sources = []
+            for uri, config in self.sources().iteritems():
+                if uri == 'admin':
+                    continue
+                if source_adapter(config).connect_for_migration:
+                    enabled_sources.append(uri)
+                else:
+                    print 'not connecting to source', uri, 'during migration'
+        elif 'all' in sources:
+            assert len(sources) == 1
+            # None means no restriction (see sources() above)
+            enabled_sources= None
+        else:
+            known_sources = self.sources()
+            for uri in sources:
+                assert uri in known_sources, uri
+            enabled_sources = sources
+        self._enabled_sources = enabled_sources
+        clear_cache(self, 'sources')
+        
+    def migration_handler(self, schema=None, interactive=True,
+                          cnx=None, repo=None, connect=True):
+        """return a migration handler instance"""
+        from cubicweb.server.migractions import ServerMigrationHelper
+        return ServerMigrationHelper(self, schema, interactive=interactive,
+                                     cnx=cnx, repo=repo, connect=connect,
+                                     verbosity=getattr(self, 'verbosity', 0))
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/serverctl.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,733 @@
+"""cubicweb-ctl commands and command handlers specific to the server.serverconfig
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+import os
+
+from logilab.common.configuration import REQUIRED, Configuration, ini_format_section
+
+from cubicweb import AuthenticationError, ExecutionError, ConfigurationError
+from cubicweb.toolsutils import Command, CommandHandler, pop_arg, cmd_run, \
+     register_commands, confirm, restrict_perms_to_user
+from cubicweb.server.serverconfig import ServerConfiguration
+
+
+# utility functions ###########################################################
+
+def source_cnx(source, dbname=None, special_privs=False):
+    """open and return a connection to the system database defined in the
+    given source configuration dictionary
+
+    :param dbname: optional database name overriding source['db-name']
+    :param special_privs: truthy when elevated rights are needed; the user
+      is then prompted interactively for credentials
+    """
+    from getpass import getpass
+    from logilab.common.db import get_connection
+    dbhost = source['db-host']
+    if dbname is None:
+        dbname = source['db-name']
+    driver = source['db-driver']
+    print '**** connecting to %s database %s@%s' % (driver, dbname, dbhost),
+    if not special_privs and source.get('db-user'):
+        user = source['db-user']
+        print 'as', user
+        if source.get('db-password'):
+            password = source['db-password']
+        else:
+            password = getpass('password: ')
+    else:
+        print
+        if special_privs:
+            print 'WARNING'
+            print 'the user will need the following special access rights on the database:'
+            print special_privs
+            print
+        default_user = source.get('db-user', os.environ.get('USER', ''))
+        user = raw_input('user (%r by default): ' % default_user)
+        user = user or default_user
+        # reuse the configured password when the configured user was kept
+        if user == source.get('db-user') and source.get('db-password'):
+            password = source['db-password']
+        else:
+            password = getpass('password: ')
+    return get_connection(driver, dbhost, dbname, user, password=password,
+                          port=source.get('db-port'))
+
+def system_source_cnx(source, dbms_system_base=False, special_privs=None):
+    """shortcut to get a connection to the application system database
+    defined in the given config. If <dbms_system_base> is True,
+    connect to the dbms system database instead (for task such as
+    create/drop the application database)
+    """
+    if dbms_system_base:
+        from logilab.common.adbh import get_adv_func_helper
+        system_db = get_adv_func_helper(source['db-driver']).system_database()
+        special_privs = special_privs or 'CREATE/DROP DATABASE'
+        return source_cnx(source, system_db, special_privs=special_privs)
+    return source_cnx(source, special_privs=special_privs)
+
+def _db_sys_cnx(source, what, db=None, user=None):
+    """return a connection on the RDMS system table (to create/drop a user
+    or a database)
+
+    `what` is 'CREATE' or 'DROP', used to build the privileges warning shown
+    to the user when prompting for credentials
+    """
+    from logilab.common.adbh import get_adv_func_helper
+    special_privs = ''
+    driver = source['db-driver']
+    helper = get_adv_func_helper(driver)
+    if user is not None and helper.users_support:
+        special_privs += '%s USER' % what
+    if db is not None:
+        special_privs += ' %s DATABASE' % what
+    # connect on the dbms system base to create our base
+    cnx = system_source_cnx(source, True, special_privs=special_privs)
+    # disable autocommit (isolation_level(1)) because DROP and
+    # CREATE DATABASE can't be executed in a transaction
+    try:
+        cnx.set_isolation_level(0)
+    except AttributeError:
+        # set_isolation_level() is psycopg specific
+        pass
+    return cnx
+    
+def generate_sources_file(sourcesfile, sourcescfg, keys=None):
+    """serialize repository's sources configuration into an INI like file
+
+    the `keys` parameter may be used to sort sections
+    """
+    from cubicweb.server.sources import SOURCE_TYPES
+    if keys is None:
+        keys = sourcescfg.keys()
+    else:
+        # append sources not explicitly ordered, after the requested ones
+        for key in sourcescfg:
+            if not key in keys:
+                keys.append(key)
+    stream = open(sourcesfile, 'w')
+    for uri in keys:
+        sconfig = sourcescfg[uri]
+        if isinstance(sconfig, dict):
+            # get a Configuration object
+            _sconfig = Configuration(options=SOURCE_TYPES[sconfig['adapter']].options)
+            for attr, val in sconfig.items():
+                if attr == 'uri': 
+                    continue
+                if attr == 'adapter':
+                    _sconfig.adapter = val
+                else:
+                    _sconfig.set_option(attr, val)
+            sconfig = _sconfig
+        optsbysect = list(sconfig.options_by_section())
+        assert len(optsbysect) == 1
+        ini_format_section(stream, uri, optsbysect[0][1])
+        if hasattr(sconfig, 'adapter'):
+            print >> stream
+            print >> stream, '# adapter for this source (YOU SHOULD NOT CHANGE THIS)'
+            print >> stream, 'adapter=%s' % sconfig.adapter
+        print >> stream
+
+def repo_cnx(config):
+    """return an in-memory repository and a db api connection to it,
+    prompting for manager credentials until authentication succeeds
+    """
+    from cubicweb.dbapi import in_memory_cnx
+    from cubicweb.server.utils import manager_userpasswd
+    try:
+        login = config.sources()['admin']['login']
+        pwd = config.sources()['admin']['password']
+    except KeyError:
+        login, pwd = manager_userpasswd()
+    while True:
+        try:
+            return in_memory_cnx(config, login, pwd)
+        except AuthenticationError:
+            print 'wrong user/password'
+        login, pwd = manager_userpasswd()
+    
+# repository specific command handlers ########################################
+
+class RepositoryCreateHandler(CommandHandler):
+    """handler for the 'create' cubicweb-ctl command on a repository
+    configuration: asks for sources/email/pyro settings and writes the
+    'sources' and 'bootstrap_cubes' files
+    """
+    cmdname = 'create'
+    cfgname = 'repository'
+
+    def bootstrap(self, cubes, inputlevel=0):
+        """create an application by copying files from the given cube and by
+        asking information necessary to build required configuration files
+        """
+        from cubicweb.server.sources import SOURCE_TYPES
+        config = self.config
+        print 'application\'s repository configuration'
+        print '-' * 72
+        config.input_config('email', inputlevel)
+        if config.pyro_enabled():
+            config.input_config('pyro-server', inputlevel)
+        print
+        print 'repository sources configuration'
+        print '-' * 72
+        sourcesfile = config.sources_file()
+        sconfig = Configuration(options=SOURCE_TYPES['native'].options)
+        sconfig.adapter = 'native'
+        sconfig.input_config(inputlevel=inputlevel)
+        sourcescfg = {'system': sconfig}
+        # optionally add extra (non system) sources interactively
+        while raw_input('enter another source [y/N]: ').strip().lower() == 'y':
+            sourcetype = raw_input('source type (%s): ' % ', '.join(SOURCE_TYPES.keys()))
+            sconfig = Configuration(options=SOURCE_TYPES[sourcetype].options)
+            sconfig.adapter = sourcetype
+            sourceuri = raw_input('source uri: ').strip()
+            assert not sourceuri in sourcescfg
+            sconfig.input_config(inputlevel=inputlevel)
+            sourcescfg[sourceuri] = sconfig
+            # module names look like cubes.mycube.themodule
+            sourcecube = SOURCE_TYPES[sourcetype].module.split('.', 2)[1]
+            # if the source adapter is coming from an external component, ensure
+            # it's specified in used cubes
+            if sourcecube != 'cubicweb' and not sourcecube in cubes:
+                cubes.append(sourcecube)
+        # USER_OPTIONS is the manager account definition (see below)
+        sconfig = Configuration(options=USER_OPTIONS)
+        sconfig.input_config(inputlevel=inputlevel)
+        sourcescfg['admin'] = sconfig
+        generate_sources_file(sourcesfile, sourcescfg, ['admin', 'system'])
+        restrict_perms_to_user(sourcesfile)
+        # remember selected cubes for later initialization of the database
+        config.write_bootstrap_cubes_file(cubes)
+
+    def postcreate(self):
+        if confirm('do you want to create repository\'s system database?'):
+            cmd_run('db-create', self.config.appid)
+        else:
+            print 'nevermind, you can do it later using the db-create command'
+            
+# options describing the cubicweb manager account, prompted for at creation
+# time and stored in the 'admin' section of the sources file
+USER_OPTIONS =  (
+    ('login', {'type' : 'string',
+               'default': REQUIRED,
+               'help': "cubicweb manager account's login "
+               '(this user will be created)',
+               'inputlevel': 0,
+               }),
+    ('password', {'type' : 'password',
+                  'help': "cubicweb manager account's password",
+                  'inputlevel': 0,
+                  }),
+    )
+
+
+class RepositoryDeleteHandler(CommandHandler):
+    """handler for the 'delete' cubicweb-ctl command on a repository
+    configuration: drop the system database (and optionally its db user)
+    """
+    cmdname = 'delete'
+    cfgname = 'repository'
+
+    def cleanup(self):
+        """remove application's configuration and database"""
+        from logilab.common.adbh import get_adv_func_helper
+        source = self.config.sources()['system']
+        dbname = source['db-name']
+        helper = get_adv_func_helper(source['db-driver'])
+        if confirm('delete database %s ?' % dbname):
+            user = source['db-user'] or None
+            cnx = _db_sys_cnx(source, 'DROP DATABASE', user=user)
+            cursor = cnx.cursor()
+            try:
+                cursor.execute('DROP DATABASE %s' % dbname)
+                print 'database %s dropped' % dbname
+                # XXX should check we are not connected as user
+                if user and helper.users_support and \
+                       confirm('delete user %s ?' % user, default_is_yes=False):
+                    cursor.execute('DROP USER %s' % user)
+                    print 'user %s dropped' % user
+                cnx.commit()
+            except:
+                cnx.rollback()
+                raise
+
+    
+class RepositoryStartHandler(CommandHandler):
+    """handler for the 'start' cubicweb-ctl command on a repository
+    configuration
+    """
+    cmdname = 'start'
+    cfgname = 'repository'
+
+    def start_command(self, ctlconf, debug):
+        """return the shell command line used to start this repository"""
+        command = ['cubicweb-ctl start-repository ']
+        if debug:
+            command.append('--debug')
+        command.append(self.config.appid)
+        return ' '.join(command)
+        
+
+class RepositoryStopHandler(CommandHandler):
+    """handler for the 'stop' cubicweb-ctl command on a repository
+    configuration
+    """
+    cmdname = 'stop'
+    cfgname = 'repository'
+
+    def poststop(self):
+        """if pyro is enabled, ensure the repository is correctly
+        unregistered
+        """
+        if self.config.pyro_enabled():
+            from cubicweb.server.repository import pyro_unregister
+            pyro_unregister(self.config)
+    
+
+# repository specific commands ################################################
+class CreateApplicationDBCommand(Command):
+    """Create the system database of an application (run after 'create').
+    
+    You will be prompted for a login / password to use to connect to
+    the system database.  The given user should have almost all rights
+    on the database (ie a super user on the dbms allowed to create
+    database, users, languages...).
+
+    <application>
+      the identifier of the application to initialize.
+    """
+    name = 'db-create'
+    arguments = '<application>'
+    
+    options = (
+        ("create-db",
+         {'short': 'c', 'type': "yn", 'metavar': '<y or n>',
+          'default': True,
+          'help': 'create the database (yes by default)'}),
+        )
+    def run(self, args):
+        """run the command with its specific arguments"""
+        from logilab.common.adbh import get_adv_func_helper
+        from indexer import get_indexer
+        appid = pop_arg(args, msg="No application specified !")
+        config = ServerConfiguration.config_for(appid)
+        create_db = self.config.create_db
+        source = config.sources()['system']
+        driver = source['db-driver']
+        helper = get_adv_func_helper(driver)
+        if create_db:
+            # connect on the dbms system base to create our base
+            dbcnx = _db_sys_cnx(source, 'CREATE DATABASE and / or USER')
+            cursor = dbcnx.cursor()
+            try:
+                if helper.users_support:
+                    user = source['db-user']
+                    if not helper.user_exists(cursor, user) and \
+                           confirm('create db user %s ?' % user, default_is_yes=False):
+                        helper.create_user(source['db-user'], source['db-password'])
+                        print 'user %s created' % user
+                dbname = source['db-name']
+                if dbname in helper.list_databases(cursor):
+                    if confirm('DB %s already exists -- do you want to drop it ?' % dbname):
+                        cursor.execute('DROP DATABASE %s' % dbname)
+                    else:
+                        return
+                if dbcnx.logged_user != source['db-user']:
+                    helper.create_database(cursor, dbname, source['db-user'],
+                                           source['db-encoding'])
+                else:
+                    helper.create_database(cursor, dbname,
+                                           encoding=source['db-encoding'])
+                dbcnx.commit()
+                print 'database %s created' % source['db-name']
+            except:
+                dbcnx.rollback()
+                raise
+        cnx = system_source_cnx(source, special_privs='LANGUAGE C') 
+        cursor = cnx.cursor()
+        indexer = get_indexer(driver)
+        indexer.init_extensions(cursor)
+        # postgres specific stuff        
+        if driver == 'postgres':
+            # install plpythonu/plpgsql language if not installed by the cube
+            for extlang in ('plpythonu', 'plpgsql'):
+                helper.create_language(cursor, extlang)
+        cursor.close()
+        cnx.commit()
+        print 'database for application %s created and necessary extensions installed' % appid
+        print
+        if confirm('do you want to initialize the system database?'):
+            cmd_run('db-init', config.appid)
+        else:
+            print 'nevermind, you can do it later using the db-init command'
+
+    
+class InitApplicationCommand(Command):
+    """Initialize the system database of an application (run after 'db-create').
+    
+    You will be prompted for a login / password to use to connect to
+    the system database.  The given user should have the create tables,
+    and grant permissions.
+
+    <application>
+      the identifier of the application to initialize.
+    """
+    name = 'db-init'
+    arguments = '<application>'
+    
+    options = (
+        ("drop",
+         {'short': 'd', 'action': 'store_true',
+          'default': False,
+          'help': 'insert drop statements to remove previously existing \
+tables, indexes... (no by default)'}),
+        )
+
+    def run(self, args):
+        from cubicweb.server import init_repository
+        appid = pop_arg(args, msg="No application specified !")
+        config = ServerConfiguration.config_for(appid)
+        init_repository(config, drop=self.config.drop)
+
+
+class GrantUserOnApplicationCommand(Command):
+    """Grant a database user on a repository system database.
+    
+    <application>
+      the identifier of the application
+    <user>
+      the database's user requiring grant access
+    """
+    name = 'db-grant-user'
+    arguments = '<application> <user>'
+
+    options = (
+        ("set-owner",
+         {'short': 'o', 'type' : "yn", 'metavar' : '<yes or no>', 
+          'default' : False,
+          'help': 'Set the user as tables owner if yes (no by default).'}
+         ),
+        )
+    def run(self, args):
+        """run the command with its specific arguments"""
+        from cubicweb.server.sqlutils import sqlexec, sqlgrants
+        appid = pop_arg(args, 1, msg="No application specified !")
+        user = pop_arg(args, msg="No user specified !")
+        config = ServerConfiguration.config_for(appid)
+        source = config.sources()['system']
+        set_owner = self.config.set_owner
+        cnx = system_source_cnx(source, special_privs='GRANT')
+        cursor = cnx.cursor()
+        schema = config.load_schema()
+        try:
+            sqlexec(sqlgrants(schema, source['db-driver'], user,
+                              set_owner=set_owner), cursor)
+        except Exception, ex:
+            cnx.rollback()
+            import traceback
+            traceback.print_exc()
+            print 'An error occurred:', ex
+        else:
+            cnx.commit()
+            print 'grants given to %s on application %s' % (user, appid)
+
+
+    
+class StartRepositoryCommand(Command):
+    """Start a CubicWeb RQL server for a given application.
+    
+    The server will be accessible through pyro
+
+    <application>
+      the identifier of the application to initialize.
+    """
+    name = 'start-repository'
+    arguments = '<application>'
+    
+    options = (
+        ("debug",
+         {'short': 'D', 'action' : 'store_true',
+          'help': 'start server in debug mode.'}),
+        )
+
+    def run(self, args):
+        from cubicweb.server.server import RepositoryServer
+        appid = pop_arg(args, msg="No application specified !")
+        config = ServerConfiguration.config_for(appid)
+        debug = self.config.debug
+        # create the server
+        server = RepositoryServer(config, debug)
+        # go ! (don't daemonize in debug mode)
+        if not debug and server.daemonize(config['pid-file']) == -1:
+            return
+        uid = config['uid']
+        if uid is not None:
+            try:
+                uid = int(uid)
+            except ValueError:
+                from pwd import getpwnam
+                uid = getpwnam(uid).pw_uid
+            os.setuid(uid)
+        server.install_sig_handlers()
+        server.connect(config['host'], 0)
+        server.run()
+
+
+def _remote_dump(host, appid, output, sudo=False):
+    dmpcmd = 'cubicweb-ctl db-dump -o /tmp/%s.dump %s' % (appid, appid)
+    if sudo:
+        dmpcmd = 'sudo %s' % (dmpcmd)
+    dmpcmd = 'ssh -t %s "%s"' % (host, dmpcmd)
+    print dmpcmd
+    if os.system(dmpcmd):
+        raise ExecutionError('Error while dumping the database')
+    if output is None:
+        from mx.DateTime import today
+        date = today().strftime('%Y-%m-%d')
+        output = '%s-%s.dump' % (appid, date)
+    cmd = 'scp %s:/tmp/%s.dump %s' % (host, appid, output)
+    print cmd
+    if os.system(cmd):
+        raise ExecutionError('Error while retrieving the dump')
+    rmcmd = 'ssh -t %s "rm -f /tmp/%s.dump"' % (host, appid)
+    print rmcmd
+    if os.system(rmcmd) and not confirm('an error occurred while deleting remote dump. Continue anyway?'):
+        raise ExecutionError('Error while deleting remote dump')
+
+def _local_dump(appid, output):
+    config = ServerConfiguration.config_for(appid)
+    # schema=1 to avoid unnecessary schema loading
+    mih = config.migration_handler(connect=False, schema=1)
+    mih.backup_database(output, askconfirm=False)
+
+def _local_restore(appid, backupfile, drop):
+    config = ServerConfiguration.config_for(appid)
+    # schema=1 to avoid unnecessary schema loading
+    mih = config.migration_handler(connect=False, schema=1)
+    mih.restore_database(backupfile, drop)
+    repo = mih.repo_connect()
+    # version of the database
+    dbversions = repo.get_versions()
+    mih.shutdown()
+    if not dbversions:
+        print "bad or missing version information in the database, don't upgrade file system"
+        return
+    # version of installed software
+    eversion = dbversions['cubicweb']
+    status = application_status(config, eversion, dbversions)
+    # * database version > installed software
+    if status == 'needsoftupgrade':
+        print "database is using some more recent version than installed software!"
+        print "please upgrade your software and then upgrade the instance"
+        print "using command 'cubicweb-ctl upgrade %s'" % config.appid
+        return
+    # * database version < installed software, an upgrade will be necessary
+    #   anyway, just rewrite vc.conf and warn user he has to upgrade
+    if status == 'needapplupgrade':
+        print "database is using some older version than installed software."
+        print "You'll have to upgrade the instance using command"
+        print "'cubicweb-ctl upgrade %s'" % config.appid
+        return
+    # * database version = installed software, database version = instance fs version
+    #   ok!
+
+
+def application_status(config, cubicwebapplversion, vcconf):
+    cubicwebversion = config.cubicweb_version()
+    if cubicwebapplversion > cubicwebversion:
+        return 'needsoftupgrade'
+    if cubicwebapplversion < cubicwebversion:
+        return 'needapplupgrade'
+    for cube in config.cubes():
+        try:
+            softversion = config.cube_version(cube)
+        except ConfigurationError:
+            print "no cube version information for %s, is the cube installed?" % cube
+            continue
+        try:
+            applversion = vcconf[cube]
+        except KeyError:
+            print "no cube version information for %s in version configuration" % cube
+            continue            
+        if softversion == applversion:
+            continue
+        if softversion > applversion:
+            return 'needsoftupgrade'
+        elif softversion < applversion:
+            return 'needapplupgrade'
+    return None
+    
+
+class DBDumpCommand(Command):
+    """Backup the system database of an application.
+    
+    <application>
+      the identifier of the application to backup
+      format [[user@]host:]appname
+    """
+    name = 'db-dump'
+    arguments = '<application>'
+
+    options = (
+        ("output",
+         {'short': 'o', 'type' : "string", 'metavar' : '<file>', 
+          'default' : None,
+          'help': 'Specify the backup file where the backup will be stored.'}
+         ),
+        ('sudo',
+         {'short': 's', 'action' : 'store_true',
+          'default' : False,
+          'help': 'Use sudo on the remote host.'}
+         ),
+        )
+
+    def run(self, args):
+        appid = pop_arg(args, 1, msg="No application specified !")
+        if ':' in appid:
+            host, appid = appid.split(':')
+            _remote_dump(host, appid, self.config.output, self.config.sudo)
+        else:
+            _local_dump(appid, self.config.output)
+
+
+class DBRestoreCommand(Command):
+    """Restore the system database of an application.
+    
+    <application>
+      the identifier of the application to restore
+    """
+    name = 'db-restore'
+    arguments = '<application> <backupfile>'
+
+    options = (
+        ("no-drop",
+         {'short': 'n', 'action' : 'store_true', 
+          'default' : False,
+          'help': 'for some reason the database doesn\'t exist and so '
+          'should not be dropped.'}
+         ),
+        )
+
+    def run(self, args):
+        appid = pop_arg(args, 1, msg="No application specified !")
+        backupfile = pop_arg(args, msg="No backup file specified !")
+        _local_restore(appid, backupfile, not self.config.no_drop)
+
+
+class DBCopyCommand(Command):
+    """Copy the system database of an application (backup and restore).
+    
+    <src-application>
+      the identifier of the application to backup
+      format [[user@]host:]appname
+
+    <dest-application>
+      the identifier of the application to restore
+    """
+    name = 'db-copy'
+    arguments = '<src-application> <dest-application>'
+
+    options = (
+        ("no-drop",
+         {'short': 'n', 'action' : 'store_true', 
+          'default' : False,
+          'help': 'For some reason the database doesn\'t exist and so '
+          'should not be dropped.'}
+         ),
+        ("keep-dump",
+         {'short': 'k', 'action' : 'store_true',
+          'default' : False,
+          'help': 'Specify that the dump file should not be automatically removed.'}
+         ),
+        ('sudo',
+         {'short': 's', 'action' : 'store_true',
+          'default' : False,
+          'help': 'Use sudo on the remote host.'}
+         ),
+        )
+
+    def run(self, args):
+        import tempfile
+        srcappid = pop_arg(args, 1, msg="No source application specified !")
+        destappid = pop_arg(args, msg="No destination application specified !")
+        output = tempfile.mktemp()
+        if ':' in srcappid:
+            host, srcappid = srcappid.split(':')
+            _remote_dump(host, srcappid, output, self.config.sudo)
+        else:
+            _local_dump(srcappid, output)
+        _local_restore(destappid, output, not self.config.no_drop)
+        if self.config.keep_dump:
+            print 'you can get the dump file at', output
+        else:
+            os.remove(output)
+
+        
+class CheckRepositoryCommand(Command):
+    """Check integrity of the system database of an application.
+    
+    <application>
+      the identifier of the application to check
+    """
+    name = 'db-check'
+    arguments = '<application>'
+
+    options = (
+        ("checks",
+         {'short': 'c', 'type' : "csv", 'metavar' : '<check list>', 
+          'default' : ('entities', 'relations', 'metadata', 'schema', 'text_index'),
+          'help': 'Comma separated list of check to run. By default run all \
+checks, i.e. entities, relations, text_index and metadata.'}
+         ),
+        
+        ("autofix",
+         {'short': 'a', 'type' : "yn", 'metavar' : '<yes or no>', 
+          'default' : False,
+          'help': 'Automatically correct integrity problems if this option \
+is set to "y" or "yes", else only display them'}
+         ),
+        ("reindex",
+         {'short': 'r', 'type' : "yn", 'metavar' : '<yes or no>', 
+          'default' : False,
+          'help': 're-indexes the database for full text search if this \
+option is set to "y" or "yes" (may be long for large database).'}
+         ),
+        
+        )
+
+    def run(self, args):
+        from cubicweb.server.checkintegrity import check
+        appid = pop_arg(args, 1, msg="No application specified !")
+        config = ServerConfiguration.config_for(appid)
+        repo, cnx = repo_cnx(config)
+        check(repo, cnx,
+              self.config.checks, self.config.reindex, self.config.autofix)
+
+
+class RebuildFTICommand(Command):
+    """Rebuild the full-text index of the system database of an application.
+    
+    <application>
+      the identifier of the application to rebuild
+    """
+    name = 'db-rebuild-fti'
+    arguments = '<application>'
+
+    options = ()
+
+    def run(self, args):
+        from cubicweb.server.checkintegrity import reindex_entities
+        appid = pop_arg(args, 1, msg="No application specified !")
+        config = ServerConfiguration.config_for(appid)
+        repo, cnx = repo_cnx(config)
+        session = repo._get_session(cnx.sessionid, setpool=True)
+        reindex_entities(repo.schema, session)
+        cnx.commit()
+
+    
+class SynchronizeApplicationSchemaCommand(Command):
+    """Synchronize persistent schema with cube schema.
+        
+    Will synchronize common stuff between the cube schema and the
+    actual persistent schema, but will not add/remove any entity or relation.
+
+    <application>
+      the identifier of the application to synchronize.
+    """
+    name = 'schema-sync'
+    arguments = '<application>'
+
+    def run(self, args):
+        appid = pop_arg(args, msg="No application specified !")
+        config = ServerConfiguration.config_for(appid)
+        mih = config.migration_handler()
+        mih.cmd_synchronize_schema()
+
+
+register_commands( (CreateApplicationDBCommand,                   
+                    InitApplicationCommand,
+                    GrantUserOnApplicationCommand,
+                    StartRepositoryCommand,
+                    DBDumpCommand,
+                    DBRestoreCommand,
+                    DBCopyCommand,
+                    CheckRepositoryCommand,
+                    RebuildFTICommand,
+                    SynchronizeApplicationSchemaCommand,
+                    ) )
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/session.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,603 @@
+"""Repository users' and internal' sessions.
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+import sys
+import threading
+from time import time
+from types import NoneType
+from decimal import Decimal
+
+from mx.DateTime import DateTimeType, DateTimeDeltaType
+from rql.nodes import VariableRef, Function
+from yams import BASE_TYPES
+
+from cubicweb import RequestSessionMixIn, Binary
+from cubicweb.dbapi import ConnectionProperties
+from cubicweb.common.utils import make_uid
+from cubicweb.server.rqlrewrite import RQLRewriter
+
+def etype_from_pyobj(value):
+    """guess yams type from python value"""
+    # note:
+    # * Password is not selectable so no problem
+    # * use type(value) and not value.__class__ since mx instances have no
+    #   __class__ attribute
+    # * XXX Date, Time
+    return {bool: 'Boolean',
+            int: 'Int',
+            long: 'Int',
+            float: 'Float',
+            Decimal: 'Decimal',
+            unicode: 'String',
+            NoneType: None,
+            Binary: 'Bytes',
+            DateTimeType: 'Datetime',
+            DateTimeDeltaType: 'Interval',
+            }[type(value)]
+
+def is_final(rqlst, variable, args):
+    # try to find if this is a final var or not
+    for select in rqlst.children:
+        for sol in select.solutions:
+            etype = variable.get_type(sol, args)
+            if etype is None:
+                continue
+            if etype in BASE_TYPES:
+                return True
+            return False   
+
+def _make_description(selected, args, solution):
+    """return a description for a result set"""
+    description = []
+    for term in selected:
+        description.append(term.get_type(solution, args))
+    return description
+
+#XXX rql <= 0.18.3 bw compat
+from rql import stmts
+if not hasattr(stmts.Union, 'get_variable_variables'):
+    def _union_get_variable_variables(self):
+        """return the set of variable names which take different type according to
+        the solution
+        """
+        change = set()
+        values = {}
+        for select in self.children:
+            change.update(select.get_variable_variables(values))
+        return change
+    stmts.Union.get_variable_variables = _union_get_variable_variables
+                        
+    def _select_get_variable_variables(self, _values=None):
+        """return the set of variable names which take different type according to
+        the solution
+        """
+        change = set()
+        if _values is None:
+            _values = {}
+        for solution in self.solutions:
+            for vname, etype in solution.iteritems():
+                if not vname in _values:
+                    _values[vname] = etype
+                elif _values[vname] != etype:
+                    change.add(vname)
+        return change
+    stmts.Select.get_variable_variables = _select_get_variable_variables
+
+class Session(RequestSessionMixIn):
+    """tie session id, user, connections pool and other session data all
+    together
+    """
+    
+    def __init__(self, user, repo, cnxprops=None, _id=None):
+        super(Session, self).__init__(repo.vreg)
+        self.id = _id or make_uid(user.login.encode('UTF8'))
+        cnxprops = cnxprops or ConnectionProperties('inmemory')
+        self.user = user
+        self.repo = repo
+        self.cnxtype = cnxprops.cnxtype
+        self.creation = time()
+        self.timestamp = self.creation
+        self.is_internal_session = False
+        self.is_super_session = False
+        # short cut to querier .execute method
+        self._execute = repo.querier.execute
+        # shared data, used to communicate extra information between the client
+        # and the rql server
+        self.data = {}
+        # i18n initialization
+        self.set_language(cnxprops.lang)
+        self._threaddata = threading.local()
+        
+    def get_mode(self):
+        return getattr(self._threaddata, 'mode', 'read')
+    def set_mode(self, value):
+        self._threaddata.mode = value
+    # transaction mode (read/write), reset to read on commit / rollback
+    mode = property(get_mode, set_mode)
+
+    def get_commit_state(self):
+        return getattr(self._threaddata, 'commit_state', None)
+    def set_commit_state(self, value):
+        self._threaddata.commit_state = value
+    commit_state = property(get_commit_state, set_commit_state)
+    
+    # set according to transaction mode for each query
+    @property
+    def pool(self):
+        return getattr(self._threaddata, 'pool', None)
+    
+    # pending transaction operations
+    @property
+    def pending_operations(self):
+        try:
+            return self._threaddata.pending_operations
+        except AttributeError:
+            self._threaddata.pending_operations = []
+            return self._threaddata.pending_operations
+    
+    # rql rewriter
+    @property
+    def rql_rewriter(self):
+        try:
+            return self._threaddata._rewriter
+        except AttributeError:
+            self._threaddata._rewriter = RQLRewriter(self.repo.querier, self)
+            return self._threaddata._rewriter
+    
+    # transaction queries data
+    @property
+    def _query_data(self):
+        try:
+            return self._threaddata._query_data
+        except AttributeError:
+            self._threaddata._query_data = {}
+            return self._threaddata._query_data
+    
+    def set_language(self, language):
+        """i18n configuration for translation"""
+        vreg = self.vreg
+        language = language or self.user.property_value('ui.language')
+        try:
+            self._ = self.__ = vreg.config.translations[language]
+        except KeyError:
+            language = vreg.property_value('ui.language')
+            try:
+                self._ = self.__ = vreg.config.translations[language]
+            except KeyError:
+                self._ = self.__ = unicode
+        self.lang = language
+        
+    def change_property(self, prop, value):
+        assert prop == 'lang' # this is the only one changeable property for now
+        self.set_language(value)
+
+    def __str__(self):
+        return '<%ssession %s (%s 0x%x)>' % (self.cnxtype, self.user.login, 
+                                             self.id, id(self))
+
+    def etype_class(self, etype):
+        """return an entity class for the given entity type"""
+        return self.vreg.etype_class(etype)
+    
+    def entity(self, eid):
+        """return a result set for the given eid"""
+        return self.eid_rset(eid).get_entity(0, 0)
+        
+    def _touch(self):
+        """update latest session usage timestamp and reset mode to read
+        """
+        self.timestamp = time()
+        self.local_perm_cache.clear()
+        self._threaddata.mode = 'read'
+        
+    def set_pool(self):
+        """the session need a pool to execute some queries"""
+        if self.pool is None:
+            self._threaddata.pool = self.repo._get_pool()
+            try:                
+                self._threaddata.pool.pool_set(self)
+            except:
+                self.repo._free_pool(self.pool)
+                self._threaddata.pool = None
+                raise
+        return self._threaddata.pool
+            
+    def reset_pool(self):
+        """the session is no longer using its pool, at least for some time
+        """
+        # pool may be none if no operation has been done since last commit
+        # or rollback
+        if self.pool is not None and self.mode == 'read':
+            # even in read mode, we must release the current transaction
+            self.repo._free_pool(self.pool)
+            self.pool.pool_reset(self)
+            self._threaddata.pool = None
+            
+    def system_sql(self, sql, args=None):
+        """return a sql cursor on the system database"""
+        if not sql.split(None, 1)[0].upper() == 'SELECT':
+            self.mode = 'write'
+        cursor = self.pool['system']
+        self.pool.source('system').doexec(cursor, sql, args)
+        return cursor
+
+    def actual_session(self):
+        """return the original parent session if any, else self"""
+        return self        
+
+    # shared data handling ###################################################
+    
+    def get_shared_data(self, key, default=None, pop=False):
+        """return value associated to `key` in session data"""
+        if pop:
+            return self.data.pop(key, default)
+        else:
+            return self.data.get(key, default)
+        
+    def set_shared_data(self, key, value, querydata=False):
+        """set value associated to `key` in session data"""
+        if querydata:
+            self.set_query_data(key, value)
+        else:
+            self.data[key] = value
+        
+    # request interface #######################################################
+    
+    def set_entity_cache(self, entity):
+        # no entity cache in the server, too high risk of inconsistency
+        # between pre/post hooks
+        pass
+
+    def entity_cache(self, eid):
+        raise KeyError(eid)
+
+    def base_url(self):
+        return self.repo.config['base-url'] or u''
+        
+    def from_controller(self):
+        """return the id (string) of the controller issuing the request (no
+        sense here, always return 'view')
+        """
+        return 'view'
+    
+    def source_defs(self):
+        return self.repo.source_defs()
+
+    def describe(self, eid):
+        """return a tuple (type, sourceuri, extid) for the entity with id <eid>"""
+        return self.repo.type_and_source_from_eid(eid, self)
+    
+    # db-api like interface ###################################################
+
+    def source_from_eid(self, eid):
+        """return the source where the entity with id <eid> is located"""
+        return self.repo.source_from_eid(eid, self)
+
+    def decorate_rset(self, rset, propagate=False):
+        rset.vreg = self.vreg
+        rset.req = propagate and self or self.actual_session()
+        return rset
+
+    @property
+    def super_session(self):
+        try:
+            csession = self._threaddata.childsession
+        except AttributeError:
+            if self.is_super_session:
+                csession = self
+            else:
+                csession = ChildSession(self)
+            self._threaddata.childsession = csession
+        # need shared pool set
+        self.set_pool()
+        return csession
+        
+    def unsafe_execute(self, rql, kwargs=None, eid_key=None, build_descr=False,
+                       propagate=False):
+        """like .execute but with security checking disabled (this method is
+        internal to the server, it's not part of the db-api)
+
+        if `propagate` is true, the super_session will be attached to the result
+        set instead of the parent session, hence further query done through
+        entities fetched from this result set will bypass security as well
+        """
+        return self.super_session.execute(rql, kwargs, eid_key, build_descr,
+                                          propagate)
+
+    @property
+    def cursor(self):
+        """return a rql cursor"""
+        return self
+    
+    def execute(self, rql, kwargs=None, eid_key=None, build_descr=True,
+                propagate=False):
+        """db-api like method directly linked to the querier execute method
+
+        Beware that unlike actual cursor.execute, `build_descr` defaults to
+        false
+        """
+        rset = self._execute(self, rql, kwargs, eid_key, build_descr)
+        return self.decorate_rset(rset, propagate)
+    
+    def commit(self, reset_pool=True):
+        """commit the current session's transaction"""
+        if self.pool is None:
+            assert not self.pending_operations
+            self._query_data.clear()
+            self._touch()
+            return
+        if self.commit_state:
+            return
+        # on rollback, an operation should have the following state
+        # information:
+        # - processed by the precommit/commit event or not
+        # - if processed, is it the failed operation
+        try:
+            for trstate in ('precommit', 'commit'):
+                processed = []
+                self.commit_state = trstate
+                try:
+                    while self.pending_operations:
+                        operation = self.pending_operations.pop(0)
+                        operation.processed = trstate
+                        processed.append(operation)
+                        operation.handle_event('%s_event' % trstate)
+                    self.pending_operations[:] = processed
+                    self.debug('%s session %s done', trstate, self.id)
+                except:
+                    self.exception('error while %sing', trstate)
+                    operation.failed = True
+                    for operation in processed:
+                        operation.handle_event('revert%s_event' % trstate)
+                    self.rollback(reset_pool)
+                    raise
+            self.pool.commit()
+        finally:
+            self._touch()
+            self.commit_state = None
+            self.pending_operations[:] = []
+            self._query_data.clear()
+            if reset_pool:
+                self.reset_pool()
+                        
+    def rollback(self, reset_pool=True):
+        """rollback the current session's transaction"""
+        if self.pool is None:
+            assert not self.pending_operations
+            self._query_data.clear()
+            self._touch()
+            return
+        try:
+            while self.pending_operations:
+                try:
+                    operation = self.pending_operations.pop(0)
+                    operation.handle_event('rollback_event')
+                except:
+                    self.critical('rollback error', exc_info=sys.exc_info())
+                    continue
+            self.pool.rollback()
+        finally:
+            self._touch()
+            self.pending_operations[:] = []
+            self._query_data.clear()
+            if reset_pool:
+                self.reset_pool()
+        
+    def close(self):
+        """do not close pool on session close, since they are shared now"""
+        self.rollback()
+        
+    # transaction data/operations management ##################################
+    
+    def add_query_data(self, key, value):
+        self._query_data.setdefault(key, []).append(value)
+    
+    def set_query_data(self, key, value):
+        self._query_data[key] = value
+        
+    def query_data(self, key, default=None, setdefault=False, pop=False):
+        if setdefault:
+            assert not pop
+            return self._query_data.setdefault(key, default)
+        if pop:
+            return self._query_data.pop(key, default)
+        else:
+            return self._query_data.get(key, default)
+        
+    def add_operation(self, operation, index=None):
+        """add an observer"""
+        assert self.commit_state != 'commit'
+        if index is not None:
+            self.pending_operations.insert(index, operation)
+        else:
+            self.pending_operations.append(operation)
+            
+    # querier helpers #########################################################
+    
+    def build_description(self, rqlst, args, result):
+        """build a description for a given result"""
+        if len(rqlst.children) == 1 and len(rqlst.children[0].solutions) == 1:
+            # easy, all lines are identical
+            selected = rqlst.children[0].selection
+            solution = rqlst.children[0].solutions[0]
+            description = _make_description(selected, args, solution)
+            return [tuple(description)] * len(result)
+        # hard, delegate the work :o)
+        return self.manual_build_descr(rqlst, args, result)
+
+    def manual_build_descr(self, rqlst, args, result):
+        """build a description for a given result by analysing each row
+        
+        XXX could probably be done more efficiently during execution of query
+        """
+        # not so easy, looks for variable which changes from one solution
+        # to another
+        unstables = rqlst.get_variable_variables()
+        basedescription = []
+        todetermine = []
+        selected = rqlst.children[0].selection # sample selection
+        for i, term in enumerate(selected):
+            if isinstance(term, Function) and term.descr().rtype is not None:
+                basedescription.append(term.get_type(term.descr().rtype, args))
+                continue
+            for vref in term.get_nodes(VariableRef):
+                if vref.name in unstables:
+                    basedescription.append(None)
+                    todetermine.append( (i, is_final(rqlst, vref.variable, args)) )
+                    break
+            else:
+                # sample etype
+                etype = rqlst.children[0].solutions[0]
+                basedescription.append(term.get_type(etype, args))
+        if not todetermine:
+            return [tuple(basedescription)] * len(result)
+        return self._build_descr(result, basedescription, todetermine)
+    
+    def _build_descr(self, result, basedescription, todetermine):
+        description = []
+        etype_from_eid = self.describe
+        for row in result:
+            row_descr = basedescription
+            for index, isfinal in todetermine:
+                value = row[index]
+                if value is None:
+                    # None value inserted by an outer join, no type
+                    row_descr[index] = None
+                    continue
+                if isfinal:
+                    row_descr[index] = etype_from_pyobj(value)
+                else:
+                    row_descr[index] = etype_from_eid(value)[0]
+            description.append(tuple(row_descr))
+        return description
+
+    
+class ChildSession(Session):
+    """child (or internal) session are used to hijack the security system
+    """
+    cnxtype = 'inmemory'
+    
+    def __init__(self, parent_session):
+        self.id = None
+        self.is_internal_session = False
+        self.is_super_session = True
+        # session which has created this one
+        self.parent_session = parent_session
+        self.user = InternalManager()
+        self.repo = parent_session.repo
+        self.vreg = parent_session.vreg
+        self.data = parent_session.data
+        self.encoding = parent_session.encoding
+        self.lang = parent_session.lang
+        self._ = self.__ = parent_session._
+        # short cut to querier .execute method
+        self._execute = self.repo.querier.execute
+    
+    @property
+    def super_session(self):
+        return self
+
+    def get_mode(self):
+        return self.parent_session.mode
+    def set_mode(self, value):
+        self.parent_session.set_mode(value)
+    mode = property(get_mode, set_mode)
+
+    def get_commit_state(self):
+        return self.parent_session.commit_state
+    def set_commit_state(self, value):
+        self.parent_session.set_commit_state(value)
+    commit_state = property(get_commit_state, set_commit_state)
+    
+    @property
+    def pool(self):
+        return self.parent_session.pool
+    @property
+    def pending_operations(self):
+        return self.parent_session.pending_operations
+    @property
+    def _query_data(self):
+        return self.parent_session._query_data
+        
+    def set_pool(self):
+        """the session need a pool to execute some queries"""
+        self.parent_session.set_pool()
+            
+    def reset_pool(self):
+        """the session is no longer using its pool, at least for some time
+        """
+        self.parent_session.reset_pool()
+
+    def actual_session(self):
+        """return the original parent session if any, else self"""
+        return self.parent_session
+        
+    def commit(self, reset_pool=True):
+        """commit the current session's transaction"""
+        self.parent_session.commit(reset_pool)
+        
+    def rollback(self, reset_pool=True):
+        """rollback the current session's transaction"""
+        self.parent_session.rollback(reset_pool)
+        
+    def close(self):
+        """do not close pool on session close, since they are shared now"""
+        self.rollback()
+        
+    def user_data(self):
+        """returns a dictionary with this user's information"""
+        return self.parent_session.user_data()
+
+
+class InternalSession(Session):
+    """special session created internally by the repository"""
+    
+    def __init__(self, repo, cnxprops=None):
+        super(InternalSession, self).__init__(_IMANAGER, repo, cnxprops,
+                                              _id='internal')
+        self.cnxtype = 'inmemory'
+        self.is_internal_session = True
+        self.is_super_session = True
+    
+    @property
+    def super_session(self):
+        return self
+
+
+class InternalManager(object):
+    """a manager user with all access rights used internally for task such as
+    bootstrapping the repository or creating regular users according to
+    repository content
+    """
+    def __init__(self):
+        self.eid = -1
+        self.login = u'__internal_manager__'
+        self.properties = {}
+
+    def matching_groups(self, groups):
+        return 1
+
+    def is_in_group(self, group):
+        return True
+
+    def owns(self, eid):
+        return True
+    
+    def has_permission(self, pname, contexteid=None):
+        return True
+
+    def property_value(self, key):
+        if key == 'ui.language':
+            return 'en'
+        return None
+
+_IMANAGER = InternalManager()
+
+from logging import getLogger
+from cubicweb import set_log_methods
+set_log_methods(Session, getLogger('cubicweb.session'))
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/sources/__init__.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,376 @@
+"""cubicweb server sources support
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from logging import getLogger
+
+from cubicweb import set_log_methods
+
+
+class AbstractSource(object):
+    """an abstract class for sources"""
+
+    # boolean telling if modification hooks should be called when something is
+    # modified in this source
+    should_call_hooks = True
+    # boolean telling if the repository should connect to this source during
+    # migration
+    connect_for_migration = True
+    
+    # mappings telling which entities and relations are available in the source
+    # keys are supported entity/relation types and values are boolean indicating
+    # whether the support is read-only (False) or read-write (True)
+    support_entities = {}
+    support_relations = {}
+    # a global identifier for this source, which has to be set by the source
+    # instance
+    uri = None
+    # a reference to the system information helper
+    repo = None
+    # a reference to the application's schema (may differ from the source's schema)
+    schema = None
+    
+    def __init__(self, repo, appschema, source_config, *args, **kwargs):
+        self.repo = repo
+        self.uri = source_config['uri']
+        set_log_methods(self, getLogger('cubicweb.sources.'+self.uri))
+        self.set_schema(appschema)
+        self.support_relations['identity'] = False
+        
+    def init_creating(self):
+        """method called by the repository once ready to create a new instance"""
+        pass
+ 
+    def init(self):
+        """method called by the repository once ready to handle request"""
+        pass
+    
+    def reset_caches(self):
+        """method called during test to reset potential source caches"""
+        pass
+    
+    def clear_eid_cache(self, eid, etype):
+        """clear potential caches for the given eid"""
+        pass
+    
+    def __repr__(self):
+        return '<%s source>' % self.uri
+
+    def __cmp__(self, other):
+        """simple comparison function to get predictable source order, with the
+        system source at last
+        """
+        if self.uri == other.uri:
+            return 0
+        if self.uri == 'system':
+            return 1
+        if other.uri == 'system':
+            return -1
+        return cmp(self.uri, other.uri)
+        
+    def set_schema(self, schema):
+        """set the application's schema"""
+        self.schema = schema
+        
+    def support_entity(self, etype, write=False):
+        """return true if the given entity's type is handled by this adapter
+        if write is true, return true only if it's a RW support
+        """
+        try:
+            wsupport = self.support_entities[etype]
+        except KeyError:
+            return False
+        if write:
+            return wsupport
+        return True
+    
+    def support_relation(self, rtype, write=False):
+        """return true if the given relation's type is handled by this adapter
+        if write is true, return true only if it's a RW support
+
+        current implementation returns true if the relation is defined in
+        `support_relations` or if it is a final relation of a supported entity 
+        type
+        """
+        try:
+            wsupport = self.support_relations[rtype]
+        except KeyError:
+            rschema = self.schema.rschema(rtype)
+            if not rschema.is_final() or rschema == 'has_text':
+                return False
+            for etype in rschema.subjects():
+                try:
+                    wsupport = self.support_entities[etype]
+                    break
+                except KeyError:
+                    continue
+            else:
+                return False
+        if write:
+            return wsupport
+        return True    
+    
+    def eid2extid(self, eid, session=None):
+        return self.repo.eid2extid(self, eid, session)
+
+    def extid2eid(self, value, etype, session=None, insert=True):
+        return self.repo.extid2eid(self, value, etype, session, insert)
+
+    PUBLIC_KEYS = ('adapter', 'uri')
+    def remove_sensitive_information(self, sourcedef):
+        """remove sensitive information such as login / password from source
+        definition
+        """
+        for key in sourcedef.keys():
+            if not key in self.PUBLIC_KEYS:
+                sourcedef.pop(key)
+
+    def cleanup_entities_info(self, session):
+        """cleanup system tables from information for entities coming from
+        this source. This should be called when a source is removed to
+        properly cleanup the database
+        """
+        # fti / entities tables cleanup
+        dbhelper = session.pool.source('system').dbhelper
+        # sqlite doesn't support DELETE FROM xxx USING yyy
+        session.system_sql('DELETE FROM %s WHERE %s.%s IN (SELECT eid FROM '
+                           'entities WHERE entities.source=%%(uri)s)'
+                           % (dbhelper.fti_table, dbhelper.fti_table,
+                              dbhelper.fti_uid_attr),
+                           {'uri': self.uri})
+        session.system_sql('DELETE FROM entities WHERE source=%(uri)s',
+                           {'uri': self.uri})
+
+    # abstract methods to override (at least) in concrete source classes #######
+    
+    def get_connection(self):
+        """open and return a connection to the source"""
+        raise NotImplementedError()
+    
+    def check_connection(self, cnx):
+        """check connection validity, return None if the connection is still valid
+        else a new connection (called when the pool using the given connection is
+        being attached to a session)
+
+        do nothing by default
+        """
+        pass
+    
+    def pool_reset(self, cnx):
+        """the pool using the given connection is being reset from its current
+        attached session
+
+        do nothing by default
+        """
+        pass
+    
+    def authenticate(self, session, login, password):
+        """if the source supports the EUser entity type, it should implement
+        this method which should return EUser eid for the given login/password
+        if this account is defined in this source and valid login / password is
+        given. Else raise `AuthenticationError`
+        """
+        raise NotImplementedError()
+    
+    def syntax_tree_search(self, session, union,
+                           args=None, cachekey=None, varmap=None, debug=0):
+        """return result from this source for a rql query (actually from a rql 
+        syntax tree and a solution dictionary mapping each used variable to a 
+        possible type). If cachekey is given, the query necessary to fetch the
+        results (but not the results themselves) may be cached using this key.
+        """
+        raise NotImplementedError()
+                
+    def flying_insert(self, table, session, union, args=None, varmap=None):
+        """similar as .syntax_tree_search, but inserts data in the temporary
+        table (on-the-fly if possible, e.g. for the system source the given
+        cursor comes from). If not possible, inserts all data by calling
+        .executemany().
+        """
+        res = self.syntax_tree_search(session, union, args, varmap=varmap)
+        session.pool.source('system')._manual_insert(res, table, session)
+
+        
+    # system source don't have to implement the two methods below
+    
+    def before_entity_insertion(self, session, lid, etype, eid):
+        """called by the repository when an eid has been attributed for an
+        entity stored here but the entity has not been inserted in the system
+        table yet.
+        
+        This method must return an Entity instance representing this
+        entity.
+        """
+        entity = self.repo.vreg.etype_class(etype)(session, None)
+        entity.set_eid(eid)
+        return entity
+    
+    def after_entity_insertion(self, session, lid, entity):
+        """called by the repository after an entity stored here has been
+        inserted in the system table.
+        """
+        pass
+
+    # read-only sources don't have to implement methods below
+
+    def get_extid(self, entity):
+        """return the external id for the given newly inserted entity"""
+        raise NotImplementedError()
+        
+    def add_entity(self, session, entity):
+        """add a new entity to the source"""
+        raise NotImplementedError()
+        
+    def update_entity(self, session, entity):
+        """update an entity in the source"""
+        raise NotImplementedError()
+
+    def delete_entity(self, session, etype, eid):
+        """delete an entity from the source"""
+        raise NotImplementedError()
+
+    def add_relation(self, session, subject, rtype, object):
+        """add a relation to the source"""
+        raise NotImplementedError()
+    
+    def delete_relation(self, session, subject, rtype, object):
+        """delete a relation from the source"""
+        raise NotImplementedError()
+
+    # system source interface #################################################
+
+    def eid_type_source(self, session, eid):
+        """return a tuple (type, source, extid) for the entity with id <eid>"""
+        raise NotImplementedError()
+    
+    def create_eid(self, session):
+        raise NotImplementedError()
+
+    def add_info(self, session, entity, source, extid=None):
+        """add type and source info for an eid into the system table"""
+        raise NotImplementedError()
+
+    def delete_info(self, session, eid, etype, uri, extid):
+        """delete system information on deletion of an entity by transferring
+        record from the entities table to the deleted_entities table
+        """
+        raise NotImplementedError()
+        
+    def fti_unindex_entity(self, session, eid):
+        """remove text content for entity with the given eid from the full text
+        index
+        """
+        raise NotImplementedError()
+        
+    def fti_index_entity(self, session, entity):
+        """add text content of a created/modified entity to the full text index
+        """
+        raise NotImplementedError()
+        
+    def modified_entities(self, session, etypes, mtime):
+        """return a 2-uple:
+        * list of (etype, eid) of entities of the given types which have been
+          modified since the given timestamp (actually entities whose full text
+          index content has changed)
+        * list of (etype, eid) of entities of the given types which have been
+          deleted since the given timestamp
+        """
+        raise NotImplementedError()
+
+    # sql system source interface #############################################
+
+    def sqlexec(self, session, sql, args=None):
+        """execute the query and return its result"""
+        raise NotImplementedError()
+    
+    def temp_table_def(self, selection, solution, table, basemap):
+        raise NotImplementedError()
+    
+    def create_index(self, session, table, column, unique=False):
+        raise NotImplementedError()
+            
+    def drop_index(self, session, table, column, unique=False):
+        raise NotImplementedError()
+
+    def create_temp_table(self, session, table, schema):
+        raise NotImplementedError()
+
+    def clean_temp_data(self, session, temptables):
+        """remove temporary data, usually associated to temporary tables"""
+        pass
+
+        
+class TrFunc(object):
+    """lower, upper"""
+    def __init__(self, trname, index, attrname=None):
+        self._tr = trname.lower()
+        self.index = index
+        self.attrname = attrname
+        
+    def apply(self, resdict):
+        value = resdict.get(self.attrname)
+        if value is not None:
+            return getattr(value, self._tr)()
+        return None
+
+
+class GlobTrFunc(TrFunc):
+    """count, sum, max, min, avg"""
+    funcs = {
+        'count': len,
+        'sum': sum,
+        'max': max,
+        'min': min,
+        # XXX avg
+        }
+    def apply(self, result):
+        """have to 'groupby' manually. For instance, if we 'count' for index 1:
+        >>> self.apply([(1, 2), (3, 4), (1, 5)])
+        [(1, 7), (3, 4)]
+        """
+        keys, values = [], {}
+        for row in result:
+            key = tuple(v for i, v in enumerate(row) if i != self.index)
+            value = row[self.index]
+            try:
+                values[key].append(value)
+            except KeyError:
+                keys.append(key)
+                values[key] = [value]
+        result = []
+        trfunc = self.funcs[self._tr]
+        for key in keys:
+            row = list(key)
+            row.insert(self.index, trfunc(values[key]))
+            result.append(row)
+        return result
+
+
+class ConnectionWrapper(object):
+    def __init__(self, cnx=None):
+        self.cnx = cnx
+    def commit(self):
+        pass
+    def rollback(self):
+        pass
+    def cursor(self):
+        return None # no actual cursor support
+
+from cubicweb.server import SOURCE_TYPES
+
+def source_adapter(source_config):
+    adapter_type = source_config['adapter'].lower()
+    try:
+        return SOURCE_TYPES[adapter_type]
+    except KeyError:
+        raise RuntimeError('Unknown adapter %r' % adapter_type)
+    
+def get_source(source_config, global_schema, repo):
+    """return a source adapter according to the adapter field in the
+    source's configuration
+    """
+    return source_adapter(source_config)(repo, global_schema, source_config)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/sources/native.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,606 @@
+"""Adapters for native cubicweb sources.
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from threading import Lock
+
+from mx.DateTime import now
+
+from logilab.common.cache import Cache
+from logilab.common.configuration import REQUIRED
+from logilab.common.adbh import get_adv_func_helper
+
+from indexer import get_indexer
+
+from cubicweb import UnknownEid, AuthenticationError, Binary, server
+from cubicweb.server.utils import crypt_password
+from cubicweb.server.sqlutils import SQLAdapterMixIn
+from cubicweb.server.rqlannotation import set_qdata
+from cubicweb.server.sources import AbstractSource
+from cubicweb.server.sources.rql2sql import SQLGenerator
+
+
+NONSYSTEM_ETYPES = set()
+NONSYSTEM_RELATIONS = set()
+
+class LogCursor(object):
+    def __init__(self, cursor):
+        self.cu = cursor
+        
+    def execute(self, query, args=None):
+        """Execute a query.
+        it's a function just so that it shows up in profiling
+        """
+        if server.DEBUG:
+            print 'exec', query, args
+        try:
+            self.cu.execute(str(query), args)
+        except Exception, ex:
+            print "sql: %r\n args: %s\ndbms message: %r" % (
+                query, args, ex.args[0])
+            raise
+        
+    def fetchall(self):
+        return self.cu.fetchall()
+        
+    def fetchone(self):
+        return self.cu.fetchone()
+    
+def make_schema(selected, solution, table, typemap):
+    """return a sql schema to store RQL query result"""
+    sql = []
+    varmap = {}
+    for i, term in enumerate(selected):
+        name = 'C%s' % i
+        key = term.as_string()
+        varmap[key] = '%s.%s' % (table, name)
+        ttype = term.get_type(solution)
+        try:
+            sql.append('%s %s' % (name, typemap[ttype]))
+        except KeyError:
+            # assert not schema(ttype).is_final()
+            sql.append('%s %s' % (name, typemap['Int']))
+    return ','.join(sql), varmap
+
+def _modified_sql(table, etypes):
+    # XXX protect against sql injection
+    if len(etypes) > 1:
+        restr = 'type IN (%s)' % ','.join("'%s'" % etype for etype in etypes)
+    else:
+        restr = "type='%s'" % etypes[0]
+    if table == 'entities':
+        attr = 'mtime'
+    else:
+        attr = 'dtime'
+    return 'SELECT type, eid FROM %s WHERE %s AND %s > %%(time)s' % (
+        table, restr, attr)
+
+
+class NativeSQLSource(SQLAdapterMixIn, AbstractSource):
+    """adapter for source using the native cubicweb schema (see below)
+    """
+    # need default value on class since migration doesn't call init method
+    has_deleted_entitites_table = True
+    
+    passwd_rql = "Any P WHERE X is EUser, X login %(login)s, X upassword P"
+    auth_rql = "Any X WHERE X is EUser, X login %(login)s, X upassword %(pwd)s"
+    _sols = ({'X': 'EUser', 'P': 'Password'},)
+    
+    options = (
+        ('db-driver',
+         {'type' : 'string',
+          'default': 'postgres',
+          'help': 'database driver (postgres or sqlite)',
+          'group': 'native-source', 'inputlevel': 1,
+          }),
+        ('db-host',
+         {'type' : 'string',
+          'default': '',
+          'help': 'database host',
+          'group': 'native-source', 'inputlevel': 1,
+          }),
+        ('db-name',
+         {'type' : 'string',
+          'default': REQUIRED,
+          'help': 'database name',
+          'group': 'native-source', 'inputlevel': 0,
+          }),
+        ('db-user',
+         {'type' : 'string',
+          'default': 'cubicweb',
+          'help': 'database user',
+          'group': 'native-source', 'inputlevel': 0,
+          }),
+        ('db-password',
+         {'type' : 'password',
+          'default': '',
+          'help': 'database password',
+          'group': 'native-source', 'inputlevel': 0,
+          }),
+        ('db-encoding',
+         {'type' : 'string',
+          'default': 'utf8',
+          'help': 'database encoding',
+          'group': 'native-source', 'inputlevel': 1,
+          }),
+    )
+    
+    def __init__(self, repo, appschema, source_config, *args, **kwargs):
+        SQLAdapterMixIn.__init__(self, source_config)
+        AbstractSource.__init__(self, repo, appschema, source_config,
+                                *args, **kwargs)
+        # sql generator
+        self._rql_sqlgen = SQLGenerator(appschema, self.dbhelper,
+                                        self.encoding)
+        # full text index helper
+        self.indexer = get_indexer(self.dbdriver, self.encoding)
+        # advanced functionality helper
+        self.dbhelper.fti_uid_attr = self.indexer.uid_attr
+        self.dbhelper.fti_table = self.indexer.table
+        self.dbhelper.fti_restriction_sql = self.indexer.restriction_sql
+        self.dbhelper.fti_need_distinct_query = self.indexer.need_distinct
+        # sql queries cache
+        self._cache = Cache(repo.config['rql-cache-size'])
+        self._temp_table_data = {}
+        self._eid_creation_lock = Lock()
+
+    def reset_caches(self):
+        """method called during test to reset potential source caches"""
+        self._cache = Cache(self.repo.config['rql-cache-size'])
+    
+    def clear_eid_cache(self, eid, etype):
+        """clear potential caches for the given eid"""
+        self._cache.pop('%s X WHERE X eid %s' % (etype, eid), None)
+        self._cache.pop('Any X WHERE X eid %s' % eid, None)
+        
+    def sqlexec(self, session, sql, args=None):
+        """execute the query and return its result"""
+        cursor = session.pool[self.uri]
+        self.doexec(cursor, sql, args)
+        return self.process_result(cursor)
+    
+    def init_creating(self):
+        # check full text index availability
+        pool = self.repo._get_pool()
+        if not self.indexer.has_fti_table(pool['system']):
+            self.error('no text index table')
+            self.indexer = None
+        self.repo._free_pool(pool)
+
+    def init(self):
+        self.init_creating() 
+        pool = self.repo._get_pool()
+        # XXX cubicweb < 2.42 compat
+        if 'deleted_entities' in self.dbhelper.list_tables(pool['system']):
+            self.has_deleted_entitites_table = True
+        else:
+            self.has_deleted_entitites_table = False
+        self.repo._free_pool(pool)
+        
+    # ISource interface #######################################################
+
+    def compile_rql(self, rql):
+        rqlst = self.repo.querier._rqlhelper.parse(rql)
+        rqlst.restricted_vars = ()
+        rqlst.children[0].solutions = self._sols
+        self.repo.querier.sqlgen_annotate(rqlst)
+        set_qdata(rqlst, ())
+        return rqlst
+    
+    def set_schema(self, schema):
+        """set the application's schema"""
+        self._cache = Cache(self.repo.config['rql-cache-size'])
+        self.cache_hit, self.cache_miss, self.no_cache = 0, 0, 0
+        self.schema = schema
+        try:
+            self._rql_sqlgen.schema = schema
+        except AttributeError:
+            pass # __init__
+        if 'EUser' in schema: # probably an empty schema if not true...
+            # rql syntax trees used to authenticate users
+            self._passwd_rqlst = self.compile_rql(self.passwd_rql)
+            self._auth_rqlst = self.compile_rql(self.auth_rql)
+                
+    def support_entity(self, etype, write=False):
+        """return true if the given entity's type is handled by this adapter
+        if write is true, return true only if it's a RW support
+        """
+        return not etype in NONSYSTEM_ETYPES
+    
+    def support_relation(self, rtype, write=False):
+        """return true if the given relation's type is handled by this adapter
+        if write is true, return true only if it's a RW support
+        """
+        if write:
+            return not rtype in NONSYSTEM_RELATIONS
+        # due to current multi-sources implementation, the system source
+        # can't claim not supporting a relation            
+        return True #not rtype == 'content_for'
+
+    def authenticate(self, session, login, password):
+        """return EUser eid for the given login/password if this account is
+        defined in this source, else raise `AuthenticationError`
+
+        two queries are needed since passwords are stored crypted, so we have
+        to fetch the salt first
+        """
+        args = {'login': login, 'pwd' : password}
+        if password is not None:
+            rset = self.syntax_tree_search(session, self._passwd_rqlst, args)
+            try:
+                pwd = rset[0][0]
+            except IndexError:
+                raise AuthenticationError('bad login')
+            # passwords are stored using the bytea type, so we get a StringIO
+            if pwd is not None:
+                args['pwd'] = crypt_password(password, pwd.getvalue()[:2])
+        # get eid from login and (crypted) password
+        rset = self.syntax_tree_search(session, self._auth_rqlst, args)
+        try:
+            return rset[0][0]
+        except IndexError:
+            raise AuthenticationError('bad password')
+    
+    def syntax_tree_search(self, session, union, args=None, cachekey=None, 
+                           varmap=None):
+        """return result from this source for a rql query (actually from
+        a rql syntax tree and a solution dictionary mapping each used
+        variable to a possible type). If cachekey is given, the query
+        necessary to fetch the results (but not the results themselves)
+        may be cached using this key.
+        """
+        if server.DEBUG:
+            print 'RQL FOR NATIVE SOURCE', self.uri, cachekey
+            if varmap:
+                print 'USING VARMAP', varmap
+            print union.as_string()
+            if args: print 'ARGS', args
+            print 'SOLUTIONS', ','.join(str(s.solutions) for s in union.children)
+        # remember number of actually selected term (sql generation may append some)
+        if cachekey is None:
+            self.no_cache += 1
+            # generate sql query if we are able to do so (not supported types...)
+            sql, query_args = self._rql_sqlgen.generate(union, args, varmap)
+        else:
+            # sql may be cached
+            try:
+                sql, query_args = self._cache[cachekey]
+                self.cache_hit += 1
+            except KeyError:
+                self.cache_miss += 1
+                sql, query_args = self._rql_sqlgen.generate(union, args, varmap)
+                self._cache[cachekey] = sql, query_args
+        args = self.merge_args(args, query_args)
+        cursor = session.pool[self.uri]
+        assert isinstance(sql, basestring), repr(sql)
+        try:
+            self.doexec(cursor, sql, args)
+        except (self.dbapi_module.OperationalError,
+                self.dbapi_module.InterfaceError):
+            # FIXME: better detection of disconnection problems
+            self.info("request failed '%s' ... retry with a new cursor", sql)
+            session.pool.reconnect(self)
+            cursor = session.pool[self.uri]
+            self.doexec(cursor, sql, args)
+        res = self.process_result(cursor)
+        if server.DEBUG:
+            print '------>', res
+        return res
+                
+    def flying_insert(self, table, session, union, args=None, varmap=None):
+        """similar to .syntax_tree_search, but inserts data in the
+        temporary table (on-the-fly if possible, eg for the system
+        source from which the given cursor comes). If not possible,
+        inserts all data by calling .executemany().
+        """
+        if self.uri == 'system':
+            if server.DEBUG:
+                print 'FLYING RQL FOR SOURCE', self.uri
+                if varmap:
+                    print 'USING VARMAP', varmap
+                print union.as_string()
+                print 'SOLUTIONS', ','.join(str(s.solutions) for s in union.children)
+            # generate sql queries if we are able to do so
+            sql, query_args = self._rql_sqlgen.generate(union, args, varmap)
+            # on the system source, feed the temporary table directly from the
+            # generated SELECT, avoiding a python-level round trip on the data
+            query = 'INSERT INTO %s %s' % (table, sql.encode(self.encoding))
+            self.doexec(session.pool[self.uri], query,
+                        self.merge_args(args, query_args))
+# XXX commented until it's proved to be necessary
+#             # XXX probably inefficient
+#             tempdata = self._temp_table_data.setdefault(table, set())
+#             cursor = session.pool[self.uri]
+#             cursor.execute('select * from %s' % table)
+#             for row in cursor.fetchall():
+#                 print 'data', row
+#                 tempdata.add(tuple(row))
+        else:
+            # other sources: let the parent class fetch the results and insert
+            # them row by row (see _manual_insert)
+            super(NativeSQLSource, self).flying_insert(table, session, union,
+                                                       args, varmap)
+        
+    def _manual_insert(self, results, table, session):
+        """insert given result into a temporary table on the system source
+
+        rows are inserted through .doexecmany(); unicode strings are encoded
+        to the source's encoding and Binary values converted through
+        self.binary before being handed to the db-api driver
+        """
+        #print 'manual insert', table, results
+        if not results:
+            return
+        #cursor.execute('select * from %s'%table)
+        #assert len(cursor.fetchall())== 0
+        encoding = self.encoding
+        # added chr to be sqlite compatible
+        query_args = ['%%(%s)s' % i for i in xrange(len(results[0]))]
+        query = 'INSERT INTO %s VALUES(%s)' % (table, ','.join(query_args))
+        kwargs_list = []
+#        tempdata = self._temp_table_data.setdefault(table, set())
+        for row in results:
+            kwargs = {}
+            row = tuple(row)
+# XXX commented until it's proved to be necessary
+#             if row in tempdata:
+#                 continue
+#             tempdata.add(row)
+            for index, cell in enumerate(row):
+                if type(cell) is unicode:
+                    cell = cell.encode(encoding)
+                elif isinstance(cell, Binary):
+                    cell = self.binary(cell.getvalue())
+                # substitution keys are simply the column index as a string
+                kwargs[str(index)] = cell
+            kwargs_list.append(kwargs)
+        self.doexecmany(session.pool[self.uri], query, kwargs_list)
+
+    def clean_temp_data(self, session, temptables):
+        """remove temporary data, usually associated to temporary tables"""
+        if temptables:
+            cursor = session.pool[self.uri]
+            for table in temptables:
+                try:
+                    self.doexec(cursor,'DROP TABLE %s' % table)
+                except:
+                    pass
+                try:
+                    del self._temp_table_data[table]
+                except KeyError:
+                    continue
+    
+    def add_entity(self, session, entity):
+        """add a new entity to the source"""
+        # preprocess_entity yields the attribute dict used both to build the
+        # INSERT statement and as its substitution arguments
+        attrs = self.preprocess_entity(entity)
+        sql = self.sqlgen.insert(str(entity.e_schema), attrs)
+        self.doexec(session.pool[self.uri], sql, attrs)
+        
+    def update_entity(self, session, entity):
+        """replace an entity in the source"""
+        attrs = self.preprocess_entity(entity)
+        # update the row matched on the 'eid' column
+        sql = self.sqlgen.update(str(entity.e_schema), attrs, ['eid'])
+        self.doexec(session.pool[self.uri], sql, attrs)
+
+    def delete_entity(self, session, etype, eid):
+        """delete an entity from the source"""
+        # delete the row of the entity type's table matching the given eid
+        attrs = {'eid': eid}
+        sql = self.sqlgen.delete(etype, attrs)
+        self.doexec(session.pool[self.uri], sql, attrs)
+
+    def add_relation(self, session, subject, rtype, object):
+        """add a relation to the source"""
+        # non-inlined relations are stored in a dedicated <rtype>_relation
+        # table with eid_from / eid_to columns
+        attrs = {'eid_from': subject, 'eid_to': object}
+        sql = self.sqlgen.insert('%s_relation' % rtype, attrs)
+        self.doexec(session.pool[self.uri], sql, attrs)
+    
+    def delete_relation(self, session, subject, rtype, object):
+        """delete a relation from the source"""
+        rschema = self.schema.rschema(rtype)
+        if rschema.inlined:
+            # inlined relation: stored as a column on the subject entity's
+            # table, so deletion is setting that column to NULL
+            etype = session.describe(subject)[0]
+            sql = 'UPDATE %s SET %s=NULL WHERE eid=%%(eid)s' % (etype, rtype)
+            attrs = {'eid' : subject}
+        else:
+            # regular relation: delete the row from the relation table
+            attrs = {'eid_from': subject, 'eid_to': object}
+            sql = self.sqlgen.delete('%s_relation' % rtype, attrs)
+        self.doexec(session.pool[self.uri], sql, attrs)    
+
+    def doexec(self, cursor, query, args=None):
+        """Execute a query.
+        it's a function just so that it shows up in profiling
+        """
+        #t1 = time()
+        if server.DEBUG:
+            print 'exec', query, args
+        #import sys
+        #sys.stdout.flush()
+        # str(query) to avoid error if it's an unicode string
+        try:
+            cursor.execute(str(query), args)
+        except Exception, ex:
+            # log the failing query with its arguments before propagating
+            self.critical("sql: %r\n args: %s\ndbms message: %r",
+                          query, args, ex.args[0])
+            raise
+        
+    def doexecmany(self, cursor, query, args):
+        """Execute a query.
+        it's a function just so that it shows up in profiling
+        """
+        #t1 = time()
+        if server.DEBUG:
+            print 'execmany', query, 'with', len(args), 'arguments'
+        #import sys
+        #sys.stdout.flush()
+        # str(query) to avoid error if it's an unicode string
+        try:
+            cursor.executemany(str(query), args)
+        except:
+            self.critical("sql many: %r\n args: %s", query, args)
+            raise
+        
+    # short cut to method requiring advanced db helper usage ##################
+            
+    def create_index(self, session, table, column, unique=False):
+        """create an index (unique if `unique`) on table.column, delegating to
+        the backend specific db helper
+        """
+        cursor = LogCursor(session.pool[self.uri])
+        self.dbhelper.create_index(cursor, table, column, unique)
+            
+    def drop_index(self, session, table, column, unique=False):
+        """drop the index on table.column, delegating to the backend specific
+        db helper
+        """
+        cursor = LogCursor(session.pool[self.uri])
+        self.dbhelper.drop_index(cursor, table, column, unique)
+
+    # system source interface #################################################
+
+    def eid_type_source(self, session, eid):
+        """return a tuple (type, source, extid) for the entity with id <eid>"""
+        sql = 'SELECT type, source, extid FROM entities WHERE eid=%s' % eid
+        try:
+            res = session.system_sql(sql).fetchone()
+        except:
+            # NOTE(review): this bare except maps *any* failure (including
+            # connection or programming errors) to UnknownEid — may hide bugs
+            raise UnknownEid(eid)
+        if res is None:
+            raise UnknownEid(eid)
+        return res
+
+    def extid2eid(self, session, source, lid):
+        """get eid from a local id. An eid is attributed if no record is found
+
+        returns the eid found for (extid, source uri) in the entities table,
+        or None when no matching record exists
+        """
+        cursor = session.system_sql('SELECT eid FROM entities WHERE '
+                                    'extid=%(x)s AND source=%(s)s',
+                                    # str() necessary with pg 8.3
+                                    {'x': str(lid), 's': source.uri})
+        # XXX testing rowcount cause strange bug with sqlite, results are there
+        #     but rowcount is 0
+        #if cursor.rowcount > 0: 
+        try:
+            result = cursor.fetchone()
+            if result:
+                eid = result[0]
+                return eid            
+        except:
+            # NOTE(review): bare except silently ignores fetch errors and
+            # falls through to return None — confirm this is intended
+            pass
+        return None
+    
+    def temp_table_def(self, selected, sol, table):
+        """build the schema definition for a temporary table holding the given
+        selected terms, using the backend specific type mapping (delegates to
+        make_schema)
+        """
+        return make_schema(selected, sol, table, self.dbhelper.TYPE_MAPPING)
+
+    def create_temp_table(self, session, table, schema):
+        """create a temporary table with the given name and schema"""
+        # we don't want on commit drop, this may cause problem when
+        # running with an ldap source, and table will be deleted manually any way
+        # on commit
+        sql = self.dbhelper.sql_temporary_table(table, schema, False)
+        self.doexec(session.pool[self.uri], sql)
+    
+    def create_eid(self, session):
+        """return a new eid from the entities_id_seq sequence, serializing
+        concurrent creations through a lock
+        """
+        self._eid_creation_lock.acquire()
+        try:
+            cursor = session.pool[self.uri]
+            for sql in self.dbhelper.sqls_increment_sequence('entities_id_seq'):
+                self.doexec(cursor, sql)
+            # the last executed statement is expected to select the new
+            # sequence value — TODO confirm against dbhelper implementations
+            return cursor.fetchone()[0]
+        finally:
+            self._eid_creation_lock.release()
+
+    def add_info(self, session, entity, source, extid=None):
+        """add type and source info for an eid into the system table"""
+        # begin by inserting eid/type/source/extid into the entities table;
+        # mtime is later refreshed by fti_index_entity and read by
+        # modified_entities
+        attrs = {'type': str(entity.e_schema), 'eid': entity.eid,
+                 'extid': extid, 'source': source.uri, 'mtime': now()}
+        session.system_sql(self.sqlgen.insert('entities', attrs), attrs)
+
+    def delete_info(self, session, eid, etype, uri, extid):
+        """delete system information on deletion of an entity by transfering
+        record from the entities table to the deleted_entities table
+        """
+        attrs = {'eid': eid}
+        session.system_sql(self.sqlgen.delete('entities', attrs), attrs)
+        # NOTE: 'entitites' (sic) matches the attribute's spelling where it is
+        # defined, outside this chunk
+        if self.has_deleted_entitites_table:
+            attrs = {'type': etype, 'eid': eid, 'extid': extid,
+                     'source': uri, 'dtime': now()}
+            session.system_sql(self.sqlgen.insert('deleted_entities', attrs), attrs)
+        
+    def fti_unindex_entity(self, session, eid):
+        """remove text content for entity with the given eid from the full text
+        index
+        """
+        try:
+            self.indexer.cursor_unindex_object(eid, session.pool['system'])
+        except:
+            # when no indexer is configured (self.indexer is None) the failure
+            # is ignored; otherwise log the error
+            if self.indexer is not None:
+                self.exception('error while unindexing %s', eid)
+        
+    def fti_index_entity(self, session, entity):
+        """add text content of a created/modified entity to the full text index
+        """
+        self.info('reindexing %r', entity.eid)
+        try:
+            self.indexer.cursor_reindex_object(entity.eid, entity,
+                                               session.pool['system'])
+        except:
+            # when no indexer is configured (self.indexer is None) the failure
+            # is ignored; otherwise log the error
+            if self.indexer is not None:
+                self.exception('error while reindexing %s', entity)
+        # update entities.mtime
+        attrs = {'eid': entity.eid, 'mtime': now()}
+        session.system_sql(self.sqlgen.update('entities', attrs, ['eid']), attrs)
+        
+    def modified_entities(self, session, etypes, mtime):
+        """return a 2-uple:
+        * list of (etype, eid) of entities of the given types which have been
+          modified since the given timestamp (actually entities whose full text
+          index content has changed)
+        * list of (etype, eid) of entities of the given types which have been
+          deleted since the given timestamp
+        """
+        # entities whose mtime is more recent than the given timestamp
+        modsql = _modified_sql('entities', etypes)
+        cursor = session.system_sql(modsql, {'time': mtime})
+        modentities = cursor.fetchall()
+        # entities recorded as deleted since the given timestamp
+        delsql = _modified_sql('deleted_entities', etypes)
+        cursor = session.system_sql(delsql, {'time': mtime})
+        delentities = cursor.fetchall()
+        return modentities, delentities
+
+
+def sql_schema(driver):
+    """return the sql statements creating the repository's system tables
+    (entities, deleted_entities), their indexes and the eid sequence, using
+    the advanced helper matching the given db driver
+    """
+    helper = get_adv_func_helper(driver)
+    schema = """
+/* Create the repository's system database */
+
+%s
+
+CREATE TABLE entities (
+  eid INTEGER PRIMARY KEY NOT NULL,
+  type VARCHAR(64) NOT NULL,
+  source VARCHAR(64) NOT NULL,
+  mtime TIMESTAMP NOT NULL,
+  extid VARCHAR(256)
+);
+CREATE INDEX entities_type_idx ON entities(type);
+CREATE INDEX entities_mtime_idx ON entities(mtime);
+CREATE INDEX entities_extid_idx ON entities(extid);
+
+CREATE TABLE deleted_entities (
+  eid INTEGER PRIMARY KEY NOT NULL,
+  type VARCHAR(64) NOT NULL,
+  source VARCHAR(64) NOT NULL,
+  dtime TIMESTAMP NOT NULL,
+  extid VARCHAR(256)
+);
+CREATE INDEX deleted_entities_type_idx ON deleted_entities(type);
+CREATE INDEX deleted_entities_dtime_idx ON deleted_entities(dtime);
+CREATE INDEX deleted_entities_extid_idx ON deleted_entities(extid);
+""" % helper.sql_create_sequence('entities_id_seq')
+    return schema
+
+
+def sql_drop_schema(driver):
+    """return the sql statements dropping the system tables and the eid
+    sequence for the given db driver
+    """
+    helper = get_adv_func_helper(driver)
+    return """
+%s
+DROP TABLE entities;
+DROP TABLE deleted_entities;
+""" % helper.sql_drop_sequence('entities_id_seq')
+
+
+def grant_schema(user, set_owner=True):
+    """return the sql statements granting all permissions on the system
+    tables and eid sequence to `user`, optionally transfering their
+    ownership first
+    """
+    result = ''
+    if set_owner:
+        result = 'ALTER TABLE entities OWNER TO %s;\n' % user
+        result += 'ALTER TABLE deleted_entities OWNER TO %s;\n' % user
+        result += 'ALTER TABLE entities_id_seq OWNER TO %s;\n' % user
+    result += 'GRANT ALL ON entities TO %s;\n' % user
+    result += 'GRANT ALL ON deleted_entities TO %s;\n' % user
+    result += 'GRANT ALL ON entities_id_seq TO %s;\n' % user
+    return result
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/sources/rql2sql.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1202 @@
+"""RQL to SQL generator for native sources.
+
+
+SQL queries optimization
+~~~~~~~~~~~~~~~~~~~~~~~~
+1. EUser X WHERE X in_group G, G name 'users':
+
+   EUser is the only subject entity type for the in_group relation,
+   which allows us to do ::
+
+     SELECT eid_from FROM in_group, EGroup
+     WHERE in_group.eid_to = EGroup.eid_from
+     AND EGroup.name = 'users'
+
+
+2. Any X WHERE X nonfinal1 Y, Y nonfinal2 Z
+
+   -> direct join between nonfinal1 and nonfinal2, whatever X,Y, Z (unless
+      inlined...)
+      
+      NOT IMPLEMENTED (and quite hard to implement)
+
+Potential optimization information is collected by the querier, sql generation
+is done according to this information
+
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+import threading
+
+from rql import BadRQLQuery, CoercionError
+from rql.stmts import Union, Select
+from rql.nodes import (SortTerm, VariableRef, Constant, Function, Not,
+                       Variable, ColumnAlias, Relation, SubQuery, Exists)
+
+from cubicweb import server
+from cubicweb.server.utils import cleanup_solutions
+
+def _new_var(select, varname): 
+    """get (or create) the variable `varname` in `select`, making sure its
+    annotation info (stinfo) is initialized
+    """
+    newvar = select.get_variable(varname)
+    if not 'relations' in newvar.stinfo:
+        # not yet initialized
+        newvar.prepare_annotation()
+        newvar.stinfo['scope'] = select
+        newvar._q_invariant = False
+    return newvar
+
+def _fill_to_wrap_rel(var, newselect, towrap, schema):
+    """collect into `towrap` the (variable, relation) couples that should be
+    moved into the subquery: final relations, and inlined relations (recursing
+    on their target variables, which are added to the subquery's selection)
+    """
+    for rel in var.stinfo['relations'] - var.stinfo['rhsrelations']:
+        rschema = schema.rschema(rel.r_type)
+        if rschema.inlined:
+            towrap.add( (var, rel) )
+            for vref in rel.children[1].iget_nodes(VariableRef):
+                newivar = _new_var(newselect, vref.name)
+                newselect.selection.append(VariableRef(newivar))
+                _fill_to_wrap_rel(vref.variable, newselect, towrap, schema)
+        elif rschema.is_final():
+            towrap.add( (var, rel) )
+   
+def rewrite_unstable_outer_join(select, solutions, unstable, schema):
+    """if some optional variables are unstable, they should be selected in a
+    subquery. This function checks this and rewrites the rql syntax tree if
+    necessary (in place). Return a boolean telling if the tree has been modified
+    """
+    torewrite = set()
+    modified = False
+    for varname in tuple(unstable):
+        var = select.defined_vars[varname]
+        if not var.stinfo['optrelations']:
+            continue
+        modified = True
+        unstable.remove(varname)
+        torewrite.add(var)
+        # build a one-select union which will hold the extracted variable
+        newselect = Select()
+        newselect.need_distinct = False
+        myunion = Union()
+        myunion.append(newselect)
+        # extract aliases / selection
+        newvar = _new_var(newselect, var.name)
+        newselect.selection = [VariableRef(newvar)]
+        for avar in select.defined_vars.itervalues():
+            if avar.stinfo['attrvar'] is var:
+                newavar = _new_var(newselect, avar.name)
+                newavar.stinfo['attrvar'] = newvar
+                newselect.selection.append(VariableRef(newavar))
+        towrap_rels = set()
+        _fill_to_wrap_rel(var, newselect, towrap_rels, schema)
+        # extract relations
+        # NOTE(review): `var` is rebound by this loop (and again inside it),
+        # shadowing the outer loop's variable
+        for var, rel in towrap_rels:
+            newrel = rel.copy(newselect)
+            newselect.add_restriction(newrel)
+            select.remove_node(rel)
+            var.stinfo['relations'].remove(rel)
+            newvar.stinfo['relations'].add(newrel)
+            if rel.optional in ('left', 'both'):
+                newvar.stinfo['optrelations'].add(newrel)
+            for vref in newrel.children[1].iget_nodes(VariableRef):
+                var = vref.variable
+                var.stinfo['relations'].add(newrel)
+                var.stinfo['rhsrelations'].add(newrel)
+                if rel.optional in ('right', 'both'):
+                    var.stinfo['optrelations'].add(newrel)                
+        # extract subquery solutions
+        solutions = [sol.copy() for sol in solutions]
+        cleanup_solutions(newselect, solutions)
+        newselect.set_possible_types(solutions)
+        # full sub-query
+        aliases = [VariableRef(select.get_variable(avar.name, i))
+                   for i, avar in enumerate(newselect.selection)]
+        select.add_subquery(SubQuery(aliases, myunion), check=False)
+    return modified
+
+def _new_solutions(rqlst, solutions):
+    """first filter out subqueries variables from solutions"""
+    newsolutions = []
+    for origsol in solutions:
+        asol = {}
+        # only keep variables still defined on the statement, dropping
+        # duplicate solutions produced by the projection
+        for vname in rqlst.defined_vars:
+            asol[vname] = origsol[vname]
+        if not asol in newsolutions:
+            newsolutions.append(asol)
+    return newsolutions
+
+def remove_unused_solutions(rqlst, solutions, varmap, schema):
+    """cleanup solutions: remove solutions where invariant variables are taking
+    different types
+
+    return a 3-uple:
+    * the filtered list of solutions
+    * a dictionary mapping EXISTS scopes to their own (solutions, variables)
+    * the set of remaining unstable variable names
+    """
+    newsolutions = _new_solutions(rqlst, solutions)
+    existssols = {}
+    unstable = set()
+    for vname, var in rqlst.defined_vars.iteritems():
+        vtype = newsolutions[0][vname]
+        if var._q_invariant or vname in varmap:
+            # invariant or mapped to a temporary table: one type is enough,
+            # drop the solutions where it differs
+            for i in xrange(len(newsolutions)-1, 0, -1):
+                if vtype != newsolutions[i][vname]:
+                    newsolutions.pop(i)
+        elif not var.scope is rqlst:
+            # move apart variables which are in a EXISTS scope and are variating
+            try:
+                thisexistssols, thisexistsvars = existssols[var.scope]
+            except KeyError:
+                thisexistssols = [newsolutions[0]]
+                thisexistsvars = set()
+                existssols[var.scope] = thisexistssols, thisexistsvars
+            for i in xrange(len(newsolutions)-1, 0, -1):
+                if vtype != newsolutions[i][vname]:
+                    thisexistssols.append(newsolutions.pop(i))
+                    thisexistsvars.add(vname)
+        else:
+            # remember unstable variables
+            for i in xrange(1, len(newsolutions)):
+                if vtype != newsolutions[i][vname]:
+                    unstable.add(vname)
+    if len(newsolutions) > 1:
+        if rewrite_unstable_outer_join(rqlst, newsolutions, unstable, schema):
+            # remove variables extracted to subqueries from solutions
+            newsolutions = _new_solutions(rqlst, newsolutions)
+    return newsolutions, existssols, unstable
+
+def relation_info(relation):
+    """return a 4-uple (lhs, lhsconst, rhs, rhsconst) for the given relation:
+    for each side, the variable (or None when the side is a constant or a
+    column alias) and the constant node bound to it (or None)
+    """
+    lhs, rhs = relation.get_variable_parts()
+    try:
+        lhs = lhs.variable
+        lhsconst = lhs.stinfo['constnode']
+    except AttributeError:
+        lhsconst = lhs
+        lhs = None
+    except KeyError:
+        lhsconst = None # ColumnAlias
+    try:
+        rhs = rhs.variable
+        rhsconst = rhs.stinfo['constnode']
+    except AttributeError:
+        rhsconst = rhs
+        rhs = None
+    except KeyError:
+        rhsconst = None # ColumnAlias
+    return lhs, lhsconst, rhs, rhsconst
+
+def switch_relation_field(sql, table=''):
+    """return `sql` with `table`.eid_from and `table`.eid_to swapped, using a
+    temporary marker so one side is not overwritten by the other
+    """
+    switchedsql = sql.replace(table + '.eid_from', '__eid_from__')
+    switchedsql = switchedsql.replace(table + '.eid_to',
+                                      table + '.eid_from')
+    return switchedsql.replace('__eid_from__', table + '.eid_to')
+
+def sort_term_selection(sorts, selectedidx, rqlst, groups):
+    # XXX beurk
+    # `rqlst` may be either a Select node or a plain list of terms (the outer
+    # selection built by select_sql), hence the two `append` flavors below
+    if isinstance(rqlst, list):
+        def append(term):
+            rqlst.append(term)
+    else:
+        def append(term):
+            rqlst.selection.append(term.copy(rqlst))
+    for sortterm in sorts:
+        term = sortterm.term
+        if not isinstance(term, Constant) and not str(term) in selectedidx:
+            selectedidx.append(str(term))
+            append(term)
+            # when grouping, sort variables must also appear in the groups
+            if groups:
+                for vref in term.iget_nodes(VariableRef):
+                    if not vref in groups:
+                        groups.append(vref)
+        
+def fix_selection(rqlst, selectedidx, needwrap, sorts, groups, having):
+    """append to the statement's selection the terms required by sort, group
+    or having clauses but not yet selected (tracking them in `selectedidx`)
+    """
+    if sorts:
+        sort_term_selection(sorts, selectedidx, rqlst, not needwrap and groups)
+    if needwrap:
+        if groups:
+            for vref in groups:
+                if not vref.name in selectedidx:
+                    selectedidx.append(vref.name)
+                    rqlst.selection.append(vref)
+        if having:
+            for term in having:
+                for vref in term.iget_nodes(VariableRef):
+                    if not vref.name in selectedidx:
+                        selectedidx.append(vref.name)
+                        rqlst.selection.append(vref)
+
+# IGenerator implementation for RQL->SQL ######################################
+
+
+class StateInfo(object):
+    """mutable state used while generating the sql for one solution of a
+    select statement (tables, restrictions, EXISTS handling, scopes...)
+    """
+    def __init__(self, existssols, unstablevars):
+        self.existssols = existssols
+        self.unstablevars = unstablevars
+        # tables introduced by subqueries, kept across reset() calls
+        self.subtables = {}
+        
+    def reset(self, solution):
+        """reset some visit variables"""
+        self.solution = solution
+        self.count = 0
+        self.done = set()
+        self.tables = self.subtables.copy()
+        self.actual_tables = [[]]
+        for _, tsql in self.tables.itervalues():
+            self.actual_tables[-1].append(tsql)
+        self.outer_tables = {}
+        self.duplicate_switches = []
+        self.attr_vars = {}
+        self.aliases = {}
+        self.restrictions = []
+        self._restr_stack = []
+        
+    def add_restriction(self, restr):
+        # ignore None / empty restrictions
+        if restr:
+            self.restrictions.append(restr)
+            
+    def iter_exists_sols(self, exists):
+        """yield once per solution relevant for the given EXISTS node,
+        temporarily switching self.solution / self.tables for each of them
+        """
+        if not exists in self.existssols:
+            yield 1
+            return
+        thisexistssols, thisexistsvars = self.existssols[exists]
+        origsol = self.solution
+        origtables = self.tables
+        done = self.done
+        for thisexistssol in thisexistssols:
+            for vname in self.unstablevars:
+                if thisexistssol[vname] != origsol[vname] and vname in thisexistsvars:
+                    break
+            else:
+                self.tables = origtables.copy()
+                self.solution = thisexistssol
+                yield 1
+                # cleanup self.done from stuff specific to exists
+                for var in thisexistsvars:
+                    if var in done:
+                        done.remove(var)
+                for rel in exists.iget_nodes(Relation):
+                    if rel in done:
+                        done.remove(rel)
+        self.solution = origsol
+        self.tables = origtables
+
+    def push_scope(self):
+        # enter a nested scope: tables and restrictions generated inside it
+        # are collected separately until pop_scope()
+        self.actual_tables.append([])
+        self._restr_stack.append(self.restrictions)
+        self.restrictions = []
+
+    def pop_scope(self):
+        # leave the current scope, returning its restrictions and tables
+        restrictions = self.restrictions
+        self.restrictions = self._restr_stack.pop()
+        return restrictions, self.actual_tables.pop()
+    
+    
+class SQLGenerator(object):
+    """
+    generation of SQL from the fully expanded RQL syntax tree
+    SQL is designed to be used with a CubicWeb SQL schema
+
+    Groups and sort are not handled here since they should not be handled at
+    this level (see cubicweb.server.querier)
+    
+    we should not have errors here !
+
+    WARNING: a CubicWebSQLGenerator instance is not thread safe, but generate is
+    protected by a lock
+    """
+    
+    def __init__(self, schema, dbms_helper, dbencoding='UTF-8'):
+        self.schema = schema
+        self.dbms_helper = dbms_helper
+        self.dbencoding = dbencoding
+        # map rql keywords to the backend specific sql expressions
+        self.keyword_map = {'NOW' : self.dbms_helper.sql_current_timestamp,
+                            'TODAY': self.dbms_helper.sql_current_date,
+                            }
+        if not self.dbms_helper.union_parentheses_support:
+            # backend rejects parenthesized union subqueries (see
+            # noparen_union_sql)
+            self.union_sql = self.noparen_union_sql
+        # instances are not thread safe: generate() serializes through this lock
+        self._lock = threading.Lock()
+        
+    def generate(self, union, args=None, varmap=None):
+        """return SQL queries and a variable dictionary from a RQL syntax tree
+
+        :union: the rql syntax tree (`rql.stmts.Union`)
+        :args: optional dictionary with values of substitutions used in the query
+        :varmap: optional dictionary mapping variable name to a special table
+          name, in case the query has to fetch data from temporary tables
+
+        return an sql string and a dictionary with substitutions values
+        """
+        if args is None:
+            args = {}
+        if varmap is None:
+            varmap =  {}
+        # serialize calls: the instance state below is shared (see class
+        # docstring)
+        self._lock.acquire()
+        self._args = args
+        self._varmap = varmap
+        self._query_attrs = {}
+        self._state = None
+        try:
+            # union query for each rqlst / solution
+            sql = self.union_sql(union)
+            # we are done
+            return sql, self._query_attrs
+        finally:
+            self._lock.release()
+
+    def union_sql(self, union, needalias=False): # pylint: disable-msg=E0202
+        """generate sql for a union: a single select is emitted as is, several
+        selects are parenthesized and joined with UNION ALL
+        """
+        if len(union.children) == 1:
+            return self.select_sql(union.children[0], needalias)
+        sqls = ('(%s)' % self.select_sql(select, needalias)
+                for select in union.children)
+        return '\nUNION ALL\n'.join(sqls)
+
+    def noparen_union_sql(self, union, needalias=False):
+        # needed for sqlite backend which doesn't like parentheses around
+        # union query. This may cause bug in some condition (sort in one of
+        # the subquery) but will work in most case
+        # see http://www.sqlite.org/cvstrac/tktview?tn=3074
+        sqls = (self.select_sql(select, needalias)
+                for i, select in enumerate(union.children))
+        return '\nUNION ALL\n'.join(sqls)
+    
+    def select_sql(self, select, needalias=False):
+        """return the sql for a single selection statement of the syntax tree
+
+        :select: a selection statement of the syntax tree (`rql.stmts.Select`)
+        :needalias: boolean telling if aliases (CX) have to be generated for
+          the selected columns (eg when the query is part of a union or will
+          be wrapped to deal with aggregat and/or grouping)
+        """
+        distinct = selectsortterms = select.need_distinct
+        sorts = select.orderby
+        groups = select.groupby
+        having = select.having
+        # remember selection, it may be changed and have to be restored
+        origselection = select.selection[:]
+        # check if the query will have union subquery, if it need sort term
+        # selection (union or distinct query) and wrapping (union with groups)
+        needwrap = False
+        sols = select.solutions
+        if len(sols) > 1:
+            # remove invariant from solutions
+            sols, existssols, unstable = remove_unused_solutions(
+                select, sols, self._varmap, self.schema)
+            if len(sols) > 1:
+                # if there is still more than one solution, a UNION will be
+                # generated and so sort terms have to be selected
+                selectsortterms = True
+                # and if select is using group by or aggregat, a wrapping
+                # query will be necessary
+                if groups or select.has_aggregat:
+                    select.select_only_variables()
+                    needwrap = True                        
+        else:
+            existssols, unstable = {}, ()
+        state = StateInfo(existssols, unstable)
+        # treat subqueries
+        self._subqueries_sql(select, state)
+        # generate sql for this select node
+        selectidx = [str(term) for term in select.selection]
+        if needwrap:
+            outerselection = origselection[:]
+            if sorts and selectsortterms:
+                outerselectidx = [str(term) for term in outerselection]
+                if distinct:
+                    sort_term_selection(sorts, outerselectidx,
+                                        outerselection, groups)
+            else:
+                outerselectidx = selectidx[:]
+        fix_selection(select, selectidx, needwrap,
+                      selectsortterms and sorts, groups, having)
+        if needwrap:
+            fselectidx = outerselectidx
+            fneedwrap = len(outerselection) != len(origselection)
+        else:
+            fselectidx = selectidx
+            fneedwrap = len(select.selection) != len(origselection)
+        if fneedwrap:
+            # extra terms were selected (for sort/group/having): aliases are
+            # needed so the final wrapping query can reselect the originals
+            needalias = True
+        self._in_wrapping_query = False
+        self._state = state
+        try:
+            sql = self._solutions_sql(select, sols, distinct, needalias or needwrap)
+            # generate groups / having before wrapping query selection to
+            # get correct column aliases
+            self._in_wrapping_query = needwrap
+            if groups:
+                # no constant should be inserted in GROUP BY else the backend will
+                # interpret it as a positional index in the selection
+                groups = ','.join(vref.accept(self) for vref in groups
+                                  if not isinstance(vref, Constant))
+            if having:
+                # filter out constants as for GROUP BY
+                having = ','.join(vref.accept(self) for vref in having
+                                  if not isinstance(vref, Constant))
+            if needwrap:
+                sql = '%s FROM (%s) AS T1' % (self._selection_sql(outerselection, distinct,
+                                                                  needalias),
+                                              sql)
+            if groups:
+                sql += '\nGROUP BY %s' % groups
+            if having:
+                sql += '\nHAVING %s' % having
+            # sort
+            if sorts:
+                sql += '\nORDER BY %s' % ','.join(self._sortterm_sql(sortterm, 
+                                                                     fselectidx)
+                                                  for sortterm in sorts)
+                if fneedwrap:
+                    # reselect only the originally requested columns
+                    selection = ['T1.C%s' % i for i in xrange(len(origselection))]
+                    sql = 'SELECT %s FROM (%s) AS T1' % (','.join(selection), sql)
+        finally:
+            # restore the statement's original selection in all cases
+            select.selection = origselection
+        # limit / offset
+        limit = select.limit
+        if limit:
+            sql += '\nLIMIT %s' % limit
+        offset = select.offset
+        if offset:
+            sql += '\nOFFSET %s' % offset
+        return sql
+
    def _subqueries_sql(self, select, state):
        """generate sql for the WITH subqueries of `select`, registering each
        one as a virtual table in `state.subtables`

        Each subquery is compiled through union_sql with needalias=True so its
        columns come out as C0, C1, ... and the whole query is aliased as
        _T<i>; the select's column aliases are then bound to
        <tablealias>.C<colnum> for later references.
        """
        for i, subquery in enumerate(select.with_):
            sql = self.union_sql(subquery.query, needalias=True)
            tablealias = '_T%s' % i
            sql = '(%s) AS %s' % (sql, tablealias)
            # (0, sql): same (scope, sql) layout as entries in self._state.tables
            state.subtables[tablealias] = (0, sql)
            for vref in subquery.aliases:
                alias = vref.variable
                alias._q_sqltable = tablealias
                alias._q_sql = '%s.C%s' % (tablealias, alias.colnum)
+
    def _solutions_sql(self, select, solutions, distinct, needalias):
        """generate one SELECT per solution (variable type assignment) and
        join them with UNION (when distinct) or UNION ALL
        """
        sqls = []
        for solution in solutions:
            self._state.reset(solution)
            # visit restriction subtree
            if select.where is not None:
                self._state.add_restriction(select.where.accept(self))
            sql = [self._selection_sql(select.selection, distinct, needalias)]
            if self._state.restrictions:
                sql.append('WHERE %s' % ' AND '.join(self._state.restrictions))
            # add required tables
            assert len(self._state.actual_tables) == 1, self._state.actual_tables
            tables = self._state.actual_tables[-1]
            if tables:
                # sort for test predictability
                sql.insert(1, 'FROM %s' % ', '.join(sorted(tables)))
            elif self._state.restrictions and self.dbms_helper.needs_from_clause:
                # backend requires a FROM clause whenever a WHERE clause is
                # present: fake one
                sql.insert(1, 'FROM (SELECT 1) AS _T')
            sqls.append('\n'.join(sql))
        if distinct:
            return '\nUNION\n'.join(sqls)
        else:
            return '\nUNION ALL\n'.join(sqls)
+        
+    def _selection_sql(self, selected, distinct, needaliasing=False):
+        clause = []
+        for term in selected:
+            sql = term.accept(self)
+            if needaliasing:
+                colalias = 'C%s' % len(clause)
+                clause.append('%s AS %s' % (sql, colalias))
+                if isinstance(term, VariableRef):
+                    self._state.aliases[term.name] = colalias
+            else:
+                clause.append(sql)
+        if distinct:
+            return 'SELECT DISTINCT %s' % ', '.join(clause)
+        return 'SELECT %s' % ', '.join(clause)
+
+    def _sortterm_sql(self, sortterm, selectidx):
+        term = sortterm.term
+        try:
+            sqlterm = str(selectidx.index(str(term)) + 1)
+        except ValueError:
+            # Constant node or non selected term
+            sqlterm = str(term.accept(self))
+        if sortterm.asc:
+            return sqlterm
+        else:
+            return '%s DESC' % sqlterm
+
+    def visit_and(self, et):
+        """generate SQL for a AND subtree"""
+        res = []
+        for c in et.children:
+            part = c.accept(self)
+            if part:
+                res.append(part)
+        return ' AND '.join(res)
+
+    def visit_or(self, ou):
+        """generate SQL for a OR subtree"""
+        res = []
+        for c in ou.children:
+            part = c.accept(self)
+            if part:
+                res.append('(%s)' % part)
+        if res:
+            if len(res) > 1:
+                return '(%s)' % ' OR '.join(res)
+            return res[0]
+        return ''
+    
    def visit_not(self, node):
        """generate SQL for a NOT subtree

        When the negated child pulled in its own restrictions/tables, they are
        wrapped into a NOT EXISTS subquery; a negated EXISTS child is simply
        prefixed with NOT.
        """
        self._state.push_scope()
        csql = node.children[0].accept(self)
        sqls, tables = self._state.pop_scope()
        if node in self._state.done or not csql:
            # already processed or no sql generated by children: give the
            # collected restrictions and tables back to the enclosing scope
            self._state.actual_tables[-1] += tables
            self._state.restrictions += sqls
            return csql
        if isinstance(node.children[0], Exists):
            # the EXISTS child generated a self-contained subquery
            assert not sqls, (sqls, str(node.stmt))
            assert not tables, (tables, str(node.stmt))
            return 'NOT %s' % csql
        sqls.append(csql)
        if tables:
            select = 'SELECT 1 FROM %s' % ','.join(tables)
        else:
            select = 'SELECT 1'
        # sqls can't be empty at this point since csql was just appended
        if sqls:
            sql = 'NOT EXISTS(%s WHERE %s)' % (select, ' AND '.join(sqls))
        else:
            sql = 'NOT EXISTS(%s)' % select
        return sql
+
+    def visit_exists(self, exists):
+        """generate SQL name for a exists subquery"""
+        sqls = []
+        for dummy in self._state.iter_exists_sols(exists):
+            sql = self._visit_exists(exists)
+            if sql:
+                sqls.append(sql)
+        if not sqls:
+            return ''
+        return 'EXISTS(%s)' % ' UNION '.join(sqls)
+            
+    def _visit_exists(self, exists):
+        self._state.push_scope()
+        restriction = exists.children[0].accept(self)
+        restrictions, tables = self._state.pop_scope()
+        if restriction:
+            restrictions.append(restriction)
+        restriction = ' AND '.join(restrictions)
+        if not restriction:
+            return ''
+        if not tables:
+            # XXX could leave surrounding EXISTS() in this case no?
+            sql = 'SELECT 1 WHERE %s' % restriction 
+        else:
+            sql = 'SELECT 1 FROM %s WHERE %s' % (', '.join(tables), restriction)
+        return sql
+
+    
    def visit_relation(self, relation):
        """generate SQL for a relation, dispatching on the relation kind:
        final (attribute) relations vs non final ones, the latter being
        either optional (outer join), inlined or regular
        """
        rtype = relation.r_type
        # don't care of type constraint statement (i.e. relation_type = 'is')
        if relation.is_types_restriction():
            return ''
        lhs, rhs = relation.get_parts()
        rschema = self.schema.rschema(rtype)
        if rschema.is_final():
            if rtype == 'eid' and lhs.variable._q_invariant and \
                   lhs.variable.stinfo['constnode']:
                # special case where this restriction is already generated by
                # some other relation
                return ''
            # attribute relation
            if rtype == 'has_text':
                sql = self._visit_has_text_relation(relation)
            else:
                rhs_vars = rhs.get_nodes(VariableRef)
                if rhs_vars:
                    # if variable(s) in the RHS
                    sql = self._visit_var_attr_relation(relation, rhs_vars)
                else:
                    # no variables in the RHS
                    sql = self._visit_attribute_relation(relation)
                if relation.neged(strict=True):
                    self._state.done.add(relation.parent)
                    sql = 'NOT (%s)' % sql
        else:
            if rtype == 'is' and rhs.operator == 'IS':
                # special case "C is NULL"
                if lhs.name in self._varmap:
                    lhssql = self._varmap[lhs.name]
                else:
                    lhssql = lhs.accept(self)
                return '%s%s' % (lhssql, rhs.accept(self))
            if '%s.%s' % (lhs, relation.r_type) in self._varmap:
                # relation has already been processed by a previous step
                # NOTE(review): returns None here while other branches return
                # '' -- callers appear to treat both as falsy, confirm
                return
            if relation.optional:
                # check it has not already been treaten (to get necessary
                # information to add an outer join condition)
                if relation in self._state.done:
                    return
                # OPTIONAL relation, generate a left|right outer join
                sql = self._visit_outer_join_relation(relation, rschema)
            elif rschema.inlined:
                # relation stored as a column of the subject's entity table
                sql = self._visit_inlined_relation(relation)
#             elif isinstance(relation.parent, Not):
#                 self._state.done.add(relation.parent)
#                 # NOT relation
#                 sql = self._visit_not_relation(relation, rschema)
            else:
                # regular (non final) relation
                sql = self._visit_relation(relation, rschema)
        return sql
+
    def _visit_inlined_relation(self, relation):
        """generate SQL for a non final relation stored as a column of the
        subject entity's table (inlined relation)
        """
        lhsvar, _, rhsvar, rhsconst = relation_info(relation)
        # we are sure here to have a lhsvar
        assert lhsvar is not None
        lhssql = self._inlined_var_sql(lhsvar, relation.r_type)
        if isinstance(relation.parent, Not):
            # negation: the inlined column must be NULL (or point to another
            # entity when the rhs variable is not invariant)
            self._state.done.add(relation.parent)
            sql = "%s IS NULL" % lhssql
            if rhsvar is not None and not rhsvar._q_invariant:
                sql = '(%s OR %s!=%s)' % (sql, lhssql, rhsvar.accept(self))
            return sql
        if rhsconst is not None:
            return '%s=%s' % (lhssql, rhsconst.accept(self))
        if isinstance(rhsvar, Variable) and not rhsvar.name in self._varmap:
            # if the rhs variable is only linked to this relation, this mean we
            # only want the relation to exists, eg NOT NULL in case of inlined
            # relation
            if len(rhsvar.stinfo['relations']) == 1 and rhsvar._q_invariant:
                return '%s IS NOT NULL' % lhssql
            if rhsvar._q_invariant:
                return self._extra_join_sql(relation, lhssql, rhsvar)
        return '%s=%s' % (lhssql, rhsvar.accept(self))
+
    def _process_relation_term(self, relation, rid, termvar, termconst, relfield):
        """yield restriction(s) binding one side of the relation table `rid`
        (`relfield` is 'eid_from' or 'eid_to') to the given term
        """
        if termconst or isinstance(termvar, ColumnAlias) or not termvar._q_invariant:
            # constant, subquery column or non invariant variable: plain
            # equality against the term's sql
            termsql = termconst and termconst.accept(self) or termvar.accept(self)
            yield '%s.%s=%s' % (rid, relfield, termsql)
        elif termvar._q_invariant:
            # if the variable is mapped, generate restriction anyway
            if termvar.name in self._varmap:
                termsql = termvar.accept(self)
                yield '%s.%s=%s' % (rid, relfield, termsql)
            # invariant variable: join through this relation unless it is the
            # variable's principal relation
            extrajoin = self._extra_join_sql(relation, '%s.%s' % (rid, relfield), termvar)
            if extrajoin:
                yield extrajoin
+        
    def _visit_relation(self, relation, rschema):
        """generate SQL for a regular (non final, non inlined, non optional)
        relation, going through the <rtype>_relation table

        implements optimization 1.
        """
        if relation.r_type == 'identity':
            # special case "X identity Y"
            lhs, rhs = relation.get_parts()
            if isinstance(relation.parent, Not):
                self._state.done.add(relation.parent)
                return 'NOT %s%s' % (lhs.accept(self), rhs.accept(self))
            return '%s%s' % (lhs.accept(self), rhs.accept(self))
        lhsvar, lhsconst, rhsvar, rhsconst = relation_info(relation)
        rid = self._relation_table(relation)
        sqls = []
        # restrict both ends of the relation table
        sqls += self._process_relation_term(relation, rid, lhsvar, lhsconst, 'eid_from')
        sqls += self._process_relation_term(relation, rid, rhsvar, rhsconst, 'eid_to')
        sql = ' AND '.join(sqls)
        if rschema.symetric:
            # symetric relation: accept the pair in either direction
            sql = '(%s OR %s)' % (sql, switch_relation_field(sql))
        return sql
+
    def _visit_outer_join_relation(self, relation, rschema):
        """
        left outer join syntax (optional=='right'):
          X relation Y?
          
        right outer join syntax (optional=='left'):
          X? relation Y
          
        full outer join syntaxes (optional=='both'):
          X? relation Y?

        if relation is inlined:
           if it's a left outer join:
           -> X LEFT OUTER JOIN Y ON (X.relation=Y.eid)
           elif it's a right outer join:
           -> Y LEFT OUTER JOIN X ON (X.relation=Y.eid)
           elif it's a full outer join:
           -> X FULL OUTER JOIN Y ON (X.relation=Y.eid)
        else:
           if it's a left outer join:
           -> X LEFT OUTER JOIN relation ON (relation.eid_from=X.eid)
              LEFT OUTER JOIN Y ON (relation.eid_to=Y.eid)
           elif it's a right outer join:
           -> Y LEFT OUTER JOIN relation ON (relation.eid_to=Y.eid)
              LEFT OUTER JOIN X ON (relation.eid_from=X.eid)
           elif it's a full outer join:
           -> X FULL OUTER JOIN Y ON (X.relation=Y.eid)
        """
        lhsvar, lhsconst, rhsvar, rhsconst = relation_info(relation)
        if relation.optional == 'right':
            joinattr, restrattr = 'eid_from', 'eid_to'
        else:
            # swap sides so the mandatory side is always handled as "lhs"
            lhsvar, rhsvar = rhsvar, lhsvar
            lhsconst, rhsconst = rhsconst, lhsconst
            joinattr, restrattr = 'eid_to', 'eid_from'
        if relation.optional == 'both':
            outertype = 'FULL'
        else:
            outertype = 'LEFT'
        if rschema.inlined or relation.r_type == 'identity':
            # inlined relation: join entity tables directly on the inlined
            # column (or eid for identity)
            self._state.done.add(relation)
            t1 = self._var_table(lhsvar)
            if relation.r_type == 'identity':
                attr = 'eid'
            else:
                attr = relation.r_type
            # reset lhs/rhs, we need the initial order now
            lhs, rhs = relation.get_variable_parts()
            if '%s.%s' % (lhs.name, attr) in self._varmap:
                lhssql = self._varmap['%s.%s' % (lhs.name, attr)]
            else:
                lhssql = '%s.%s' % (self._var_table(lhs.variable), attr)
            if not rhsvar is None:
                t2 = self._var_table(rhsvar)
                if t2 is None:
                    if rhsconst is not None:
                        # inlined relation with invariant as rhs
                        condition = '%s=%s' % (lhssql, rhsconst.accept(self))
                        if relation.r_type != 'identity':
                            condition = '(%s OR %s IS NULL)' % (condition, lhssql)
                        if not lhsvar.stinfo['optrelations']:
                            return condition
                        self.add_outer_join_condition(lhsvar, t1, condition)
                    # NOTE(review): returns None while most branches return ''
                    return
            else:
                condition = '%s=%s' % (lhssql, rhsconst.accept(self))
                self.add_outer_join_condition(lhsvar, t1, condition)
                # NOTE(review): execution falls through to the join below where
                # t2 is unbound in this branch (potential NameError) -- confirm
                # whether this path is actually reachable with rhsvar None
            join = '%s OUTER JOIN %s ON (%s=%s)' % (
                outertype, self._state.tables[t2][1], lhssql, rhs.accept(self))
            self.replace_tables_by_outer_join(join, t1, t2)
            return ''
        # regular relation: outer join through the relation table
        lhssql = lhsconst and lhsconst.accept(self) or lhsvar.accept(self)
        rhssql = rhsconst and rhsconst.accept(self) or rhsvar.accept(self)
        rid = self._relation_table(relation)
        if not lhsvar:
            # lhs is a constant: the relation table itself is the join's root
            join = ''
            toreplace = []
            maintable = rid
        else:
            join = '%s OUTER JOIN %s ON (%s.%s=%s' % (
                outertype, self._state.tables[rid][1], rid, joinattr, lhssql)
            toreplace = [rid]
            maintable = self._var_table(lhsvar)
            if rhsconst:
                join += ' AND %s.%s=%s)' % (rid, restrattr, rhssql)
            else:
                join += ')'
        if not rhsconst:
            rhstable = self._var_table(rhsvar)
            if rhstable:
                assert rhstable is not None, rhsvar
                join += ' %s OUTER JOIN %s ON (%s.%s=%s)' % (
                    outertype, self._state.tables[rhstable][1], rid, restrattr, rhssql)
                toreplace.append(rhstable)
        self.replace_tables_by_outer_join(join, maintable, *toreplace)
        return ''
+
    def _visit_var_attr_relation(self, relation, rhs_vars):
        """visit an attribute relation with variable(s) in the RHS

        attribute variables are used either in the selection or for
        unification (eg X attr1 A, Y attr2 A). In case of selection,
        nothing to do here.
        """
        contextrels = {}
        attrvars = self._state.attr_vars
        for var in rhs_vars:
            try:
                contextrels[var.name] = attrvars[var.name]
            except KeyError:
                # first time this attribute variable is seen: remember the
                # relation providing its value
                attrvars[var.name] = relation
        if not contextrels:
            # selection only: visit the rhs for its side effects, no sql
            relation.children[1].accept(self, contextrels)
            return ''
        # at least one variable is already in attr_vars, this means we have to
        # generate unification expression
        lhssql = self._inlined_var_sql(relation.children[0].variable,
                                       relation.r_type)
        return '%s%s' % (lhssql, relation.children[1].accept(self, contextrels))
+    
    def _visit_attribute_relation(self, relation):
        """generate SQL for an attribute relation (no variable in the RHS)"""
        lhs, rhs = relation.get_parts()
        rhssql = rhs.accept(self)
        table = self._var_table(lhs.variable)
        if table is None:
            # no table is only possible for the eid pseudo attribute, handled
            # through the variable's own sql
            assert relation.r_type == 'eid'
            lhssql = lhs.accept(self)
        else:
            try:
                # user-provided mapping for this attribute column?
                lhssql = self._varmap['%s.%s' % (lhs.name, relation.r_type)]
            except KeyError:
                lhssql = '%s.%s' % (table, relation.r_type)
        try:
            # _q_needcast is set by visit_constant for Date/Datetime keywords
            if relation._q_needcast == 'TODAY':
                sql = 'DATE(%s)%s' % (lhssql, rhssql)
            # XXX which cast function should be used
            #elif relation._q_needcast == 'NOW':
            #    sql = 'TIMESTAMP(%s)%s' % (lhssql, rhssql)
            else:
                sql = '%s%s' % (lhssql, rhssql)
        except AttributeError:
            # no cast requested on this relation
            sql = '%s%s' % (lhssql, rhssql)
        if lhs.variable.stinfo['optrelations']:
            # variable takes part in an outer join: attach the restriction to
            # the join condition instead of returning it (hence return None)
            self.add_outer_join_condition(lhs.variable, table, sql)
        else:
            return sql
+
    def _visit_has_text_relation(self, relation):
        """generate SQL for a has_text relation (full text search), delegating
        the actual restriction to the backend helper
        """
        lhs, rhs = relation.get_parts()
        const = rhs.children[0]
        alias = self._fti_table(relation)
        jointo = lhs.accept(self)
        restriction = ''
        lhsvar = lhs.variable
        me_is_principal = lhsvar.stinfo.get('principal') is relation
        if me_is_principal:
            if not lhsvar.stinfo['typerels']:
                # the variable is using the fti table, no join needed
                jointo = None
            elif not lhsvar.name in self._varmap:
                # join on entities instead of etype's table to get result for
                # external entities on multisources configurations
                ealias = lhsvar._q_sqltable = lhsvar.name
                jointo = lhsvar._q_sql = '%s.eid' % ealias
                self.add_table('entities AS %s' % ealias, ealias)
                if not lhsvar._q_invariant or len(lhsvar.stinfo['possibletypes']) == 1:
                    restriction = " AND %s.type='%s'" % (ealias, self._state.solution[lhs.name])
                else:
                    etypes = ','.join("'%s'" % etype for etype in lhsvar.stinfo['possibletypes'])
                    restriction = " AND %s.type IN (%s)" % (ealias, etypes)
        if isinstance(relation.parent, Not):
            # negated full text search
            self._state.done.add(relation.parent)
            not_ = True
        else:
            not_ = False
        return self.dbms_helper.fti_restriction_sql(alias, const.eval(self._args),
                                                    jointo, not_) + restriction
+        
+    def visit_comparison(self, cmp, contextrels=None):
+        """generate SQL for a comparaison"""
+        if len(cmp.children) == 2:
+            lhs, rhs = cmp.children
+        else:
+            lhs = None
+            rhs = cmp.children[0]
+        operator = cmp.operator
+        if operator in ('IS', 'LIKE', 'ILIKE'):
+            if operator == 'ILIKE' and not self.dbms_helper.ilike_support:
+                operator = ' LIKE '
+            else:
+                operator = ' %s ' % operator
+        elif isinstance(rhs, Function) and rhs.name == 'IN':
+            assert operator == '='
+            operator = ' '
+        if lhs is None:
+            return '%s%s'% (operator, rhs.accept(self, contextrels))
+        return '%s%s%s'% (lhs.accept(self, contextrels), operator,
+                          rhs.accept(self, contextrels))
+            
+    def visit_mathexpression(self, mexpr, contextrels=None):
+        """generate SQL for a mathematic expression"""
+        lhs, rhs = mexpr.get_parts()
+        # check for string concatenation
+        operator = mexpr.operator
+        try:
+            if mexpr.operator == '+' and mexpr.get_type(self._state.solution, self._args) == 'String':
+                operator = '||'
+        except CoercionError:
+            pass
+        return '(%s %s %s)'% (lhs.accept(self, contextrels), operator,
+                              rhs.accept(self, contextrels))
+        
+    def visit_function(self, func, contextrels=None):
+        """generate SQL name for a function"""
+        # function_description will check function is supported by the backend
+        self.dbms_helper.function_description(func.name) 
+        return '%s(%s)' % (func.name, ', '.join(c.accept(self, contextrels)
+                                                for c in func.children))
+
    def visit_constant(self, constant, contextrels=None):
        """generate SQL name for a constant"""
        value = constant.value
        if constant.type is None:
            return 'NULL'
        if constant.type == 'Int' and  isinstance(constant.parent, SortTerm):
            # integer in ORDER BY is a positional column index, inline it
            return constant.value
        if constant.type in ('Date', 'Datetime'):
            # mark the relation so the lhs gets a matching cast (see
            # _visit_attribute_relation) and emit the backend keyword
            rel = constant.relation()
            if rel is not None:
                rel._q_needcast = value
            return self.keyword_map[value]()
        if constant.type == 'Substitute':
            # named query argument: reuse the user-provided key
            _id = constant.value
            if isinstance(_id, unicode):
                _id = _id.encode()
        else:
            # inline constant: generate a unique key and register its value as
            # a query argument so the dbapi layer does the quoting
            _id = str(id(constant)).replace('-', '', 1)
            if isinstance(value, unicode):
                value = value.encode(self.dbencoding)
            self._query_attrs[_id] = value
        return '%%(%s)s' % _id
+        
+    def visit_variableref(self, variableref, contextrels=None):
+        """get the sql name for a variable reference"""
+        # use accept, .variable may be a variable or a columnalias
+        return variableref.variable.accept(self, contextrels)
+
+    def visit_columnalias(self, colalias, contextrels=None):
+        """get the sql name for a subquery column alias"""
+        if colalias.name in self._varmap:
+            sql = self._varmap[colalias.name]
+            self.add_table(sql.split('.', 1)[0])
+            return sql
+        return colalias._q_sql
+    
    def visit_variable(self, variable, contextrels=None):
        """get the table name and sql string for a variable"""
        if contextrels is None and variable.name in self._state.done:
            # variable already computed for this solution
            if self._in_wrapping_query:
                # refer to the wrapped query's column alias
                return 'T1.%s' % self._state.aliases[variable.name]
            return variable._q_sql
        self._state.done.add(variable.name)
        vtablename = None
        if contextrels is None and variable.name in self._varmap:
            # user-provided mapping for this variable
            sql, vtablename = self._var_info(variable)            
        elif variable.stinfo['attrvar']:
            # attribute variable (systematically used in rhs of final
            # relation(s)), get table name and sql from any rhs relation
            sql = self._linked_var_sql(variable, contextrels)
        elif variable._q_invariant:
            # since variable is invariant, we know we won't found final relation
            principal = variable.stinfo['principal']
            if principal is None:
                # no principal relation: fall back to the entities table
                vtablename = variable.name
                self.add_table('entities AS %s' % variable.name, vtablename)
                sql = '%s.eid' % vtablename
                if variable.stinfo['typerels']:
                    # add additional restriction on entities.type column
                    pts = variable.stinfo['possibletypes']
                    if len(pts) == 1:
                        etype = iter(variable.stinfo['possibletypes']).next()
                        restr = "%s.type='%s'" % (vtablename, etype)
                    else:
                        etypes = ','.join("'%s'" % et for et in pts)
                        restr = '%s.type IN (%s)' % (vtablename, etypes)
                    self._state.add_restriction(restr)
            elif principal.r_type == 'has_text':
                # variable identified through the full text index table
                sql = '%s.%s' % (self._fti_table(principal),
                                 self.dbms_helper.fti_uid_attr)
            elif principal in variable.stinfo['rhsrelations']:
                if self.schema.rschema(principal.r_type).inlined:
                    sql = self._linked_var_sql(variable, contextrels)
                else:
                    sql = '%s.eid_to' % self._relation_table(principal)
            else:
                sql = '%s.eid_from' % self._relation_table(principal)
        else:
            # standard variable: get table name according to etype and use .eid
            # attribute
            sql, vtablename = self._var_info(variable)
        variable._q_sqltable = vtablename
        variable._q_sql = sql
        return sql
+
+    # various utilities #######################################################
+
+    def _extra_join_sql(self, relation, sql, var):
+        # if rhs var is invariant, and this relation is not its principal,
+        # generate extra join
+        try:
+            if not var.stinfo['principal'] is relation:
+                # need a predicable result for tests
+                return '%s=%s' % tuple(sorted((sql, var.accept(self))))
+        except KeyError:
+            # no principal defined, relation is necessarily the principal and
+            # so nothing to return here
+            pass
+        return ''
+    
    def _var_info(self, var):
        """return (sql, table) for an entity variable, adding its table to the
        FROM clause of the relevant scope
        """
        # if current var or one of its attribute is selected , it *must*
        # appear in the toplevel's FROM even if we're currently visiting
        # a EXISTS node
        if var.sqlscope is var.stmt:
            scope = 0
        else:
            scope = -1
        try:
            # user-provided mapping for this variable?
            sql = self._varmap[var.name]
            table = sql.split('.', 1)[0]
            if scope == -1:
                scope = self._varmap_table_scope(var.stmt, table)
            self.add_table(table, scope=scope)
        except KeyError:
            etype = self._state.solution[var.name]
            # XXX this check should be moved in rql.stcheck
            if self.schema.eschema(etype).is_final():
                raise BadRQLQuery(var.stmt.root)
            # entity table aliased with the variable name
            table = var.name
            sql = '%s.eid' % table
            self.add_table('%s AS %s' % (etype, table), table, scope=scope)
        return sql, table
+    
    def _inlined_var_sql(self, var, rtype):
        """return the sql for the column holding the inlined relation or
        attribute `rtype` of variable `var`, honouring user-provided mapping
        """
        try:
            sql = self._varmap['%s.%s' % (var.name, rtype)]
            # mapped column: make sure its table appears in the proper scope
            scope = var.sqlscope is var.stmt and 0 or -1
            self.add_table(sql.split('.', 1)[0], scope=scope)
        except KeyError:
            sql = '%s.%s' % (self._var_table(var), rtype)
            #self._state.done.add(var.name)
        return sql
+        
    def _linked_var_sql(self, variable, contextrels=None):
        """return the sql for an attribute variable, deduced from the relation
        linking it to its entity variable
        """
        if contextrels is None:
            try:
                # user-provided mapping for the attribute variable itself?
                return self._varmap[variable.name]            
            except KeyError:
                pass
        # pick the relation providing the attribute value
        rel = (contextrels and contextrels.get(variable.name) or 
               variable.stinfo.get('principal') or
               iter(variable.stinfo['rhsrelations']).next())
        linkedvar = rel.children[0].variable
        if rel.r_type == 'eid':
            return linkedvar.accept(self)
        if isinstance(linkedvar, ColumnAlias):
            raise BadRQLQuery('variable %s should be selected by the subquery'
                              % variable.name)
        try:
            # user-provided mapping for the linked variable's attribute?
            sql = self._varmap['%s.%s' % (linkedvar.name, rel.r_type)]
        except KeyError:
            linkedvar.accept(self)            
            sql = '%s.%s' % (linkedvar._q_sqltable, rel.r_type)
        return sql
+
+    # tables handling #########################################################
+
+    def alias_and_add_table(self, tablename):
+        alias = '%s%s' % (tablename, self._state.count)
+        self._state.count += 1
+        self.add_table('%s AS %s' % (tablename, alias), alias)
+        return alias
+        
+    def add_table(self, table, key=None, scope=-1):
+        if key is None:
+            key = table
+        if key in self._state.tables:
+            return
+        self._state.tables[key] = (len(self._state.actual_tables) - 1, table)
+        self._state.actual_tables[scope].append(table)
+    
    def replace_tables_by_outer_join(self, substitute, lefttable, *tables):
        """remove `tables` from the FROM clause and attach them to `lefttable`
        through the given outer join expression `substitute`
        """
        # NOTE(review): only ValueError (from .remove) is caught below; a
        # missing key in self._state.tables would raise KeyError -- confirm
        # that is intended
        for table in tables:
            try:
                scope, alias = self._state.tables[table]
                self._state.actual_tables[scope].remove(alias)
            except ValueError: # huum, not sure about what should be done here
                msg = "%s already used in an outer join, don't know what to do!"
                raise Exception(msg % table)
        try:
            # lefttable already part of an outer join: extend that expression
            tablealias = self._state.outer_tables[lefttable]
            actualtables = self._state.actual_tables[-1]
        except KeyError:
            # otherwise start from its plain alias and scope
            tablescope, tablealias = self._state.tables[lefttable]
            actualtables = self._state.actual_tables[tablescope]
        outerjoin = '%s %s' % (tablealias, substitute)
        self._update_outer_tables(lefttable, actualtables, tablealias, outerjoin)
        for table in tables:
            self._state.outer_tables[table] = outerjoin
+
    def add_outer_join_condition(self, var, table, condition):
        """append `condition` to the ON clause of the outer join expression
        in which `table` takes part
        """
        try:
            tablealias = self._state.outer_tables[table]
            actualtables = self._state.actual_tables[-1]
        except KeyError:
            # table not yet part of an outer join: visiting the variable's
            # optional relations creates the join expression, then retry
            for rel in var.stinfo['optrelations']:
                self.visit_relation(rel)
            assert self._state.outer_tables
            self.add_outer_join_condition(var, table, condition)
            return
        # splice the condition into the first ON (...) following `table`
        before, after = tablealias.split(' AS %s ' % table, 1)
        beforep, afterp = after.split(')', 1)
        outerjoin = '%s AS %s %s AND %s) %s' % (before, table, beforep,
                                                condition, afterp)
        self._update_outer_tables(table, actualtables, tablealias, outerjoin)
+
+    def _update_outer_tables(self, table, actualtables, oldalias, newalias):
+        actualtables.remove(oldalias)
+        actualtables.append(newalias)
+        # some tables which have already been used as outer table and replaced
+        # by <oldalias> may not be reused here, though their associated value
+        # in the outer_tables dict has to be updated as well
+        for table, outerexpr in self._state.outer_tables.iteritems():
+            if outerexpr == oldalias:
+                self._state.outer_tables[table] = newalias
+        self._state.outer_tables[table] = newalias        
+        
+    def _var_table(self, var):
+        var.accept(self)#.visit_variable(var)
+        return var._q_sqltable
+
    def _relation_table(self, relation):
        """return the table alias used by the given relation, creating and
        registering it on first use
        """
        if relation in self._state.done:
            return relation._q_sqltable
        assert not self.schema.rschema(relation.r_type).is_final(), relation.r_type
        rid = 'rel_%s%s' % (relation.r_type, self._state.count)
        # relation's table is belonging to the root scope if it is the principal
        # table of one of it's variable and if that variable belong's to parent 
        # scope
        for varref in relation.iget_nodes(VariableRef):
            var = varref.variable
            if isinstance(var, ColumnAlias):
                scope = 0
                break
            # XXX may have a principal without being invariant for this generation,
            #     not sure this is a pb or not
            if var.stinfo.get('principal') is relation and var.sqlscope is var.stmt:
                scope = 0
                break
        else:
            # no variable forced the root scope: use the current one
            scope = -1
        self._state.count += 1
        self.add_table('%s_relation AS %s' % (relation.r_type, rid), rid, scope=scope)
        relation._q_sqltable = rid
        self._state.done.add(relation)
        return rid
+    
+    def _fti_table(self, relation):
+        if relation in self._state.done:
+            try:
+                return relation._q_sqltable
+            except AttributeError:
+                pass
+        self._state.done.add(relation)
+        alias = self.alias_and_add_table(self.dbms_helper.fti_table)
+        relation._q_sqltable = alias
+        return alias
+        
    def _varmap_table_scope(self, select, table):
        """since a varmap table may be used for multiple variable, its scope is
        the most outer scope of each variables
        """
        scope = -1
        for varname, alias in self._varmap.iteritems():
            # check '.' in varname since there are 'X.attribute' keys in varmap
            if not '.' in varname and alias.split('.', 1)[0] == table:
                if select.defined_vars[varname].sqlscope is select:
                    # one variable lives at statement level: table belongs to
                    # the root scope
                    return 0
        return scope
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/sqlutils.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,254 @@
+"""SQL utilities functions and classes.
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from logilab.common.shellutils import ProgressBar
+from logilab.common.db import get_dbapi_compliant_module
+from logilab.common.adbh import get_adv_func_helper
+from logilab.common.sqlgen import SQLGenerator
+
+from indexer import get_indexer
+
+from cubicweb import Binary, ConfigurationError
+from cubicweb.common.uilib import remove_html_tags
+from cubicweb.server import SQL_CONNECT_HOOKS
+from cubicweb.server.utils import crypt_password, cartesian_product
+
+
+def sqlexec(sqlstmts, cursor_or_execute, withpb=True, delimiter=';'):
+    """execute sql statements ignoring DROP/ CREATE GROUP or USER statements
+    error. If a cnx is given, commit at each statement
+
+    NOTE(review): the summary above mentions ignoring errors and committing
+    per statement, but this function does neither -- confirm intent.
+
+    :param sqlstmts: string of sql statements separated by `delimiter`
+    :param cursor_or_execute: a db-api cursor, or directly a callable taking
+      a sql string
+    :param withpb: when true, display a textual progress bar while executing
+    """
+    # accept either a cursor-like object or a plain execute callable
+    if hasattr(cursor_or_execute, 'execute'):
+        execute = cursor_or_execute.execute
+    else:
+        execute = cursor_or_execute
+    sqlstmts = sqlstmts.split(delimiter)
+    if withpb:
+        pb = ProgressBar(len(sqlstmts))
+    for sql in sqlstmts:
+        sql = sql.strip()
+        if withpb:
+            pb.update()
+        if not sql:
+            continue
+        # some dbapi modules doesn't accept unicode for sql string
+        execute(str(sql))
+    if withpb:
+        print
+
+
+def sqlgrants(schema, driver, user,
+              text_index=True, set_owner=True,
+              skip_relations=(), skip_entities=()):
+    """return sql to give all access privileges to the given user on the system
+    schema
+
+    :param driver: database driver name, used to pick the text indexer
+    :param text_index: also grant privileges on the full text index tables
+    :param set_owner: also make `user` the owner of the tables
+    :param skip_entities: entity types to ignore
+
+    NOTE(review): skip_relations is accepted but never used here -- confirm
+    whether it should be forwarded to grant_schema.
+    """
+    from yams.schema2sql import grant_schema
+    from cubicweb.server.sources import native
+    output = []
+    w = output.append
+    w(native.grant_schema(user, set_owner))
+    w('')
+    if text_index:
+        indexer = get_indexer(driver)
+        w(indexer.sql_grant_user(user))
+        w('')
+    w(grant_schema(schema, user, set_owner, skip_entities=skip_entities))
+    return '\n'.join(output)
+
+                  
+def sqlschema(schema, driver, text_index=True, 
+              user=None, set_owner=False,
+              skip_relations=('has_text', 'identity'), skip_entities=()):
+    """return the system sql schema, according to the given parameters
+
+    :param text_index: include full text index creation statements
+    :param user: when set (and the backend supports users), also emit grant
+      statements for this user
+    :param set_owner: make `user` the owner of the created tables (requires
+      `user`)
+    """
+    from yams.schema2sql import schema2sql
+    from cubicweb.server.sources import native
+    if set_owner:
+        assert user, 'user is argument required when set_owner is true'
+    output = []
+    w = output.append
+    w(native.sql_schema(driver))
+    w('')
+    if text_index:
+        indexer = get_indexer(driver)
+        w(indexer.sql_init_fti())
+        w('')
+    dbhelper = get_adv_func_helper(driver)
+    w(schema2sql(dbhelper, schema, 
+                 skip_entities=skip_entities, skip_relations=skip_relations))
+    # grants are only meaningful for backends with user support
+    if dbhelper.users_support and user:
+        w('')
+        w(sqlgrants(schema, driver, user, text_index, set_owner,
+                    skip_relations, skip_entities))
+    return '\n'.join(output)
+
+                  
+def sqldropschema(schema, driver, text_index=True, 
+                  skip_relations=('has_text', 'identity'), skip_entities=()):
+    """return the sql to drop the schema, according to the given parameters
+
+    mirrors sqlschema: drops the native schema, optionally the full text
+    index tables, then the application schema tables
+    """
+    from yams.schema2sql import dropschema2sql
+    from cubicweb.server.sources import native
+    output = []
+    w = output.append
+    w(native.sql_drop_schema(driver))
+    w('')
+    if text_index:
+        indexer = get_indexer(driver)
+        w(indexer.sql_drop_fti())
+        w('')
+    w(dropschema2sql(schema,
+                     skip_entities=skip_entities, skip_relations=skip_relations))
+    return '\n'.join(output)
+
+
+
+class SQLAdapterMixIn(object):
+    """Mixin for SQL data sources, getting a connection from a configuration
+    dictionary and handling connection locking
+    """
+    
+    def __init__(self, source_config):
+        """initialize database access attributes from a sources file section
+
+        :raises ConfigurationError: if a mandatory entry (db-driver, db-name)
+          is missing from `source_config`
+        """
+        try:
+            self.dbdriver = source_config['db-driver'].lower()
+            self.dbname = source_config['db-name']
+        except KeyError:
+            raise ConfigurationError('missing some expected entries in sources file')
+        self.dbhost = source_config.get('db-host')
+        port = source_config.get('db-port')
+        self.dbport = port and int(port) or None
+        self.dbuser = source_config.get('db-user')
+        self.dbpasswd = source_config.get('db-password')
+        self.encoding = source_config.get('db-encoding', 'UTF-8')
+        self.dbapi_module = get_dbapi_compliant_module(self.dbdriver)
+        # driver specific Binary type, used to store binary values
+        self.binary = self.dbapi_module.Binary
+        self.dbhelper = self.dbapi_module.adv_func_helper
+        self.sqlgen = SQLGenerator()
+        
+    def get_connection(self, user=None, password=None):
+        """open and return a connection to the database
+
+        `user` / `password` default to the credentials from the sources file
+        """
+        if user or self.dbuser:
+            self.info('connecting to %s@%s for user %s', self.dbname,
+                      self.dbhost or 'localhost', user or self.dbuser)
+        else:
+            self.info('connecting to %s@%s', self.dbname,
+                      self.dbhost or 'localhost')
+        cnx = self.dbapi_module.connect(self.dbhost, self.dbname,
+                                        user or self.dbuser,
+                                        password or self.dbpasswd,
+                                        port=self.dbport)
+        # run driver specific initialization hooks (eg sqlite functions)
+        init_cnx(self.dbdriver, cnx)
+        #self.dbapi_module.type_code_test(cnx.cursor())
+        return cnx
+
+    def merge_args(self, args, query_args):
+        """return `args` and `query_args` merged into a single dictionary
+        suitable for cursor.execute, converting cubicweb Binary values into
+        the driver's own Binary type on the way
+        """
+        if args is not None:
+            args = dict(args)
+            for key, val in args.items():
+                # convert cubicweb binary into db binary
+                if isinstance(val, Binary):
+                    val = self.binary(val.getvalue())
+                args[key] = val
+            # should not collide
+            args.update(query_args)
+            return args
+        return query_args
+
+    def process_result(self, cursor):
+        """return a list of CubicWeb compliant values from data in the given cursor
+        """
+        descr = cursor.description
+        encoding = self.encoding
+        process_value = self.dbapi_module.process_value
+        binary = Binary
+        results = cursor.fetchall()
+        for i, line in enumerate(results):
+            result = []
+            for col, value in enumerate(line):
+                # SQL NULL is kept as None
+                if value is None:
+                    result.append(value)
+                    continue
+                result.append(process_value(value, descr[col], encoding, binary))
+            results[i] = result
+        return results
+
+
+    def preprocess_entity(self, entity):
+        """return a dictionary to use as extra argument to cursor.execute
+        to insert/update an entity
+        """
+        attrs = {}
+        eschema = entity.e_schema
+        for attr, value in entity.items():
+            rschema = eschema.subject_relation(attr)
+            if rschema.is_final():
+                atype = str(entity.e_schema.destination(attr))
+                if atype == 'Boolean':
+                    # backend specific boolean representation
+                    value = self.dbhelper.boolean_value(value)
+                elif atype == 'Password':
+                    # if value is a Binary instance, this mean we got it
+                    # from a query result and so it is already encrypted
+                    if isinstance(value, Binary):
+                        value = value.getvalue()
+                    else:
+                        value = crypt_password(value)
+                elif isinstance(value, Binary):
+                    value = self.binary(value.getvalue())
+            attrs[str(attr)] = value
+        return attrs
+
+
+from logging import getLogger
+from cubicweb import set_log_methods
+# add info/warning/... logging methods to the mixin (used by get_connection)
+set_log_methods(SQLAdapterMixIn, getLogger('cubicweb.sqladapter'))
+
+def init_sqlite_connexion(cnx):
+    """register cubicweb specific aggregates and functions on a sqlite
+    connection (run through SQL_CONNECT_HOOKS at connection time)
+    """
+    # XXX should not be publicly exposed
+    #def comma_join(strings):
+    #    return ', '.join(strings)
+    #cnx.create_function("COMMA_JOIN", 1, comma_join)
+
+    class concat_strings(object):
+        """aggregate joining non-null values with ', '"""
+        def __init__(self):
+            self.values = []
+        def step(self, value):
+            if value is not None:
+                self.values.append(value)
+        def finalize(self):
+            return ', '.join(self.values)
+    # renamed to GROUP_CONCAT in cubicweb 2.45, keep old name for bw compat for
+    # some time
+    cnx.create_aggregate("CONCAT_STRINGS", 1, concat_strings)
+    cnx.create_aggregate("GROUP_CONCAT", 1, concat_strings)
+    
+    def _limit_size(text, maxsize, format='text/plain'):
+        """truncate `text` to `maxsize` characters, first stripping markup
+        for html/xml content; '...' marks truncated values
+        """
+        if len(text) < maxsize:
+            return text
+        if format in ('text/html', 'text/xhtml', 'text/xml'):
+            text = remove_html_tags(text)
+        if len(text) > maxsize:
+            text = text[:maxsize] + '...'
+        return text
+
+    def limit_size3(text, format, maxsize):
+        return _limit_size(text, maxsize, format)
+    cnx.create_function("LIMIT_SIZE", 3, limit_size3)
+
+    def limit_size2(text, maxsize):
+        # plain text variant, no markup stripping
+        return _limit_size(text, maxsize)
+    cnx.create_function("TEXT_LIMIT_SIZE", 2, limit_size2)
+    import yams.constraints
+    if hasattr(yams.constraints, 'patch_sqlite_decimal'):
+        # enable Decimal support on sqlite when this yams version provides it
+        yams.constraints.patch_sqlite_decimal()
+
+
+# register the sqlite connection initialization hook
+sqlite_hooks = SQL_CONNECT_HOOKS.setdefault('sqlite', [])
+sqlite_hooks.append(init_sqlite_connexion)
+
+def init_cnx(driver, cnx):
+    """run driver specific initialization hooks on the given connection"""
+    for hook in SQL_CONNECT_HOOKS.get(driver, ()):
+        hook(cnx)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/ssplanner.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,505 @@
+"""plan execution of rql queries on a single source
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from copy import copy
+
+from rql.stmts import Union, Select
+from rql.nodes import Constant
+
+from cubicweb import QueryError, typed_eid
+
+def add_types_restriction(schema, rqlst, newroot=None, solutions=None):
+    """modify `newroot` in place to add explicit entity type restrictions
+    for variables whose possible types are given by `solutions`
+
+    when called with only (schema, rqlst), the statement's own solutions are
+    used and the work is done at most once (guarded by _types_restr_added)
+    """
+    if newroot is None:
+        assert solutions is None
+        if hasattr(rqlst, '_types_restr_added'):
+            return
+        solutions = rqlst.solutions
+        newroot = rqlst
+        # mark the tree so restrictions are not added twice
+        rqlst._types_restr_added = True
+    else:
+        assert solutions is not None
+        rqlst = rqlst.stmt
+    eschema = schema.eschema
+    # collect, for each variable, all its possible non-final entity types
+    allpossibletypes = {}
+    for solution in solutions:
+        for varname, etype in solution.iteritems():
+            if not varname in newroot.defined_vars or eschema(etype).is_final():
+                continue
+            allpossibletypes.setdefault(varname, set()).add(etype)
+    for varname in sorted(allpossibletypes):
+        try:
+            var = newroot.defined_vars[varname]
+        except KeyError:
+            continue
+        stinfo = var.stinfo
+        if stinfo.get('uidrels'):
+            continue # eid specified, no need for additional type specification
+        try:
+            typerels = rqlst.defined_vars[varname].stinfo.get('typerels')
+        except KeyError:
+            assert varname in rqlst.aliases
+            continue
+        if newroot is rqlst and typerels:
+            mytyperel = iter(typerels).next()
+        else:
+            # look for an existing type restriction in the new tree
+            for vref in newroot.defined_vars[varname].references():
+                rel = vref.relation()
+                if rel and rel.is_types_restriction():
+                    mytyperel = rel
+                    break
+            else:
+                mytyperel = None
+        possibletypes = allpossibletypes[varname]
+        if mytyperel is not None:
+            # variable already has some type restriction. new possible types
+            # can only be a subset of existing ones, so only remove no longer
+            # possible types
+            for cst in mytyperel.get_nodes(Constant):
+                if not cst.value in possibletypes:
+                    cst.parent.remove(cst)
+                    try:
+                        stinfo['possibletypes'].remove(cst.value)
+                    except KeyError:
+                        # restriction on a type not used by this query, may
+                        # occurs with X is IN(...)
+                        pass
+        else:
+            # we have to add types restriction
+            if stinfo.get('scope') is not None:
+                rel = var.scope.add_type_restriction(var, possibletypes)
+            else:
+                # tree is not annotated yet, no scope set so add the restriction
+                # to the root
+                rel = newroot.add_type_restriction(var, possibletypes)
+            stinfo['typerels'] = frozenset((rel,))
+            stinfo['possibletypes'] = possibletypes
+        
+class SSPlanner(object):
+    """SingleSourcePlanner: build execution plan for rql queries
+
+    optimized for single source repositories
+    """
+    
+    def __init__(self, schema, rqlhelper):
+        self.schema = schema
+        self.rqlhelper = rqlhelper
+
+    def build_plan(self, plan):
+        """build an execution plan from a RQL query
+        
+        do nothing here, dispatch according to the statement type
+        (build_select_plan, build_insert_plan, build_delete_plan or
+        build_set_plan)
+        """
+        build_plan = getattr(self, 'build_%s_plan' % plan.rqlst.TYPE)
+        for step in build_plan(plan, plan.rqlst):
+            plan.add_step(step)
+    
+    def build_select_plan(self, plan, rqlst):
+        """build execution plan for a SELECT RQL query. Suppose only one source
+        is available and so avoid work need for query decomposition among sources
+               
+        the rqlst should not be tagged at this point.
+        """
+        plan.preprocess(rqlst)
+        return (OneFetchStep(plan, rqlst, plan.session.repo.sources),)
+            
+    def build_insert_plan(self, plan, rqlst):
+        """get an execution plan from an INSERT RQL query"""
+        # each variable in main variables is a new entity to insert
+        to_build = {}
+        session = plan.session
+        for etype, var in rqlst.main_variables:
+            # need to do this since entity class is shared w. web client code !
+            to_build[var.name] = session.etype_class(etype)(session, None, None)
+            plan.add_entity_def(to_build[var.name])
+        # add constant values to entity def, mark variables to be selected
+        to_select = plan.relation_definitions(rqlst, to_build)
+        # add necessary steps to add relations and update attributes
+        step = InsertStep(plan) # insert each entity and its relations
+        step.children += self._compute_relation_steps(plan, rqlst.solutions,
+                                                      rqlst.where, to_select)
+        return (step,)
+        
+    def _compute_relation_steps(self, plan, solutions, restriction, to_select):
+        """handle the selection of relations for an insert query"""
+        for edef, rdefs in to_select.items():
+            # create a select rql st to fetch needed data
+            select = Select()
+            eschema = edef.e_schema
+            for i in range(len(rdefs)):
+                rtype, term, reverse = rdefs[i]
+                select.append_selected(term.copy(select))
+                # tag each relation with how RelationsStep should use the
+                # fetched value (attribute, subject or object relation)
+                if reverse:
+                    rdefs[i] = rtype, RelationsStep.REVERSE_RELATION
+                else:
+                    rschema = eschema.subject_relation(rtype)
+                    if rschema.is_final() or rschema.inlined:
+                        rdefs[i] = rtype, RelationsStep.FINAL
+                    else:
+                        rdefs[i] = rtype, RelationsStep.RELATION
+            if restriction is not None:
+                select.set_where(restriction.copy(select))
+            step = RelationsStep(plan, edef, rdefs)
+            step.children += self._select_plan(plan, select, solutions)
+            yield step
+    
+    def build_delete_plan(self, plan, rqlst):
+        """get an execution plan from a DELETE RQL query"""
+        # build a select query to fetch entities to delete
+        steps = []
+        for etype, var in rqlst.main_variables:
+            step = DeleteEntitiesStep(plan)
+            step.children += self._sel_variable_step(plan, rqlst.solutions,
+                                                     rqlst.where, etype, var)
+            steps.append(step)
+        for relation in rqlst.main_relations:
+            step = DeleteRelationsStep(plan, relation.r_type)
+            step.children += self._sel_relation_steps(plan, rqlst.solutions,
+                                                      rqlst.where, relation)
+            steps.append(step)
+        return steps
+
+    def _sel_variable_step(self, plan, solutions, restriction, etype, varref):
+        """handle the selection of variables for a delete query"""
+        select = Select()
+        varref = varref.copy(select)
+        select.defined_vars = {varref.name: varref.variable}
+        select.append_selected(varref)
+        if restriction is not None:
+            select.set_where(restriction.copy(select))
+        if etype != 'Any':
+            select.add_type_restriction(varref.variable, etype)
+        return self._select_plan(plan, select, solutions)
+        
+    def _sel_relation_steps(self, plan, solutions, restriction, relation):
+        """handle the selection of relations for a delete query"""
+        select = Select()
+        # fetch the relation's subject and object eids
+        lhs, rhs = relation.get_variable_parts()
+        select.append_selected(lhs.copy(select))
+        select.append_selected(rhs.copy(select))
+        select.set_where(relation.copy(select))
+        if restriction is not None:
+            select.add_restriction(restriction.copy(select))
+        return self._select_plan(plan, select, solutions)
+    
+    def build_set_plan(self, plan, rqlst):
+        """get an execution plan from a SET RQL query"""
+        select = Select()
+        # extract variables to add to the selection
+        selected_index = {}
+        index = 0
+        relations, attrrelations = [], []
+        getrschema = self.schema.rschema
+        for relation in rqlst.main_relations:
+            if relation.r_type in ('eid', 'has_text', 'identity'):
+                raise QueryError('can not assign to %r relation'
+                                 % relation.r_type)
+            lhs, rhs = relation.get_variable_parts()
+            if not lhs.as_string('utf-8') in selected_index:
+                select.append_selected(lhs.copy(select))
+                selected_index[lhs.as_string('utf-8')] = index
+                index += 1
+            if not rhs.as_string('utf-8') in selected_index:
+                select.append_selected(rhs.copy(select))
+                selected_index[rhs.as_string('utf-8')] = index
+                index += 1
+            rschema = getrschema(relation.r_type)
+            # attributes / inlined relations are set on the entity itself,
+            # other relations go through the repository
+            if rschema.is_final() or rschema.inlined:
+                attrrelations.append(relation)
+            else:
+                relations.append(relation)
+        # add step necessary to fetch all selected variables values
+        if rqlst.where is not None:
+            select.set_where(rqlst.where.copy(select))
+        # set distinct to avoid potential duplicate key error
+        select.distinct = True
+        step = UpdateStep(plan, attrrelations, relations, selected_index)
+        step.children += self._select_plan(plan, select, rqlst.solutions)
+        return (step,)
+
+    # internal methods ########################################################
+    
+    def _select_plan(self, plan, select, solutions):
+        """wrap `select` into a Union, restore its solutions and build the
+        sub-plan fetching its results
+        """
+        union = Union()
+        union.append(select)
+        select.clean_solutions(solutions)
+        add_types_restriction(self.schema, select)        
+        self.rqlhelper.annotate(union)
+        return self.build_select_plan(plan, union)
+
+
+# execution steps and helper functions ########################################
+
<parameter name="delete">def varmap_test_repr(varmap, tablesinorder):</parameter>
+def varmap_test_repr(varmap, tablesinorder):
+    """return a test friendly representation of a varmap, replacing table
+    aliases by their stable creation-order identifier
+    """
+    if varmap is None:
+        return varmap
+    maprepr = {}
+    for var, sql in varmap.iteritems():
+        table, col = sql.split('.')
+        maprepr[var] = '%s.%s' % (tablesinorder[table], col)
+    return maprepr
+
+def offset_result(offset, result):
+    """apply `offset` to a partial `result`: return the remaining offset and
+    the rows to keep (empty when the whole result is skipped by the offset)
+    """
+    offset -= len(result)
+    if offset < 0:
+        # offset falls inside this result, keep only the tail rows
+        result = result[offset:]
+        offset = None
+    elif offset == 0:
+        # this result is exactly consumed by the offset
+        offset = None
+        result = ()
+    return offset, result
+
+
+class LimitOffsetMixIn(object):
+    """mixin adding limit / offset attributes to execution steps"""
+    limit = offset = None
+    def set_limit_offset(self, limit, offset):
+        # a 0 offset is equivalent to no offset at all
+        self.limit = limit
+        self.offset = offset or None
+
+        
+class Step(object):
+    """base abstract class for execution step"""
+    def __init__(self, plan):
+        self.plan = plan
+        # child steps executed to feed this step
+        self.children = []
+        
+    def execute_child(self):
+        """execute the unique child step and return its result"""
+        assert len(self.children) == 1
+        return self.children[0].execute()
+    
+    def execute_children(self):
+        """execute all child steps, discarding their results"""
+        for step in self.children:
+            step.execute()
+        
+    def execute(self):
+        """execute this step and store partial (eg this step) results"""
+        raise NotImplementedError()
+    
+    def mytest_repr(self):
+        """return a representation of this step suitable for test"""
+        return (self.__class__.__name__,)
+    
+    def test_repr(self):
+        """return a representation of this step suitable for test"""
+        return self.mytest_repr() + (
+            [step.test_repr() for step in self.children],)
+
+        
+class OneFetchStep(LimitOffsetMixIn, Step):
+    """step consisting in fetching data from sources and directly returning
+    results
+    """
+    def __init__(self, plan, union, sources, inputmap=None):
+        Step.__init__(self, plan)
+        self.union = union
+        self.sources = sources
+        # optional input map built by a previous step, given to the sources
+        self.inputmap = inputmap
+        self.set_limit_offset(union.children[-1].limit, union.children[-1].offset)
+
+    def set_limit_offset(self, limit, offset):
+        """propagate limit / offset to each select of the underlying union"""
+        LimitOffsetMixIn.set_limit_offset(self, limit, offset)
+        for select in self.union.children:
+            select.limit = limit
+            select.offset = offset
+        
+    def execute(self):
+        """call .syntax_tree_search with the given syntax tree on each
+        source for each solution
+        """
+        self.execute_children()
+        session = self.plan.session
+        args = self.plan.args
+        inputmap = self.inputmap
+        union = self.union
+        # do we have to use a inputmap from a previous step ? If so disable
+        # cachekey
+        if inputmap or self.plan.cache_key is None:
+            cachekey = None
+        # union may have been split into subqueries, rebuild a cache key
+        elif isinstance(self.plan.cache_key, tuple):
+            cachekey = list(self.plan.cache_key)
+            cachekey[0] = union.as_string()
+            cachekey = tuple(cachekey)
+        else:
+            cachekey = union.as_string()
+        result = []
+        # limit / offset processing
+        limit = self.limit
+        offset = self.offset
+        if offset is not None:
+            if len(self.sources) > 1:
+                # we'll have to deal with limit/offset by ourself
+                if union.children[-1].limit:
+                    union.children[-1].limit = limit + offset
+                union.children[-1].offset = None
+            else:
+                # a single source handles limit/offset by itself
+                offset, limit = None, None
+        for source in self.sources:
+            if offset is None and limit is not None:
+                # modifying the sample rqlst is enough since sql generation
+                # will pick it here as well
+                union.children[-1].limit = limit - len(result)
+            result_ = source.syntax_tree_search(session, union, args, cachekey,
+                                                inputmap)
+            if offset is not None:
+                offset, result_ = offset_result(offset, result_)
+            result += result_
+            if limit is not None:
+                if len(result) >= limit:
+                    return result[:limit]
+        #print 'ONEFETCH RESULT %s' % (result)
+        return result
+
+    def mytest_repr(self):
+        """return a representation of this step suitable for test"""
+        try:
+            inputmap = varmap_test_repr(self.inputmap, self.plan.tablesinorder)
+        except AttributeError:
+            inputmap = self.inputmap
+        return (self.__class__.__name__,
+                sorted((r.as_string(kwargs=self.plan.args), r.solutions)
+                       for r in self.union.children),
+                self.limit, self.offset,
+                sorted(self.sources), inputmap)
+
+
+# UPDATE/INSERT/DELETE steps ##################################################
+
+class RelationsStep(Step):
+    """step consisting in adding attributes/relations to entity defs from a
+    previous FetchStep
+
+    relation values come from the latest result, with one column for
+    each relation defined in self.r_defs
+    
+    for one entity definition, we'll construct N entities, where N is the
+    number of rows in the latest result
+    """
+    
+    # markers telling how a fetched column must be used
+    FINAL = 0             # attribute / inlined relation, set on the entity
+    RELATION = 1          # regular relation, entity is the subject
+    REVERSE_RELATION = 2  # regular relation, entity is the object
+    
+    def __init__(self, plan, e_def, r_defs):
+        Step.__init__(self, plan)
+        # partial entity definition to expand
+        self.e_def = e_def
+        # definition of relations to complete
+        self.r_defs = r_defs
+        
+    def execute(self):
+        """execute this step"""
+        base_e_def = self.e_def
+        result = []
+        for row in self.execute_child():
+            # get a new entity definition for this row
+            e_def = copy(base_e_def)
+            # complete this entity def using row values
+            for i in range(len(self.r_defs)):
+                rtype, rorder = self.r_defs[i]
+                if rorder == RelationsStep.FINAL:
+                    e_def[rtype] = row[i]
+                elif rorder == RelationsStep.RELATION:
+                    self.plan.add_relation_def( (e_def, rtype, row[i]) )
+                    e_def.querier_pending_relations[(rtype, 'subject')] = row[i]
+                else:
+                    self.plan.add_relation_def( (row[i], rtype, e_def) )
+                    e_def.querier_pending_relations[(rtype, 'object')] = row[i]
+            result.append(e_def)
+        self.plan.substitute_entity_def(base_e_def, result)
+
+
+class InsertStep(Step):
+    """step consisting in inserting new entities / relations"""
+    
+    def execute(self):
+        """execute this step and return the eids of inserted entities"""
+        for step in self.children:
+            assert isinstance(step, RelationsStep)
+            step.plan = self.plan
+            step.execute()
+        # insert entities first
+        result = self.plan.insert_entity_defs()
+        # then relation
+        self.plan.insert_relation_defs()
+        # return eids of inserted entities
+        return result
+
+
+class DeleteEntitiesStep(Step):
+    """step consisting in deleting entities"""
+
+    def execute(self):
+        """execute this step: the child step selects the eids to delete"""
+        todelete = frozenset(typed_eid(eid) for eid, in self.execute_child())
+        session = self.plan.session
+        delete = session.repo.glob_delete_entity
+        # register pending eids first to avoid multiple deletion
+        pending = session.query_data('pendingeids', set(), setdefault=True)
+        actual = todelete - pending
+        pending |= actual
+        for eid in actual:
+            delete(session, eid)
+            
+    
+class DeleteRelationsStep(Step):
+    """step consisting in deleting relations"""
+
+    def __init__(self, plan, rtype):
+        Step.__init__(self, plan)
+        # name of the relation type to delete
+        self.rtype = rtype
+        
+    def execute(self):
+        """execute this step: the child step selects (subject, object) eids"""
+        session = self.plan.session
+        delete = session.repo.glob_delete_relation
+        for subj, obj in self.execute_child():
+            delete(session, subj, self.rtype, obj)
+    
+
+class UpdateStep(Step):
+    """step consisting in updating entities / adding relations from relations
+    definitions and from results fetched in previous step
+    """
+    
+    def __init__(self, plan, attribute_relations, relations, selected_index):
+        Step.__init__(self, plan)
+        # attribute / inlined relations, set directly on the entity
+        self.attribute_relations = attribute_relations
+        # regular relations, added through the repository
+        self.relations = relations
+        # map from a term's string representation to its column in result rows
+        self.selected_index = selected_index
+        
+    def execute(self):
+        """execute this step and return the eids of updated entities"""
+        plan = self.plan
+        session = self.plan.session
+        repo = session.repo
+        edefs = {}
+        # insert relations
+        for row in self.execute_child():
+            for relation in self.attribute_relations:
+                lhs, rhs = relation.get_variable_parts()
+                eid = typed_eid(row[self.selected_index[str(lhs)]])
+                try:
+                    edef = edefs[eid]
+                except KeyError:
+                    # fetch each entity to update only once
+                    edefs[eid] = edef = session.eid_rset(eid).get_entity(0, 0)
+                if isinstance(rhs, Constant):
+                    # add constant values to entity def
+                    value = rhs.eval(plan.args)
+                    edef[relation.r_type] = value
+                else:
+                    edef[relation.r_type] = row[self.selected_index[str(rhs)]]
+            for relation in self.relations:
+                subj = row[self.selected_index[str(relation.children[0])]]
+                obj = row[self.selected_index[str(relation.children[1])]]
+                repo.glob_add_relation(session, subj, relation.r_type, obj)
+        # update entities
+        result = []
+        for eid, edef in edefs.iteritems():
+            repo.glob_update_entity(session, edef)
+            result.append( (eid,) )
+        return result
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/data/bootstrap_packages	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+ecomment,eclassfolders,eclasstags,ebasket,eemail,efile
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/data/config1/application_hooks.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,7 @@
+"""hooks for config1
+
+ Copyright (c) 2003-2007 LOGILAB S.A. (Paris, FRANCE).
+ http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+
+HOOKS = {"after_add_relation" : {"concerned_by" : [lambda: None]}}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/data/config1/bootstrap_packages	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+# file generated by cubicweb-ctl
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/data/config1/server-ctl.conf	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,13 @@
+# file generated by cubicweb-ctl
+
+APPLICATION HOME=/home/adim/etc/cubicweb.d/crmadim
+DEBUG=
+HOST=
+LOG TRESHOLD=LOG_DEBUG
+NS GROUP=cubicweb
+NS HOST=
+PID FILE=/home/adim/tmp/crmadim.pid
+PORT=
+QUERY LOG FILE=
+UID=1006
+PROFILE=
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/data/config1/sources	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,10 @@
+# file generated by cubicweb-ctl
+
+[system]
+ADAPTER=native
+DBHOST=crater
+DBDRIVER=postgres
+DBNAME=whatever
+ENCODING=UTF-8
+SPLIT_RELATIONS = True
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/data/config2/application_hooks.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,7 @@
+"""hooks for config2
+
+ Copyright (c) 2003-2007 LOGILAB S.A. (Paris, FRANCE).
+ http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+
+HOOKS = {"after_delete_relation" : {"todo_by" : [lambda: 1]}}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/data/config2/bootstrap_packages	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+# file generated by cubicweb-ctl
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/data/config2/server-ctl.conf	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,13 @@
+# file generated by cubicweb-ctl
+
+APPLICATION HOME=/home/adim/etc/cubicweb.d/crmadim
+DEBUG=
+HOST=
+LOG TRESHOLD=LOG_DEBUG
+NS GROUP=cubicweb
+NS HOST=
+PID FILE=/home/adim/tmp/crmadim.pid
+PORT=
+QUERY LOG FILE=
+UID=1006
+PROFILE=
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/data/config2/sources	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,10 @@
+# file generated by cubicweb-ctl
+
+[system]
+ADAPTER=native
+DBHOST=crater
+DBDRIVER=postgres
+DBNAME=whatever
+ENCODING=UTF-8
+SPLIT_RELATIONS = True
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/data/hooks.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,24 @@
+from cubicweb.server.hooksmanager import SystemHook
+
+CALLED_EVENTS = {}
+
+class StartupHook(SystemHook):
+    events = ('server_startup',)
+    def call(self, repo):
+        CALLED_EVENTS['server_startup'] = True
+
+class ShutdownHook(SystemHook):
+    events = ('server_shutdown',)
+    def call(self, repo):
+        CALLED_EVENTS['server_shutdown'] = True
+
+
+class LoginHook(SystemHook):
+    events = ('session_open',)
+    def call(self, session):
+        CALLED_EVENTS['session_open'] = session.user.login
+
+class LogoutHook(SystemHook):
+    events = ('session_close',)
+    def call(self, session):
+        CALLED_EVENTS['session_close'] = session.user.login
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/data/migration/postcreate.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,17 @@
+"""cubicweb post creation script, set note's workflow"""
+
+todoeid = add_state(u'todo', 'Note', initial=True)
+doneeid = add_state(u'done', 'Note')
+add_transition(u'redoit', 'Note', (doneeid,), todoeid)
+add_transition(u'markasdone', 'Note', (todoeid,), doneeid)
+checkpoint()
+
+pitetre = add_state(u'pitetre', 'Affaire', initial=True)
+encours = add_state(u'en cours', 'Affaire')
+finie = add_state(u'finie', 'Affaire')
+bennon = add_state(u'ben non', 'Affaire')
+add_transition(u'abort', 'Affaire', (pitetre,), bennon)
+add_transition(u'start', 'Affaire', (pitetre,), encours)
+add_transition(u'end', 'Affaire', (encours,), finie)
+checkpoint()
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/data/migrschema/Affaire.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,21 @@
+
+class Affaire(EntityType):
+    permissions = {
+        'read':   ('managers', 'users', 'guests'),
+        'add':    ('managers', ERQLExpression('X concerne S, S owned_by U')),
+        'update': ('managers', 'owners', ERQLExpression('X concerne S, S owned_by U')),
+        'delete': ('managers', 'owners', ERQLExpression('X concerne S, S owned_by U')),
+        }
+    
+    ref = String(fulltextindexed=True, indexed=True,
+                 constraints=[SizeConstraint(16)])
+    sujet = String(fulltextindexed=True,
+                 constraints=[SizeConstraint(256)])
+
+class concerne(RelationType):
+    permissions = {
+        'read':   ('managers', 'users', 'guests'),
+        'add':    ('managers', RRQLExpression('U has_update_permission S')),
+        'delete': ('managers', RRQLExpression('O owned_by U')),
+        }
+    
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/data/migrschema/Folder2.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,25 @@
+from cubicweb.schema import format_constraint
+
+class Folder2(MetaUserEntityType):
+    """folders are used to classify entities. They may be defined as a tree.
+    When you include the Folder entity, all application specific entities
+    may then be classified using the "filed_under" relation.
+    """
+    name = String(required=True, indexed=True, internationalizable=True,
+                  constraints=[UniqueConstraint(), SizeConstraint(64)])
+    description_format = String(meta=True, internationalizable=True,
+                                default='text/rest', constraints=[format_constraint])
+    description = String(fulltextindexed=True)
+
+    filed_under2 = BothWayRelation(
+        SubjectRelation('Folder2', description=_("parent folder")),
+        ObjectRelation('*'),
+        )
+
+
+class filed_under2(MetaUserRelationType):
+    """indicates that an entity is classified under a folder"""
+    # is_about has been renamed into filed_under
+    #//* is_about Folder
+    #* filed_under Folder
+    
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/data/migrschema/Note.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,26 @@
+class Note(EntityType):
+    
+    permissions = {'read':   ('managers', 'users', 'guests',),
+                   'update': ('managers', 'owners',),
+                   'delete': ('managers', ),
+                   'add':    ('managers', 
+                              ERQLExpression('X ecrit_part PE, U in_group G, '
+                                             'PE require_permission P, P name "add_note", '
+                                             'P require_group G'),)}
+    
+    date = Datetime()
+    type = String(maxsize=1)
+    whatever = Int()
+    mydate = Date(default='TODAY')
+    para = String(maxsize=512)
+    shortpara = String(maxsize=64)
+    ecrit_par = SubjectRelation('Personne', constraints=[RQLConstraint('S concerne A, O concerne A')])
+
+class ecrit_par(RelationType):
+    permissions = {'read':   ('managers', 'users', 'guests',),
+                   'delete': ('managers', ),
+                   'add':    ('managers', 
+                              RRQLExpression('O require_permission P, P name "add_note", '
+                                             'U in_group G, P require_group G'),)
+                   }
+    inlined = True
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/data/migrschema/Personne.sql	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,12 @@
+nom    ivarchar(64) NOT NULL
+prenom ivarchar(64)
+civility char(1) DEFAULT 'M' 
+promo  choice('bon','pasbon')
+titre  ivarchar(128)
+adel   varchar(128)
+ass    varchar(128)
+web    varchar(128)
+tel    integer
+fax    integer
+datenaiss datetime
+test   boolean 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/data/migrschema/Societe.perms	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+Read: managers, users, guests
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/data/migrschema/Societe.sql	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,10 @@
+nom  ivarchar(64)
+web varchar(128)
+tel  integer
+fax  integer
+rncs varchar(32)
+ad1  varchar(128)
+ad2  varchar(128)
+ad3  varchar(128)
+cp   varchar(12)
+ville varchar(32)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/data/migrschema/relations.rel	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,11 @@
+Personne travaille Societe
+Personne evaluee Note
+EUser evaluee Note
+Societe evaluee Note
+Personne concerne Affaire
+Affaire concerne Societe
+Personne concerne2 Affaire
+
+Personne connait Personne symetric
+
+Societe in_state State inline
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/data/schema/Affaire.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,34 @@
+from cubicweb.schema import format_constraint
+
+class Affaire(EntityType):
+    permissions = {
+        'read':   ('managers', 
+                   ERQLExpression('X owned_by U'), ERQLExpression('X concerne S?, S owned_by U')),
+        'add':    ('managers', ERQLExpression('X concerne S, S owned_by U')),
+        'update': ('managers', 'owners', ERQLExpression('X in_state S, S name in ("pitetre", "en cours")')),
+        'delete': ('managers', 'owners', ERQLExpression('X concerne S, S owned_by U')),
+        }
+    
+    ref = String(fulltextindexed=True, indexed=True,
+                 constraints=[SizeConstraint(16)])
+    sujet = String(fulltextindexed=True,
+                   constraints=[SizeConstraint(256)])
+    in_state = SubjectRelation('State', cardinality='1*',
+                               constraints=[RQLConstraint('O state_of ET, ET name "Affaire"')],
+                               description=_('account state'))
+    descr_format = String(meta=True, internationalizable=True,
+                                default='text/rest', constraints=[format_constraint])
+    descr = String(fulltextindexed=True,
+                   description=_('more detailed description'))
+        
+    wf_info_for = ObjectRelation('TrInfo', cardinality='1*', composite='object')
+    depends_on = SubjectRelation('Affaire')
+    
+class concerne(RelationType):
+    permissions = {
+        'read':   ('managers', 'users', 'guests'),
+        'add':    ('managers', RRQLExpression('U has_update_permission S')),
+        'delete': ('managers', RRQLExpression('O owned_by U')),
+        }
+    
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/data/schema/Note.sql	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,3 @@
+date varchar(10)
+type char(6)
+para varchar(512)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/data/schema/Personne.sql	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,14 @@
+nom    ivarchar(64) NOT NULL
+prenom ivarchar(64)
+sexe   char(1) DEFAULT 'M' 
+promo  choice('bon','pasbon')
+titre  ivarchar(128)
+adel   varchar(128)
+ass    varchar(128)
+web    varchar(128)
+tel    integer
+fax    integer
+datenaiss datetime
+test   boolean 
+description text
+firstname ivarchar(64)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/data/schema/Societe.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,35 @@
+class Societe(EntityType):
+    permissions = {
+        'read': ('managers', 'users', 'guests'),
+        'update': ('managers', 'owners', ERQLExpression('U login L, X nom L')),
+        'delete': ('managers', 'owners', ERQLExpression('U login L, X nom L')),
+        'add': ('managers', 'users',)
+        }
+    
+    nom  = String(maxsize=64, fulltextindexed=True)
+    web  = String(maxsize=128)
+    type  = String(maxsize=128) # attribute in common with Note 
+    tel  = Int()
+    fax  = Int()
+    rncs = String(maxsize=128)
+    ad1  = String(maxsize=128)
+    ad2  = String(maxsize=128)
+    ad3  = String(maxsize=128)
+    cp   = String(maxsize=12)
+    ville= String(maxsize=32)
+
+
+class travaille(RelationType):
+    permissions = {
+        'read':   ('managers', 'users', 'guests'),
+        'add':    ('managers', RRQLExpression('U has_update_permission S')),
+        'delete': ('managers', RRQLExpression('O owned_by U')),
+        }
+
+
+class Division(Societe):
+    __specializes_schema__ = True
+
+class SubDivision(Division):
+    __specializes_schema__ = True
+    travaille_subdivision = ObjectRelation('Personne')
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/data/schema/custom.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,6 @@
+
+
+class test(AttributeRelationType):
+    permissions = {'read': ('managers', 'users', 'guests'),
+                   'delete': ('managers',),
+                   'add': ('managers',)}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/data/schema/note.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,18 @@
+
+class para(AttributeRelationType):
+    permissions = {
+        'read':   ('managers', 'users', 'guests'),
+        'add':    ('managers', ERQLExpression('X in_state S, S name "todo"')),
+        'delete': ('managers', ERQLExpression('X in_state S, S name "todo"')),
+        }
+
+class in_state(RelationDefinition):
+    subject = 'Note'
+    object = 'State'
+    cardinality = '1*'
+    constraints=[RQLConstraint('S is ET, O state_of ET')]
+    
+class wf_info_for(RelationDefinition):
+    subject = 'TrInfo'
+    object = 'Note'
+    cardinality = '1*'
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/data/schema/relations.rel	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,43 @@
+Personne travaille Societe
+Personne evaluee Note
+EUser evaluee Note
+Societe evaluee Note
+Personne concerne Affaire
+Affaire concerne Societe
+Affaire concerne Note
+
+Note ecrit_par Personne inline CONSTRAINT E concerns P, X version_of P
+Note ecrit_par EUser inline CONSTRAINT
+Personne connait Personne symetric
+
+# not inlined intentionally
+Comment comments Personne
+
+Note inline1 Affaire inline
+Personne inline2 Affaire inline
+
+Note todo_by EUser
+Affaire todo_by Personne
+
+Folder see_also Folder
+
+
+Affaire documented_by Card
+
+EUser copain EUser
+
+Tag tags EUser
+Tag tags EGroup
+Tag tags State
+Tag tags Note
+Tag tags Card
+Tag tags Affaire
+
+Note filed_under Folder
+Affaire filed_under Folder
+
+Card require_permission EPermission
+Note require_permission EPermission
+Personne require_permission EPermission
+
+EPermission require_state State
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/runtests.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,5 @@
+from logilab.common.testlib import main
+
+if __name__ == '__main__':
+    import sys, os
+    main(os.path.dirname(sys.argv[0]) or '.')
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/unittest_checkintegrity.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,22 @@
+import sys
+from StringIO import StringIO
+from logilab.common.testlib import TestCase, unittest_main
+from cubicweb.devtools import init_test_database
+
+
+from cubicweb.server.checkintegrity import check
+
+repo, cnx = init_test_database('sqlite')
+
+class CheckIntegrityTC(TestCase):
+    def test(self):
+        sys.stderr = sys.stdout = StringIO()
+        try:
+            check(repo, cnx, ('entities', 'relations', 'text_index', 'metadata'),
+                  True, True)
+        finally:
+            sys.stderr = sys.__stderr__
+            sys.stdout = sys.__stdout__
+        
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/unittest_config.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,27 @@
+"""tests for server config"""
+
+from os.path import join, dirname
+
+from logilab.common.testlib import TestCase, unittest_main
+
+from cubicweb.devtools import TestServerConfiguration
+
+class ConfigTC(TestCase):
+
+    def test_load_hooks_twice(self):
+        class vreg:
+            @staticmethod
+            def registry_objects(registry):
+                return []
+            
+        cfg1 = TestServerConfiguration('data/config1')
+        cfg1.bootstrap_cubes()
+        cfg2 = TestServerConfiguration('data/config2')
+        cfg2.bootstrap_cubes()
+        self.failIf(cfg1.load_hooks(vreg) is cfg2.load_hooks(vreg))
+        self.failUnless('after_add_relation' in cfg1.load_hooks(vreg))
+        self.failUnless('after_delete_relation' in cfg2.load_hooks(vreg))
+        
+
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/unittest_hookhelper.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,83 @@
+# -*- coding: utf-8 -*-
+"""unit/functional tests for cubicweb.server.hookhelper"""
+
+from logilab.common.testlib import unittest_main
+from cubicweb.devtools.apptest import RepositoryBasedTC
+
+from cubicweb.server.pool import LateOperation
+from cubicweb.server.hookhelper import *
+
+
+class HookHelpersTC(RepositoryBasedTC):
+    
+    def setUp(self):
+        RepositoryBasedTC.setUp(self)
+        self.hm = self.repo.hm
+    
+    def test_late_operation(self):
+        session = self.session
+        l1 = LateOperation(session)
+        l2 = LateOperation(session)
+        l3 = Operation(session)
+        self.assertEquals(session.pending_operations, [l3, l1, l2])
+        
+    def test_single_last_operation(self):
+        session = self.session
+        l0 = SingleLastOperation(session)
+        l1 = LateOperation(session)
+        l2 = LateOperation(session)
+        l3 = Operation(session)
+        self.assertEquals(session.pending_operations, [l3, l1, l2, l0])
+        l4 = SingleLastOperation(session)
+        self.assertEquals(session.pending_operations, [l3, l1, l2, l4])
+        
+    def test_global_operation_order(self):
+        from cubicweb.server import hooks, schemahooks
+        session = self.session
+        op1 = hooks.DelayedDeleteOp(session)
+        op2 = schemahooks.DelErdefOp(session)
+        # equivalent operation generated by op2 but replaced here by op3 so we
+        # can check the result...
+        op3 = schemahooks.UpdateSchemaOp(session)
+        op4 = hooks.DelayedDeleteOp(session)
+        op5 = hooks.CheckORelationOp(session)
+        self.assertEquals(session.pending_operations, [op1, op2, op4, op5, op3])
+                          
+       
+    def test_in_state_notification(self):
+        result = []
+        # test both email notification and transition_information
+        # whether or not we can connect to the default smtp server, the transaction
+        # should not fail
+        def in_state_changed(session, eidfrom, rtype, eidto):
+            tr = previous_state(session, eidfrom)
+            if tr is None:
+                result.append(tr)
+                return
+            content = u'trÀnsition from %s to %s' % (tr.name, entity_name(session, eidto))
+            result.append(content)
+            SendMailOp(session, msg=content, recipients=['test@logilab.fr'])
+        self.hm.register_hook(in_state_changed,
+                             'before_add_relation', 'in_state')
+        self.execute('INSERT EUser X: X login "paf", X upassword "wouf", X in_state S, X in_group G WHERE S name "activated", G name "users"')
+        self.assertEquals(result, [None])
+        searchedops = [op for op in self.session.pending_operations
+                       if isinstance(op, SendMailOp)]
+        self.assertEquals(len(searchedops), 0,
+                          self.session.pending_operations)
+        self.commit()
+        self.execute('SET X in_state S WHERE X login "paf", S name "deactivated"')
+        self.assertEquals(result, [None, u'trÀnsition from activated to deactivated'])
+        # one to send the mail, one to close the smtp connection
+        searchedops = [op for op in self.session.pending_operations
+                       if isinstance(op, SendMailOp)]
+        self.assertEquals(len(searchedops), 1,
+                          self.session.pending_operations)
+        self.commit()
+        searchedops = [op for op in self.session.pending_operations
+                       if isinstance(op, SendMailOp)]
+        self.assertEquals(len(searchedops), 0,
+                          self.session.pending_operations)
+        
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/unittest_hooks.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,585 @@
+# -*- coding: utf-8 -*-
+"""functional tests for core hooks
+
+note: most schemahooks.py hooks are actually tested in unittest_migrations.py
+"""
+
+from logilab.common.testlib import TestCase, unittest_main
+from cubicweb.devtools.apptest import RepositoryBasedTC, get_versions
+
+from cubicweb.common import ConnectionError, RepositoryError, ValidationError
+from cubicweb.server.repository import *
+
+orig_get_versions = Repository.get_versions
+
+def setup_module(*args):
+    Repository.get_versions = get_versions
+
+def teardown_module(*args):
+    Repository.get_versions = orig_get_versions
+
+
+    
+class CoreHooksTC(RepositoryBasedTC):
+        
+    def test_delete_internal_entities(self):
+        self.assertRaises(RepositoryError, self.execute,
+                          'DELETE EEType X WHERE X name "EEType"')
+        self.assertRaises(RepositoryError, self.execute,
+                          'DELETE ERType X WHERE X name "relation_type"')
+        self.assertRaises(RepositoryError, self.execute,
+                          'DELETE EGroup X WHERE X name "owners"')
+
+    def test_delete_required_relations_subject(self):
+        self.execute('INSERT EUser X: X login "toto", X upassword "hop", X in_group Y, X in_state S '
+                     'WHERE Y name "users", S name "activated"')
+        self.commit()
+        self.execute('DELETE X in_group Y WHERE X login "toto", Y name "users"')
+        self.assertRaises(ValidationError, self.commit)
+        self.execute('DELETE X in_group Y WHERE X login "toto"')
+        self.execute('SET X in_group Y WHERE X login "toto", Y name "guests"')
+        self.commit()
+        
+    def test_delete_required_relations_object(self):
+        self.skip('no sample in the schema ! YAGNI ? Kermaat ?')
+    
+    def test_static_vocabulary_check(self):
+        self.assertRaises(ValidationError,
+                          self.execute,
+                          'SET X composite "whatever" WHERE X from_entity FE, FE name "EUser", X relation_type RT, RT name "in_group"')
+    
+    def test_missing_required_relations_subject_inline(self):
+        # missing in_group relation 
+        self.execute('INSERT EUser X: X login "toto", X upassword "hop"')
+        self.assertRaises(ValidationError,
+                          self.commit)
+
+    def test_delete_if_singlecard1(self):
+        self.assertEquals(self.repo.schema['in_state'].inlined, False)
+        ueid, = self.execute('INSERT EUser X: X login "toto", X upassword "hop", X in_group Y, X in_state S '
+                             'WHERE Y name "users", S name "activated"')[0]
+        self.commit()
+        self.execute('SET X in_state S WHERE S name "deactivated", X eid %(x)s', {'x': ueid})
+        rset = self.execute('Any S WHERE X in_state S, X eid %(x)s', {'x': ueid})
+        self.assertEquals(len(rset), 1)
+        self.assertRaises(Exception, self.execute, 'SET X in_state S WHERE S name "deactivated", X eid %s' % ueid)
+        rset2 = self.execute('Any S WHERE X in_state S, X eid %(x)s', {'x': ueid})
+        self.assertEquals(rset.rows, rset2.rows)
+
+    def test_inlined(self):
+        self.assertEquals(self.repo.schema['sender'].inlined, True)
+        self.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"')
+        self.execute('INSERT EmailPart X: X content_format "text/plain", X ordernum 1, X content "this is a test"')
+        eeid = self.execute('INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, X recipients Y, X parts P '
+                            'WHERE Y is EmailAddress, P is EmailPart')[0][0]
+        self.execute('SET X sender Y WHERE X is Email, Y is EmailAddress')
+        rset = self.execute('Any S WHERE X sender S, X eid %s' % eeid)
+        self.assertEquals(len(rset), 1)
+        
+    def test_composite_1(self):
+        self.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"')
+        self.execute('INSERT EmailPart X: X content_format "text/plain", X ordernum 1, X content "this is a test"')
+        self.execute('INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, X recipients Y, X parts P '
+                     'WHERE Y is EmailAddress, P is EmailPart')
+        self.failUnless(self.execute('Email X WHERE X sender Y'))
+        self.commit()
+        self.execute('DELETE Email X')
+        rset = self.execute('Any X WHERE X is EmailPart')
+        self.assertEquals(len(rset), 1)
+        self.commit()
+        rset = self.execute('Any X WHERE X is EmailPart')
+        self.assertEquals(len(rset), 0)
+            
+    def test_composite_2(self):
+        self.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"')
+        self.execute('INSERT EmailPart X: X content_format "text/plain", X ordernum 1, X content "this is a test"')
+        self.execute('INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, X recipients Y, X parts P '
+                     'WHERE Y is EmailAddress, P is EmailPart')
+        self.commit()
+        self.execute('DELETE Email X')
+        self.execute('DELETE EmailPart X')
+        self.commit()
+        rset = self.execute('Any X WHERE X is EmailPart')
+        self.assertEquals(len(rset), 0)
+            
+    def test_composite_redirection(self):
+        self.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"')
+        self.execute('INSERT EmailPart X: X content_format "text/plain", X ordernum 1, X content "this is a test"')
+        self.execute('INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, X recipients Y, X parts P '
+                     'WHERE Y is EmailAddress, P is EmailPart')
+        self.execute('INSERT Email X: X messageid "<2345>", X subject "test2", X sender Y, X recipients Y '
+                     'WHERE Y is EmailAddress')
+        self.commit()
+        self.execute('DELETE X parts Y WHERE X messageid "<1234>"')
+        self.execute('SET X parts Y WHERE X messageid "<2345>"')
+        self.commit()
+        rset = self.execute('Any X WHERE X is EmailPart')
+        self.assertEquals(len(rset), 1)
+        self.assertEquals(rset.get_entity(0, 0).reverse_parts[0].messageid, '<2345>')
+
+    def test_unsatisfied_constraints(self):
+        self.execute('INSERT ENFRDef X: X from_entity FE, X relation_type RT, X to_entity TE '
+                     'WHERE FE name "Affaire", RT name "concerne", TE name "String"')
+        self.assertRaises(ValidationError,
+                          self.commit)
+
+
+    def test_html_tidy_hook(self):
+        entity = self.execute('INSERT Affaire A: A descr_format "text/html", A descr "yo"').get_entity(0, 0)
+        self.assertEquals(entity.descr, u'yo')
+        entity = self.execute('INSERT Affaire A: A descr_format "text/html", A descr "<b>yo"').get_entity(0, 0)
+        self.assertEquals(entity.descr, u'<b>yo</b>')
+        entity = self.execute('INSERT Affaire A: A descr_format "text/html", A descr "<b>yo</b>"').get_entity(0, 0)
+        self.assertEquals(entity.descr, u'<b>yo</b>')
+        entity = self.execute('INSERT Affaire A: A descr_format "text/html", A descr "<b>R&D</b>"').get_entity(0, 0)
+        self.assertEquals(entity.descr, u'<b>R&amp;D</b>')
+        xml = u"<div>c&apos;est <b>l'ét&eacute;"
+        entity = self.execute('INSERT Affaire A: A descr_format "text/html", A descr %(d)s',
+                              {'d': xml}).get_entity(0, 0)
+        self.assertEquals(entity.descr, u"<div>c'est <b>l'été</b></div>")
+
+    def test_nonregr_html_tidy_hook_no_update(self):
+        entity = self.execute('INSERT Affaire A: A descr_format "text/html", A descr "yo"').get_entity(0, 0)
+        self.assertEquals(entity.descr, u'yo')
+        self.execute('SET A ref "REF" WHERE A eid %s' % entity.eid)
+        entity = self.execute('Any A WHERE A eid %s' % entity.eid).get_entity(0, 0)
+        self.assertEquals(entity.descr, u'yo')
+        self.execute('SET A descr "R&D<p>yo" WHERE A eid %s' % entity.eid)
+        entity = self.execute('Any A WHERE A eid %s' % entity.eid).get_entity(0, 0)
+        self.assertEquals(entity.descr, u'R&amp;D<p>yo</p>')
+        
+
+        
+class UserGroupHooksTC(RepositoryBasedTC):
+    
+    def test_user_synchronization(self):
+        self.create_user('toto', password='hop', commit=False)
+        self.assertRaises(AuthenticationError,
+                          self.repo.connect, u'toto', 'hop')
+        self.commit()
+        cnxid = self.repo.connect(u'toto', 'hop')
+        self.failIfEqual(cnxid, self.cnxid)
+        self.execute('DELETE EUser X WHERE X login "toto"')
+        self.repo.execute(cnxid, 'State X')
+        self.commit()
+        self.assertRaises(BadConnectionId,
+                          self.repo.execute, cnxid, 'State X')
+
+    def test_user_group_synchronization(self):
+        user = self.session.user
+        self.assertEquals(user.groups, set(('managers',)))
+        self.execute('SET X in_group G WHERE X eid %s, G name "guests"' % user.eid)
+        self.assertEquals(user.groups, set(('managers',)))
+        self.commit()
+        self.assertEquals(user.groups, set(('managers', 'guests')))
+        self.execute('DELETE X in_group G WHERE X eid %s, G name "guests"' % user.eid)
+        self.assertEquals(user.groups, set(('managers', 'guests')))
+        self.commit()
+        self.assertEquals(user.groups, set(('managers',)))
+
+    def test_user_composite_owner(self):
+        ueid = self.create_user('toto')
+        # composite of euser should be owned by the euser regardless of who created it
+        self.execute('INSERT EmailAddress X: X address "toto@logilab.fr", U use_email X '
+                     'WHERE U login "toto"')
+        self.commit()
+        self.assertEquals(self.execute('Any A WHERE X owned_by U, U use_email X,'
+                                       'U login "toto", X address A')[0][0],
+                          'toto@logilab.fr')
+
+    def test_no_created_by_on_deleted_entity(self):
+        eid = self.execute('INSERT EmailAddress X: X address "toto@logilab.fr"')[0][0]
+        self.execute('DELETE EmailAddress X WHERE X eid %s' % eid)
+        self.commit()
+        self.failIf(self.execute('Any X WHERE X created_by Y, X eid >= %(x)s', {'x': eid}))
+        
+class EPropertyHooksTC(RepositoryBasedTC):
+    
+    def test_unexistant_eproperty(self):
+        ex = self.assertRaises(ValidationError,
+                          self.execute, 'INSERT EProperty X: X pkey "bla.bla", X value "hop", X for_user U')
+        self.assertEquals(ex.errors, {'pkey': 'unknown property key'})
+        ex = self.assertRaises(ValidationError,
+                          self.execute, 'INSERT EProperty X: X pkey "bla.bla", X value "hop"')
+        self.assertEquals(ex.errors, {'pkey': 'unknown property key'})
+        
+    def test_site_wide_eproperty(self):
+        ex = self.assertRaises(ValidationError,
+                               self.execute, 'INSERT EProperty X: X pkey "ui.site-title", X value "hop", X for_user U')
+        self.assertEquals(ex.errors, {'for_user': "site-wide property can't be set for user"})
+        
+    def test_bad_type_eproperty(self):
+        ex = self.assertRaises(ValidationError,
+                               self.execute, 'INSERT EProperty X: X pkey "ui.language", X value "hop", X for_user U')
+        self.assertEquals(ex.errors, {'value': u'unauthorized value'})
+        ex = self.assertRaises(ValidationError,
+                          self.execute, 'INSERT EProperty X: X pkey "ui.language", X value "hop"')
+        self.assertEquals(ex.errors, {'value': u'unauthorized value'})
+        
+        
+class SchemaHooksTC(RepositoryBasedTC):
+        
+    def test_duplicate_etype_error(self):
+        # check we can't add a EEType or ERType entity if it already exists one
+        # with the same name
+        #
+        # according to hook order, we'll get a repository or validation error
+        self.assertRaises((ValidationError, RepositoryError),
+                          self.execute, 'INSERT EEType X: X name "Societe"')
+        self.assertRaises((ValidationError, RepositoryError),
+                          self.execute, 'INSERT ERType X: X name "in_group"')
+        
+    def test_validation_unique_constraint(self):
+        self.assertRaises(ValidationError,
+                          self.execute, 'INSERT EUser X: X login "admin"')
+        try:
+            self.execute('INSERT EUser X: X login "admin"')
+        except ValidationError, ex:
+            self.assertIsInstance(ex.entity, int)
+            self.assertEquals(ex.errors, {'login': 'the value "admin" is already used, use another one'})
+
+
+class SchemaModificationHooksTC(RepositoryBasedTC):
+    copy_schema = True
+
+    def setUp(self):
+        if not hasattr(self, '_repo'):
+            # first initialization
+            repo = self.repo # set by the RepositoryBasedTC metaclass
+            # force to read schema from the database
+            repo.config._cubes = None
+            repo.fill_schema()
+        RepositoryBasedTC.setUp(self)
+            
+    def test_base(self):
+        schema = self.repo.schema
+        dbhelper = self.session.pool.source('system').dbhelper    
+        sqlcursor = self.session.pool['system']
+        self.failIf(schema.has_entity('Societe2'))
+        self.failIf(schema.has_entity('concerne2'))
+        # schema should be updated on insertion (after commit)
+        self.execute('INSERT EEType X: X name "Societe2", X description "", X meta FALSE, X final FALSE')
+        self.execute('INSERT ERType X: X name "concerne2", X description "", X meta FALSE, X final FALSE, X symetric FALSE')
+        self.failIf(schema.has_entity('Societe2'))
+        self.failIf(schema.has_entity('concerne2'))
+        self.execute('SET X read_permission G WHERE X is EEType, X name "Societe2", G is EGroup')
+        self.execute('SET X read_permission G WHERE X is ERType, X name "concerne2", G is EGroup')
+        self.execute('SET X add_permission G WHERE X is EEType, X name "Societe2", G is EGroup, G name "managers"')
+        self.execute('SET X add_permission G WHERE X is ERType, X name "concerne2", G is EGroup, G name "managers"')
+        self.execute('SET X delete_permission G WHERE X is EEType, X name "Societe2", G is EGroup, G name "owners"')
+        self.execute('SET X delete_permission G WHERE X is ERType, X name "concerne2", G is EGroup, G name "owners"')
+        # have to commit before adding definition relations
+        self.commit()
+        self.failUnless(schema.has_entity('Societe2'))
+        self.failUnless(schema.has_relation('concerne2'))
+        self.execute('INSERT EFRDef X: X cardinality "11", X defaultval "noname", X indexed TRUE, X relation_type RT, X from_entity E, X to_entity F '
+                     'WHERE RT name "nom", E name "Societe2", F name "String"')
+        concerne2_rdef_eid = self.execute(
+            'INSERT ENFRDef X: X cardinality "**", X relation_type RT, X from_entity E, X to_entity E '
+            'WHERE RT name "concerne2", E name "Societe2"')[0][0]
+        self.execute('INSERT ENFRDef X: X cardinality "?*", X relation_type RT, X from_entity E, X to_entity C '
+                     'WHERE RT name "comments", E name "Societe2", C name "Comment"')
+        self.failIf('nom' in schema['Societe2'].subject_relations())
+        self.failIf('concerne2' in schema['Societe2'].subject_relations())
+        self.failIf(dbhelper.index_exists(sqlcursor, 'Societe2', 'nom'))
+        self.commit()
+        self.failUnless('nom' in schema['Societe2'].subject_relations())
+        self.failUnless('concerne2' in schema['Societe2'].subject_relations())
+        self.failUnless(dbhelper.index_exists(sqlcursor, 'Societe2', 'nom'))
+        # now we should be able to insert and query Societe2
+        s2eid = self.execute('INSERT Societe2 X: X nom "logilab"')[0][0]
+        self.execute('Societe2 X WHERE X nom "logilab"')
+        self.execute('SET X concerne2 X WHERE X nom "logilab"')
+        rset = self.execute('Any X WHERE X concerne2 Y')
+        self.assertEquals(rset.rows, [[s2eid]])
+        # check that when a relation definition is deleted, existing relations are deleted
+        self.execute('INSERT ENFRDef X: X cardinality "**", X relation_type RT, X from_entity E, X to_entity E '
+                     'WHERE RT name "concerne2", E name "Societe"')
+        self.commit()
+        self.execute('DELETE ENFRDef X WHERE X eid %(x)s', {'x': concerne2_rdef_eid}, 'x')
+        self.commit()
+        self.failUnless('concerne2' in schema['Societe'].subject_relations())
+        self.failIf('concerne2' in schema['Societe2'].subject_relations())
+        self.failIf(self.execute('Any X WHERE X concerne2 Y'))
+        # schema should be cleaned on delete (after commit)
+        self.execute('DELETE EEType X WHERE X name "Societe2"')
+        self.execute('DELETE ERType X WHERE X name "concerne2"')
+        self.failUnless(dbhelper.index_exists(sqlcursor, 'Societe2', 'nom'))
+        self.failUnless(schema.has_entity('Societe2'))
+        self.failUnless(schema.has_relation('concerne2'))
+        self.commit()
+        self.failIf(dbhelper.index_exists(sqlcursor, 'Societe2', 'nom'))
+        self.failIf(schema.has_entity('Societe2'))
+        self.failIf(schema.has_entity('concerne2'))
+
+    def test_is_instance_of_insertions(self):
+        seid = self.execute('INSERT SubDivision S: S nom "subdiv"')[0][0]
+        is_etypes = [etype for etype, in self.execute('Any ETN WHERE X eid %s, X is ET, ET name ETN' % seid)]
+        self.assertEquals(is_etypes, ['SubDivision'])
+        instanceof_etypes = [etype for etype, in self.execute('Any ETN WHERE X eid %s, X is_instance_of ET, ET name ETN' % seid)]
+        self.assertEquals(sorted(instanceof_etypes), ['Division', 'Societe', 'SubDivision'])
+        snames = [name for name, in self.execute('Any N WHERE S is Societe, S nom N')]
+        self.failIf('subdiv' in snames)
+        snames = [name for name, in self.execute('Any N WHERE S is Division, S nom N')]
+        self.failIf('subdiv' in snames)
+        snames = [name for name, in self.execute('Any N WHERE S is_instance_of Societe, S nom N')]
+        self.failUnless('subdiv' in snames)
+        snames = [name for name, in self.execute('Any N WHERE S is_instance_of Division, S nom N')]
+        self.failUnless('subdiv' in snames)
+        
+        
+    def test_perms_synchronization_1(self):
+        schema = self.repo.schema
+        self.assertEquals(schema['EUser'].get_groups('read'), set(('managers', 'users')))
+        self.failUnless(self.execute('Any X, Y WHERE X is EEType, X name "EUser", Y is EGroup, Y name "users"')[0])
+        self.execute('DELETE X read_permission Y WHERE X is EEType, X name "EUser", Y name "users"')
+        self.assertEquals(schema['EUser'].get_groups('read'), set(('managers', 'users', )))
+        self.commit()
+        self.assertEquals(schema['EUser'].get_groups('read'), set(('managers', )))
+        self.execute('SET X read_permission Y WHERE X is EEType, X name "EUser", Y name "users"')
+        self.commit()
+        self.assertEquals(schema['EUser'].get_groups('read'), set(('managers', 'users',)))
+
+    def test_perms_synchronization_2(self):
+        schema = self.repo.schema['in_group']
+        self.assertEquals(schema.get_groups('read'), set(('managers', 'users', 'guests')))
+        self.execute('DELETE X read_permission Y WHERE X is ERType, X name "in_group", Y name "guests"')
+        self.assertEquals(schema.get_groups('read'), set(('managers', 'users', 'guests')))
+        self.commit()
+        self.assertEquals(schema.get_groups('read'), set(('managers', 'users')))
+        self.execute('SET X read_permission Y WHERE X is ERType, X name "in_group", Y name "guests"')
+        self.assertEquals(schema.get_groups('read'), set(('managers', 'users')))
+        self.commit()
+        self.assertEquals(schema.get_groups('read'), set(('managers', 'users', 'guests')))
+
+    def test_nonregr_user_edit_itself(self):
+        ueid = self.session.user.eid
+        groupeids = [eid for eid, in self.execute('EGroup G WHERE G name in ("managers", "users")')]
+        self.execute('DELETE X in_group Y WHERE X eid %s' % ueid)
+        self.execute('SET X surname "toto" WHERE X eid %s' % ueid)
+        self.execute('SET X in_group Y WHERE X eid %s, Y name "managers"' % ueid)
+        self.commit()
+        eeid = self.execute('Any X WHERE X is EEType, X name "EEType"')[0][0]
+        self.execute('DELETE X read_permission Y WHERE X eid %s' % eeid)
+        self.execute('SET X final FALSE WHERE X eid %s' % eeid)
+        self.execute('SET X read_permission Y WHERE X eid %s, Y eid in (%s, %s)'
+                     % (eeid, groupeids[0], groupeids[1]))
+        self.commit()
+        self.execute('Any X WHERE X is EEType, X name "EEType"')
+
+    # schema modification hooks tests #########################################
+    
+    def test_uninline_relation(self):
+        dbhelper = self.session.pool.source('system').dbhelper    
+        sqlcursor = self.session.pool['system']
+        # Personne inline2 Affaire inline
+        # insert a person without inline2 relation (not mandatory)
+        self.execute('INSERT Personne X: X nom "toto"')
+        peid = self.execute('INSERT Personne X: X nom "tutu"')[0][0]
+        aeid = self.execute('INSERT Affaire X: X ref "tata"')[0][0]
+        self.execute('SET X inline2 Y WHERE X eid %(x)s, Y eid %(y)s', {'x': peid, 'y': aeid})
+        self.failUnless(self.schema['inline2'].inlined)
+        try:
+            try:
+                self.execute('SET X inlined FALSE WHERE X name "inline2"')
+                self.failUnless(self.schema['inline2'].inlined)
+                self.commit()
+                self.failIf(self.schema['inline2'].inlined)
+                self.failIf(dbhelper.index_exists(sqlcursor, 'Personne', 'inline2'))
+                rset = self.execute('Any X, Y WHERE X inline2 Y')
+                self.assertEquals(len(rset), 1)
+                self.assertEquals(rset.rows[0], [peid, aeid])
+            except:
+                import traceback
+                traceback.print_exc()
+                raise
+        finally:
+            self.execute('SET X inlined TRUE WHERE X name "inline2"')
+            self.failIf(self.schema['inline2'].inlined)
+            self.commit()
+            self.failUnless(self.schema['inline2'].inlined)
+            self.failUnless(dbhelper.index_exists(sqlcursor, 'Personne', 'inline2'))
+            rset = self.execute('Any X, Y WHERE X inline2 Y')
+            self.assertEquals(len(rset), 1)
+            self.assertEquals(rset.rows[0], [peid, aeid])
+
+    def test_indexed_change(self):
+        dbhelper = self.session.pool.source('system').dbhelper    
+        sqlcursor = self.session.pool['system']
+        try:
+            self.execute('SET X indexed TRUE WHERE X relation_type R, R name "sujet"')
+            self.failIf(self.schema['sujet'].rproperty('Affaire', 'String', 'indexed'))
+            self.failIf(dbhelper.index_exists(sqlcursor, 'Affaire', 'sujet'))
+            self.commit()
+            self.failUnless(self.schema['sujet'].rproperty('Affaire', 'String', 'indexed'))
+            self.failUnless(dbhelper.index_exists(sqlcursor, 'Affaire', 'sujet'))
+        finally:
+            self.execute('SET X indexed FALSE WHERE X relation_type R, R name "sujet"')
+            self.failUnless(self.schema['sujet'].rproperty('Affaire', 'String', 'indexed'))
+            self.failUnless(dbhelper.index_exists(sqlcursor, 'Affaire', 'sujet'))
+            self.commit()
+            self.failIf(self.schema['sujet'].rproperty('Affaire', 'String', 'indexed'))
+            self.failIf(dbhelper.index_exists(sqlcursor, 'Affaire', 'sujet'))
+
+    def test_unique_change(self):
+        dbhelper = self.session.pool.source('system').dbhelper    
+        sqlcursor = self.session.pool['system']
+        try:
+            self.execute('INSERT EConstraint X: X cstrtype CT, DEF constrained_by X '
+                         'WHERE CT name "UniqueConstraint", DEF relation_type RT, DEF from_entity E,'
+                         'RT name "sujet", E name "Affaire"')
+            self.failIf(self.schema['Affaire'].has_unique_values('sujet'))
+            self.failIf(dbhelper.index_exists(sqlcursor, 'Affaire', 'sujet', unique=True))
+            self.commit()
+            self.failUnless(self.schema['Affaire'].has_unique_values('sujet'))
+            self.failUnless(dbhelper.index_exists(sqlcursor, 'Affaire', 'sujet', unique=True))
+        finally:
+            self.execute('DELETE DEF constrained_by X WHERE X cstrtype CT, '
+                         'CT name "UniqueConstraint", DEF relation_type RT, DEF from_entity E,'
+                         'RT name "sujet", E name "Affaire"')
+            self.failUnless(self.schema['Affaire'].has_unique_values('sujet'))
+            self.failUnless(dbhelper.index_exists(sqlcursor, 'Affaire', 'sujet', unique=True))
+            self.commit()
+            self.failIf(self.schema['Affaire'].has_unique_values('sujet'))
+            self.failIf(dbhelper.index_exists(sqlcursor, 'Affaire', 'sujet', unique=True))
+        
+
+class WorkflowHooksTC(RepositoryBasedTC):
+
+    def setUp(self):
+        RepositoryBasedTC.setUp(self)
+        self.s_activated = self.execute('State X WHERE X name "activated"')[0][0]
+        self.s_deactivated = self.execute('State X WHERE X name "deactivated"')[0][0]
+        self.s_dummy = self.execute('INSERT State X: X name "dummy", X state_of E WHERE E name "EUser"')[0][0]
+        self.create_user('stduser')
+        # give access to users group on the user's wf transitions
+        # so we can test wf enforcement on EUser (managers no longer have
+        # this enforcement)
+        self.execute('SET X require_group G WHERE G name "users", X transition_of ET, ET name "EUser"')
+        self.commit()
+        
+    def tearDown(self):
+        self.execute('DELETE X require_group G WHERE G name "users", X transition_of ET, ET name "EUser"')
+        self.commit()
+        RepositoryBasedTC.tearDown(self)
+
+    def test_set_initial_state(self):
+        ueid = self.execute('INSERT EUser E: E login "x", E upassword "x", E in_group G '
+                            'WHERE G name "users"')[0][0]
+        self.failIf(self.execute('Any N WHERE S name N, X in_state S, X eid %(x)s',
+                                 {'x' : ueid}))
+        self.commit()
+        initialstate = self.execute('Any N WHERE S name N, X in_state S, X eid %(x)s',
+                                    {'x' : ueid})[0][0]
+        self.assertEquals(initialstate, u'activated')
+        
+    def test_initial_state(self):
+        cnx = self.login('stduser')
+        cu = cnx.cursor()
+        self.assertRaises(ValidationError, cu.execute,
+                          'INSERT EUser X: X login "badaboum", X upassword %(pwd)s, '
+                          'X in_state S WHERE S name "deactivated"', {'pwd': 'oops'})
+        cnx.close()
+        # though managers can do whatever they want
+        self.execute('INSERT EUser X: X login "badaboum", X upassword %(pwd)s, '
+                     'X in_state S, X in_group G WHERE S name "deactivated", G name "users"', {'pwd': 'oops'})
+        self.commit()
+        
+    # test that the workflow is correctly enforced
+    def test_transition_checking1(self):
+        cnx = self.login('stduser')
+        cu = cnx.cursor()
+        ueid = cnx.user(self.current_session()).eid
+        self.assertRaises(ValidationError,
+                          cu.execute, 'SET X in_state S WHERE X eid %(x)s, S eid %(s)s',
+                          {'x': ueid, 's': self.s_activated}, 'x')
+        cnx.close()
+        
+    def test_transition_checking2(self):
+        cnx = self.login('stduser')
+        cu = cnx.cursor()
+        ueid = cnx.user(self.current_session()).eid
+        self.assertRaises(ValidationError,
+                          cu.execute, 'SET X in_state S WHERE X eid %(x)s, S eid %(s)s',
+                          {'x': ueid, 's': self.s_dummy}, 'x')
+        cnx.close()
+        
+    def test_transition_checking3(self):
+        cnx = self.login('stduser')
+        cu = cnx.cursor()
+        ueid = cnx.user(self.current_session()).eid
+        cu.execute('SET X in_state S WHERE X eid %(x)s, S eid %(s)s',
+                      {'x': ueid, 's': self.s_deactivated}, 'x')
+        cnx.commit()
+        self.assertRaises(ValidationError,
+                          cu.execute, 'SET X in_state S WHERE X eid %(x)s, S eid %(s)s',
+                          {'x': ueid, 's': self.s_deactivated}, 'x')
+        # get back now
+        cu.execute('SET X in_state S WHERE X eid %(x)s, S eid %(s)s',
+                      {'x': ueid, 's': self.s_activated}, 'x')
+        cnx.commit()
+        cnx.close()
+        
+    def test_transition_checking4(self):
+        cnx = self.login('stduser')
+        cu = cnx.cursor()
+        ueid = cnx.user(self.current_session()).eid
+        cu.execute('SET X in_state S WHERE X eid %(x)s, S eid %(s)s',
+                   {'x': ueid, 's': self.s_deactivated}, 'x')
+        cnx.commit()
+        self.assertRaises(ValidationError,
+                          cu.execute, 'SET X in_state S WHERE X eid %(x)s, S eid %(s)s',
+                          {'x': ueid, 's': self.s_dummy}, 'x')
+        # get back now
+        cu.execute('SET X in_state S WHERE X eid %(x)s, S eid %(s)s',
+                      {'x': ueid, 's': self.s_activated}, 'x')
+        cnx.commit()
+        cnx.close()
+
+    def test_transition_information(self):
+        ueid = self.session.user.eid
+        self.execute('SET X in_state S WHERE X eid %(x)s, S eid %(s)s',
+                      {'x': ueid, 's': self.s_deactivated}, 'x')
+        self.commit()
+        rset = self.execute('TrInfo T ORDERBY T WHERE T wf_info_for X, X eid %(x)s', {'x': ueid})
+        self.assertEquals(len(rset), 2)
+        tr = rset.get_entity(1, 0)
+        #tr.complete()
+        self.assertEquals(tr.comment, None)
+        self.assertEquals(tr.from_state[0].eid, self.s_activated)
+        self.assertEquals(tr.to_state[0].eid, self.s_deactivated)
+        
+        self.session.set_shared_data('trcomment', u'il est pas sage celui-la')
+        self.session.set_shared_data('trcommentformat', u'text/plain')
+        self.execute('SET X in_state S WHERE X eid %(x)s, S eid %(s)s',
+                     {'x': ueid, 's': self.s_activated}, 'x')
+        self.commit()
+        rset = self.execute('TrInfo T ORDERBY T WHERE T wf_info_for X, X eid %(x)s', {'x': ueid})
+        self.assertEquals(len(rset), 3)
+        tr = rset.get_entity(2, 0)
+        #tr.complete()
+        self.assertEquals(tr.comment, u'il est pas sage celui-la')
+        self.assertEquals(tr.comment_format, u'text/plain')
+        self.assertEquals(tr.from_state[0].eid, self.s_deactivated)
+        self.assertEquals(tr.to_state[0].eid, self.s_activated)
+        self.assertEquals(tr.owned_by[0].login, 'admin')
+
+    def test_transition_information_on_creation(self):
+        ueid = self.create_user('toto')
+        rset = self.execute('TrInfo T WHERE T wf_info_for X, X eid %(x)s', {'x': ueid})
+        self.assertEquals(len(rset), 1)
+        tr = rset.get_entity(0, 0)
+        #tr.complete()
+        self.assertEquals(tr.comment, None)
+        self.assertEquals(tr.from_state, [])
+        self.assertEquals(tr.to_state[0].eid, self.s_activated)
+
+    def test_std_users_can_create_trinfo(self):
+        self.create_user('toto')
+        cnx = self.login('toto')
+        cu = cnx.cursor()
+        self.failUnless(cu.execute("INSERT Note X: X type 'a', X in_state S WHERE S name 'todo'"))
+        cnx.commit()
+    
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/unittest_hooksmanager.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,178 @@
+"""unit tests for the hooks manager
+"""
+
+from logilab.common.testlib import TestCase, unittest_main
+
+from cubicweb.server.hooksmanager import HooksManager, Hook
+from cubicweb.devtools import TestServerConfiguration
+from cubicweb.devtools.apptest import RepositoryBasedTC
+
+class HookCalled(Exception): pass
+
+config = TestServerConfiguration('data')
+config.bootstrap_cubes()
+schema = config.load_schema()
+
+class HooksManagerTC(TestCase):
+    args = (None,)
+    kwargs = {'a': 1}
+    
+    def setUp(self):
+        """ called before each test from this class """
+        self.o = HooksManager(schema)
+
+    def test_register_hook_raise_keyerror(self):
+        self.assertRaises(AssertionError,
+                          self.o.register_hook, self._hook, 'before_add_entiti')
+        self.assertRaises(AssertionError,
+                          self.o.register_hook, self._hook, 'session_login', 'EEType')
+        self.assertRaises(AssertionError,
+                          self.o.register_hook, self._hook, 'session_logout', 'EEType')
+        self.assertRaises(AssertionError,
+                          self.o.register_hook, self._hook, 'server_startup', 'EEType')
+        self.assertRaises(AssertionError,
+                          self.o.register_hook, self._hook, 'server_shutdown', 'EEType')
+        
+    def test_register_hook1(self):
+        self.o.register_hook(self._hook, 'before_add_entity')
+        self.o.register_hook(self._hook, 'before_delete_entity', 'Personne')
+        self._test_called_hooks()
+        
+    def test_register_hook2(self):
+        self.o.register_hook(self._hook, 'before_add_entity', '')
+        self.o.register_hook(self._hook, 'before_delete_entity', 'Personne')
+        self._test_called_hooks()
+        
+    def test_register_hook3(self):
+        self.o.register_hook(self._hook, 'before_add_entity', None)
+        self.o.register_hook(self._hook, 'before_delete_entity', 'Personne')
+        self._test_called_hooks()
+        
+    def test_register_hooks(self):
+        self.o.register_hooks({'before_add_entity' : {'': [self._hook]},
+                               'before_delete_entity' : {'Personne': [self._hook]},
+                               })
+        self._test_called_hooks()
+
+    def test_unregister_hook(self):
+        self.o.register_hook(self._hook, 'after_delete_entity', 'Personne')
+        self.assertRaises(HookCalled,
+                          self.o.call_hooks, 'after_delete_entity', 'Personne',
+                          *self.args, **self.kwargs)
+        self.o.unregister_hook(self._hook, 'after_delete_entity', 'Personne')
+        # no hook should be called there
+        self.o.call_hooks('after_delete_entity', 'Personne')
+        
+
+    def _test_called_hooks(self):
+        self.assertRaises(HookCalled,
+                          self.o.call_hooks, 'before_add_entity', '',
+                          *self.args, **self.kwargs)
+        self.assertRaises(HookCalled,
+                          self.o.call_hooks, 'before_add_entity', None,
+                          *self.args, **self.kwargs)
+        self.assertRaises(HookCalled,
+                          self.o.call_hooks, 'before_add_entity', 'Personne',
+                          *self.args, **self.kwargs)
+        self.assertRaises(HookCalled,
+                          self.o.call_hooks, 'before_delete_entity', 'Personne',
+                          *self.args, **self.kwargs)
+        # no hook should be called there
+        self.o.call_hooks('before_delete_entity', None)
+        self.o.call_hooks('before_delete_entity', 'Societe')
+
+
+    def _hook(self, *args, **kwargs):
+        # check arguments
+        self.assertEqual(args, self.args)
+        self.assertEqual(kwargs, self.kwargs)
+        raise HookCalled()
+
+
+class RelationHookTC(TestCase):
+    """testcase for relation hooks grouping"""
+    def setUp(self):
+        """ called before each test from this class """
+        self.o = HooksManager(schema)
+        self.called = []
+
+    def test_before_add_relation(self):
+        """make sure before_xxx_relation hooks are called directly"""
+        self.o.register_hook(self._before_relation_hook,
+                             'before_add_relation', 'concerne')
+        self.assertEquals(self.called, [])
+        self.o.call_hooks('before_add_relation', 'concerne', 'USER',
+                          1, 'concerne', 2)        
+        self.assertEquals(self.called, [(1, 'concerne', 2)])
+        
+    def test_after_add_relation(self):
+        """make sure after_xxx_relation hooks are deferred"""
+        self.o.register_hook(self._after_relation_hook,
+                             'after_add_relation', 'concerne')
+        self.assertEquals(self.called, [])
+        self.o.call_hooks('after_add_relation', 'concerne', 'USER',
+                          1, 'concerne', 2)
+        self.o.call_hooks('after_add_relation', 'concerne', 'USER',
+                          3, 'concerne', 4)
+        self.assertEquals(self.called, [(1, 'concerne', 2), (3, 'concerne', 4)])
+    
+    def test_before_delete_relation(self):
+        """make sure before_xxx_relation hooks are called directly"""
+        self.o.register_hook(self._before_relation_hook,
+                             'before_delete_relation', 'concerne')
+        self.assertEquals(self.called, [])
+        self.o.call_hooks('before_delete_relation', 'concerne', 'USER',
+                          1, 'concerne', 2)        
+        self.assertEquals(self.called, [(1, 'concerne', 2)])
+
+    def test_after_delete_relation(self):
+        """make sure after_xxx_relation hooks are deferred"""
+        self.o.register_hook(self._after_relation_hook,
+                             'after_delete_relation', 'concerne')
+        self.o.call_hooks('after_delete_relation', 'concerne', 'USER',
+                          1, 'concerne', 2)
+        self.o.call_hooks('after_delete_relation', 'concerne', 'USER',
+                          3, 'concerne', 4)
+        self.assertEquals(self.called, [(1, 'concerne', 2), (3, 'concerne', 4)])
+
+
+    def _before_relation_hook(self, pool, subject, r_type, object):
+        self.called.append((subject, r_type, object))
+
+    def _after_relation_hook(self, pool, subject, r_type, object):
+        self.called.append((subject, r_type, object))
+
+
+class SystemHooksTC(RepositoryBasedTC):
+
+    def test_startup_shutdown(self):
+        import hooks # cubicweb/server/test/data/hooks.py
+        self.assertEquals(hooks.CALLED_EVENTS['server_startup'], True)
+        # don't actually call repository.shutdown !
+        self.repo.hm.call_hooks('server_shutdown', repo=None)
+        self.assertEquals(hooks.CALLED_EVENTS['server_shutdown'], True)
+
+    def test_session_open_close(self):
+        import hooks # cubicweb/server/test/data/hooks.py
+        cnx = self.login('anon')
+        self.assertEquals(hooks.CALLED_EVENTS['session_open'], 'anon')
+        cnx.close()
+        self.assertEquals(hooks.CALLED_EVENTS['session_close'], 'anon')
+
+
+from itertools import repeat
+
+class MyHook(Hook):
+    schema = schema # set for actual hooks at registration time
+    events = ('whatever', 'another')
+    accepts = ('Societe', 'Division')
+    
+class HookTC(RepositoryBasedTC):
+    def test_inheritance(self):
+        self.assertEquals(list(MyHook.register_to()),
+                          zip(repeat('whatever'), ('Societe', 'Division', 'SubDivision'))
+                          + zip(repeat('another'), ('Societe', 'Division', 'SubDivision')))
+
+
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/unittest_migractions.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,412 @@
+"""unit tests for module cubicweb.server.migractions
+"""
+
+from mx.DateTime import DateTime, today
+
+from logilab.common.testlib import TestCase, unittest_main
+from cubicweb.devtools.apptest import RepositoryBasedTC, get_versions
+
+from cubicweb.server.repository import Repository
+from cubicweb.server.migractions import *
+
+orig_get_versions = Repository.get_versions
+
+def setup_module(*args):
+    Repository.get_versions = get_versions
+
+def teardown_module(*args):
+    Repository.get_versions = orig_get_versions
+
+    
+class MigrationCommandsTC(RepositoryBasedTC):
+    copy_schema = True
+    
+    def setUp(self):
+        if not hasattr(self, '_repo'):
+            # first initialization
+            repo = self.repo # set by the RepositoryBasedTC metaclass
+            # force to read schema from the database
+            repo.config._cubes = None
+            repo.fill_schema()
+            # hack to read the schema from data/migrschema
+            from cubicweb.schema import CubicWebSchemaLoader
+            CubicWebSchemaLoader.main_schema_directory = 'migrschema'
+            global migrschema
+            migrschema = self.repo.config.load_schema()
+            del CubicWebSchemaLoader.main_schema_directory
+            assert 'Folder' in migrschema
+            self.repo.hm.deactivate_verification_hooks()
+        RepositoryBasedTC.setUp(self)
+        self.mh = ServerMigrationHelper(self.repo.config, migrschema,
+                                        repo=self.repo, cnx=self.cnx,
+                                        interactive=False)
+        
+    def test_add_attribute_int(self):
+        self.failIf('whatever' in self.schema)
+        paraordernum = self.mh.rqlexec('Any O WHERE X name "Note", RT name "para", RDEF from_entity X, RDEF relation_type RT, RDEF ordernum O')[0][0]
+        self.mh.cmd_add_attribute('Note', 'whatever')
+        self.failUnless('whatever' in self.schema)
+        self.assertEquals(self.schema['whatever'].subjects(), ('Note',))
+        self.assertEquals(self.schema['whatever'].objects(), ('Int',))
+        paraordernum2 = self.mh.rqlexec('Any O WHERE X name "Note", RT name "para", RDEF from_entity X, RDEF relation_type RT, RDEF ordernum O')[0][0]
+        self.assertEquals(paraordernum2, paraordernum+1)
+        #self.assertEquals([r.type for r in self.schema['Note'].ordered_relations()],
+        #                  ['modification_date', 'creation_date', 'owned_by',
+        #                   'eid', 'ecrit_par', 'inline1', 'date', 'type',
+        #                   'whatever', 'para', 'in_basket'])
+        # NB: commit instead of rollback make following test fail with py2.5
+        #     this sounds like a pysqlite/2.5 bug (the same eid is assigned to
+        #     two different entities)
+        self.mh.rollback()
+
+    def test_add_attribute_varchar(self):
+        self.failIf('shortpara' in self.schema)
+        self.mh.cmd_add_attribute('Note', 'shortpara')
+        self.failUnless('shortpara' in self.schema)
+        self.assertEquals(self.schema['shortpara'].subjects(), ('Note', ))
+        self.assertEquals(self.schema['shortpara'].objects(), ('String', ))
+        # test created column is actually a varchar(64)
+        notesql = self.mh.sqlexec("SELECT sql FROM sqlite_master WHERE type='table' and name='Note'")[0][0]
+        fields = dict(x.strip().split()[:2] for x in notesql.split('(', 1)[1].rsplit(')', 1)[0].split(','))
+        self.assertEquals(fields['shortpara'], 'varchar(64)')
+        self.mh.rollback()
+        
+    def test_add_datetime_with_default_value_attribute(self):
+        self.failIf('mydate' in self.schema)
+        self.mh.cmd_add_attribute('Note', 'mydate')
+        self.failUnless('mydate' in self.schema)
+        self.assertEquals(self.schema['mydate'].subjects(), ('Note', ))
+        self.assertEquals(self.schema['mydate'].objects(), ('Date', ))
+        testdate = DateTime(2005, 12, 13)
+        eid1 = self.mh.rqlexec('INSERT Note N')[0][0]
+        eid2 = self.mh.rqlexec('INSERT Note N: N mydate %(mydate)s', {'mydate' : testdate})[0][0]
+        d1 = self.mh.rqlexec('Any D WHERE X eid %(x)s, X mydate D', {'x': eid1}, 'x')[0][0]
+        d2 = self.mh.rqlexec('Any D WHERE X eid %(x)s, X mydate D', {'x': eid2}, 'x')[0][0]
+        self.assertEquals(d1, today())
+        self.assertEquals(d2, testdate)
+        self.mh.rollback()
+            
+    def test_rename_attribute(self):
+        self.failIf('civility' in self.schema)
+        eid1 = self.mh.rqlexec('INSERT Personne X: X nom "lui", X sexe "M"')[0][0]
+        eid2 = self.mh.rqlexec('INSERT Personne X: X nom "l\'autre", X sexe NULL')[0][0]
+        self.mh.cmd_rename_attribute('Personne', 'sexe', 'civility')
+        self.failIf('sexe' in self.schema)
+        self.failUnless('civility' in self.schema)
+        # test data has been backported
+        c1 = self.mh.rqlexec('Any C WHERE X eid %s, X civility C' % eid1)[0][0]
+        self.failUnlessEqual(c1, 'M')
+        c2 = self.mh.rqlexec('Any C WHERE X eid %s, X civility C' % eid2)[0][0]
+        self.failUnlessEqual(c2, None)
+
+
+    def test_workflow_actions(self):
+        foo = self.mh.cmd_add_state(u'foo', ('Personne', 'Email'), initial=True)
+        for etype in ('Personne', 'Email'):
+            s1 = self.mh.rqlexec('Any N WHERE S state_of ET, ET name "%s", S name N' %
+                                 etype)[0][0]
+            self.assertEquals(s1, "foo")
+            s1 = self.mh.rqlexec('Any N WHERE ET initial_state S, ET name "%s", S name N' %
+                                 etype)[0][0]
+            self.assertEquals(s1, "foo")
+        bar = self.mh.cmd_add_state(u'bar', ('Personne', 'Email'), initial=True)
+        baz = self.mh.cmd_add_transition(u'baz', ('Personne', 'Email'),
+                                         (foo,), bar, ('managers',))
+        for etype in ('Personne', 'Email'):
+            t1 = self.mh.rqlexec('Any N WHERE T transition_of ET, ET name "%s", T name N' %
+                                 etype)[0][0]
+            self.assertEquals(t1, "baz")
+        gn = self.mh.rqlexec('Any GN WHERE T require_group G, G name GN, T eid %s' % baz)[0][0]
+        self.assertEquals(gn, 'managers')
+        
+    def test_add_entity_type(self):
+        self.failIf('Folder2' in self.schema)
+        self.failIf('filed_under2' in self.schema)
+        self.mh.cmd_add_entity_type('Folder2')
+        self.failUnless('Folder2' in self.schema)
+        self.failUnless(self.execute('EEType X WHERE X name "Folder2"'))
+        self.failUnless('filed_under2' in self.schema)
+        self.failUnless(self.execute('ERType X WHERE X name "filed_under2"'))
+        self.assertEquals(sorted(str(rs) for rs in self.schema['Folder2'].subject_relations()),
+                          ['created_by', 'creation_date', 'description', 'description_format', 'eid',
+                           'filed_under2', 'has_text', 'identity', 'is', 'is_instance_of',
+                           'modification_date', 'name', 'owned_by'])
+        self.assertEquals([str(rs) for rs in self.schema['Folder2'].object_relations()],
+                          ['filed_under2', 'identity'])
+        self.assertEquals(sorted(str(e) for e in self.schema['filed_under2'].subjects()),
+                          ['Affaire', 'Card', 'Division', 'Email', 'EmailThread', 'File', 'Folder2',
+                           'Image', 'Note', 'Personne', 'Societe', 'SubDivision'])
+        self.assertEquals(self.schema['filed_under2'].objects(), ('Folder2',))
+        eschema = self.schema.eschema('Folder2')
+        for cstr in eschema.constraints('name'):
+            self.failUnless(hasattr(cstr, 'eid'))
+
+    def test_drop_entity_type(self):
+        self.mh.cmd_add_entity_type('Folder2')
+        todoeid = self.mh.cmd_add_state(u'todo', 'Folder2', initial=True)
+        doneeid = self.mh.cmd_add_state(u'done', 'Folder2')
+        self.mh.cmd_add_transition(u'redoit', 'Folder2', (doneeid,), todoeid)
+        self.mh.cmd_add_transition(u'markasdone', 'Folder2', (todoeid,), doneeid)
+        self.commit()
+        eschema = self.schema.eschema('Folder2')
+        self.mh.cmd_drop_entity_type('Folder2')
+        self.failIf('Folder2' in self.schema)
+        self.failIf(self.execute('EEType X WHERE X name "Folder2"'))
+        # test automatic workflow deletion
+        self.failIf(self.execute('State X WHERE NOT X state_of ET'))
+        self.failIf(self.execute('Transition X WHERE NOT X transition_of ET'))
+
+    def test_add_relation_type(self):
+        self.mh.cmd_add_entity_type('Folder2', auto=False)
+        self.mh.cmd_add_relation_type('filed_under2')
+        self.failUnless('filed_under2' in self.schema)
+        self.assertEquals(sorted(str(e) for e in self.schema['filed_under2'].subjects()),
+                          ['Affaire', 'Card', 'Division', 'Email', 'EmailThread', 'File', 'Folder2',
+                           'Image', 'Note', 'Personne', 'Societe', 'SubDivision'])
+        self.assertEquals(self.schema['filed_under2'].objects(), ('Folder2',))
+
+
+    def test_drop_relation_type(self):
+        self.mh.cmd_add_entity_type('Folder2', auto=False)
+        self.mh.cmd_add_relation_type('filed_under2')
+        self.failUnless('filed_under2' in self.schema)
+        self.mh.cmd_drop_relation_type('filed_under2')
+        self.failIf('filed_under2' in self.schema)
+
+    def test_add_relation_definition(self):
+        self.mh.cmd_add_relation_definition('Societe', 'in_state', 'State')
+        self.assertEquals(sorted(self.schema['in_state'].subjects()),
+                          ['Affaire', 'Division', 'EUser', 'Note', 'Societe', 'SubDivision'])
+        self.assertEquals(self.schema['in_state'].objects(), ('State',))
+
+    def test_add_relation_definition_nortype(self):
+        self.mh.cmd_add_relation_definition('Personne', 'concerne2', 'Affaire')
+        self.assertEquals(self.schema['concerne2'].subjects(),
+                          ('Personne',))
+        self.assertEquals(self.schema['concerne2'].objects(), ('Affaire',))
+
+    def test_drop_relation_definition1(self):
+        self.failUnless('concerne' in self.schema)
+        self.assertEquals(sorted(str(e) for e in self.schema['concerne'].subjects()), ['Affaire', 'Personne'])
+        self.assertEquals(sorted(str(e) for e in self.schema['concerne'].objects()), ['Affaire', 'Division', 'Note', 'Societe', 'SubDivision'])
+        self.mh.cmd_drop_relation_definition('Personne', 'concerne', 'Affaire')
+        self.assertEquals(sorted(str(e) for e in self.schema['concerne'].subjects()), ['Affaire'])
+        self.assertEquals(sorted(str(e) for e in self.schema['concerne'].objects()), ['Division', 'Note', 'Societe', 'SubDivision'])
+        
+    def test_drop_relation_definition_with_specialization(self):
+        self.failUnless('concerne' in self.schema)
+        self.assertEquals(sorted(str(e) for e in self.schema['concerne'].subjects()), ['Affaire', 'Personne'])
+        self.assertEquals(sorted(str(e) for e in self.schema['concerne'].objects()), ['Affaire', 'Division', 'Note', 'Societe', 'SubDivision'])
+        self.mh.cmd_drop_relation_definition('Affaire', 'concerne', 'Societe')
+        self.mh.cmd_drop_relation_definition('None', 'concerne', 'Societe')
+        self.assertEquals(sorted(str(e) for e in self.schema['concerne'].subjects()), ['Affaire', 'Personne'])
+        self.assertEquals(sorted(str(e) for e in self.schema['concerne'].objects()), ['Affaire', 'Note'])
+        
+    def test_drop_relation_definition2(self):
+        self.failUnless('evaluee' in self.schema)
+        self.mh.cmd_drop_relation_definition('Personne', 'evaluee', 'Note')
+        self.failUnless('evaluee' in self.schema)
+        self.assertEquals(sorted(self.schema['evaluee'].subjects()),
+                          ['Division', 'EUser', 'Societe', 'SubDivision'])
+        self.assertEquals(sorted(self.schema['evaluee'].objects()),
+                          ['Note'])
+
+    def test_rename_relation(self):
+        self.skip('implement me')
+
+    def test_change_relation_props_non_final(self):
+        rschema = self.schema['concerne']
+        card = rschema.rproperty('Affaire', 'Societe', 'cardinality')
+        self.assertEquals(card, '**')
+        try:
+            self.mh.cmd_change_relation_props('Affaire', 'concerne', 'Societe',
+                                              cardinality='?*')
+            card = rschema.rproperty('Affaire', 'Societe', 'cardinality')
+            self.assertEquals(card, '?*')
+        finally:
+            self.mh.cmd_change_relation_props('Affaire', 'concerne', 'Societe',
+                                              cardinality='**')
+            
+    def test_change_relation_props_final(self):
+        rschema = self.schema['adel']
+        card = rschema.rproperty('Personne', 'String', 'fulltextindexed')
+        self.assertEquals(card, False)
+        try:
+            self.mh.cmd_change_relation_props('Personne', 'adel', 'String',
+                                              fulltextindexed=True)
+            card = rschema.rproperty('Personne', 'String', 'fulltextindexed')
+            self.assertEquals(card, True)
+        finally:
+            self.mh.cmd_change_relation_props('Personne', 'adel', 'String',
+                                              fulltextindexed=False)
+
+    def test_synchronize_schema(self):
+        cursor = self.mh.rqlcursor
+        nbrqlexpr_start = len(cursor.execute('RQLExpression X'))
+        migrschema['titre']._rproperties[('Personne', 'String')]['order'] = 7
+        migrschema['adel']._rproperties[('Personne', 'String')]['order'] = 6
+        migrschema['ass']._rproperties[('Personne', 'String')]['order'] = 5
+#         expected = ['eid', 'has_text', 'creation_date', 'modification_date',
+#                     'nom', 'prenom', 'civility', 'promo', 'ass', 'adel', 'titre',
+#                     'web', 'tel', 'fax', 'datenaiss', 'test']
+#         self.assertEquals([rs.type for rs in migrschema['Personne'].ordered_relations() if rs.is_final()],
+#                           expected)
+        migrschema['Personne'].description = 'blabla bla'
+        migrschema['titre'].description = 'usually a title' 
+        migrschema['titre']._rproperties[('Personne', 'String')]['description'] = 'title for this person'
+#         rinorderbefore = cursor.execute('Any O,N WHERE X is EFRDef, X relation_type RT, RT name N,'
+#                                         'X from_entity FE, FE name "Personne",'
+#                                         'X ordernum O ORDERBY O')
+#         expected = [u'creation_date', u'modification_date', u'nom', u'prenom',
+#                     u'sexe', u'promo', u'titre', u'adel', u'ass', u'web', u'tel',
+#                     u'fax', u'datenaiss', u'test', u'description']
+#        self.assertListEquals(rinorderbefore, map(list, zip([0, 0]+range(1, len(expected)), expected)))
+        
+        self.mh.cmd_synchronize_schema(commit=False)
+        
+        self.assertEquals(cursor.execute('Any D WHERE X name "Personne", X description D')[0][0],
+                          'blabla bla')
+        self.assertEquals(cursor.execute('Any D WHERE X name "titre", X description D')[0][0],
+                          'usually a title')
+        self.assertEquals(cursor.execute('Any D WHERE X relation_type RT, RT name "titre",'
+                                         'X from_entity FE, FE name "Personne",'
+                                         'X description D')[0][0],
+                          'title for this person')
+        # skip "sexe" and "description" since they aren't in the migration
+        # schema and so behaviour is undefined
+        # "civility" is also skipped since it may have been added by
+        # test_rename_attribute :o/
+        rinorder = [n for n, in cursor.execute('Any N ORDERBY O WHERE X is EFRDef, X relation_type RT, RT name N,'
+                                               'X from_entity FE, FE name "Personne",'
+                                               'X ordernum O') if n not in ('sexe', 'description', 'civility')]
+        expected = [u'nom', u'prenom', u'promo', u'ass', u'adel', u'titre',
+                    u'web', u'tel', u'fax', u'datenaiss', u'test', u'firstname',
+                    u'creation_date', u'modification_date']
+        self.assertEquals(rinorder, expected)
+
+        # test permissions synchronization ####################################
+        # new rql expr to add note entity
+        eexpr = self._erqlexpr_entity('add', 'Note')
+        self.assertEquals(eexpr.expression,
+                          'X ecrit_part PE, U in_group G, '
+                          'PE require_permission P, P name "add_note", P require_group G')
+        self.assertEquals([et.name for et in eexpr.reverse_add_permission], ['Note'])
+        self.assertEquals(eexpr.reverse_read_permission, [])
+        self.assertEquals(eexpr.reverse_delete_permission, [])
+        self.assertEquals(eexpr.reverse_update_permission, [])
+        # no more rqlexpr to delete and add para attribute
+        self.failIf(self._rrqlexpr_rset('add', 'para'))
+        self.failIf(self._rrqlexpr_rset('delete', 'para'))
+        # new rql expr to add ecrit_par relation        
+        rexpr = self._rrqlexpr_entity('add', 'ecrit_par')
+        self.assertEquals(rexpr.expression,
+                          'O require_permission P, P name "add_note", '
+                          'U in_group G, P require_group G')
+        self.assertEquals([rt.name for rt in rexpr.reverse_add_permission], ['ecrit_par'])
+        self.assertEquals(rexpr.reverse_read_permission, [])
+        self.assertEquals(rexpr.reverse_delete_permission, [])
+        # no more rqlexpr to delete and add travaille relation
+        self.failIf(self._rrqlexpr_rset('add', 'travaille'))
+        self.failIf(self._rrqlexpr_rset('delete', 'travaille'))
+        # no more rqlexpr to delete and update Societe entity
+        self.failIf(self._erqlexpr_rset('update', 'Societe'))
+        self.failIf(self._erqlexpr_rset('delete', 'Societe'))
+        # no more rqlexpr to read Affaire entity
+        self.failIf(self._erqlexpr_rset('read', 'Affaire'))
+        # rqlexpr to update Affaire entity has been updated
+        eexpr = self._erqlexpr_entity('update', 'Affaire')
+        self.assertEquals(eexpr.expression, 'X concerne S, S owned_by U')
+        # no change for rqlexpr to add and delete Affaire entity
+        self.assertEquals(len(self._erqlexpr_rset('delete', 'Affaire')), 1)
+        self.assertEquals(len(self._erqlexpr_rset('add', 'Affaire')), 1)
+        # no change for rqlexpr to add and delete concerne relation
+        self.assertEquals(len(self._rrqlexpr_rset('delete', 'concerne')), 1)
+        self.assertEquals(len(self._rrqlexpr_rset('add', 'concerne')), 1)
+        # * migrschema involve:
+        #   * 8 deletion (2 in Affaire read + Societe + travaille + para rqlexprs)
+        #   * 1 update (Affaire update)
+        #   * 2 new (Note add, ecrit_par add)
+        # remaining orphan rql expr which should be deleted at commit (composite relation)
+        self.assertEquals(len(cursor.execute('RQLExpression X WHERE NOT ET1 read_permission X, NOT ET2 add_permission X, '
+                                             'NOT ET3 delete_permission X, NOT ET4 update_permission X')), 8+1)
+        # finally
+        self.assertEquals(len(cursor.execute('RQLExpression X')), nbrqlexpr_start + 1 + 2) 
+                          
+        self.mh.rollback()
+
+    def _erqlexpr_rset(self, action, ertype):
+        rql = 'RQLExpression X WHERE ET is EEType, ET %s_permission X, ET name %%(name)s' % action
+        return self.mh.rqlcursor.execute(rql, {'name': ertype})
+    def _erqlexpr_entity(self, action, ertype):
+        rset = self._erqlexpr_rset(action, ertype)
+        self.assertEquals(len(rset), 1)
+        return rset.get_entity(0, 0)
+    def _rrqlexpr_rset(self, action, ertype):
+        rql = 'RQLExpression X WHERE ET is ERType, ET %s_permission X, ET name %%(name)s' % action
+        return self.mh.rqlcursor.execute(rql, {'name': ertype})
+    def _rrqlexpr_entity(self, action, ertype):
+        rset = self._rrqlexpr_rset(action, ertype)
+        self.assertEquals(len(rset), 1)
+        return rset.get_entity(0, 0)
+    
+    def test_set_size_constraint(self):
+        # existing previous value
+        try:
+            self.mh.cmd_set_size_constraint('EEType', 'name', 128)
+        finally:
+            self.mh.cmd_set_size_constraint('EEType', 'name', 64)
+        # non existing previous value
+        try:
+            self.mh.cmd_set_size_constraint('EEType', 'description', 256)
+        finally:
+            self.mh.cmd_set_size_constraint('EEType', 'description', None)
+
+    def test_add_remove_cube(self):
+        cubes = set(self.config.cubes())
+        schema = self.repo.schema
+        try:
+            self.mh.cmd_remove_cube('eemail')
+            # efile was there because it's an eemail dependency, should have been removed
+            cubes.remove('eemail')
+            cubes.remove('efile')
+            self.assertEquals(set(self.config.cubes()), cubes)
+            for ertype in ('Email', 'EmailThread', 'EmailPart', 'File', 'Image', 
+                           'sender', 'in_thread', 'reply_to', 'data_format'):
+                self.failIf(ertype in schema, ertype)
+            self.assertEquals(sorted(schema['see_also']._rproperties.keys()),
+                              [('Folder', 'Folder')])
+            self.assertEquals(schema['see_also'].subjects(), ('Folder',))
+            self.assertEquals(schema['see_also'].objects(), ('Folder',))
+            self.assertEquals(self.execute('Any X WHERE X pkey "system.version.eemail"').rowcount, 0)
+            self.assertEquals(self.execute('Any X WHERE X pkey "system.version.efile"').rowcount, 0)
+            self.failIf('eemail' in self.config.cubes())
+            self.failIf('efile' in self.config.cubes())
+        finally:
+            self.mh.cmd_add_cube('eemail')
+            cubes.add('eemail')
+            cubes.add('efile')
+            self.assertEquals(set(self.config.cubes()), cubes)
+            for ertype in ('Email', 'EmailThread', 'EmailPart', 'File', 'Image', 
+                           'sender', 'in_thread', 'reply_to', 'data_format'):
+                self.failUnless(ertype in schema, ertype)
+            self.assertEquals(sorted(schema['see_also']._rproperties.keys()),
+                              [('EmailThread', 'EmailThread'), ('Folder', 'Folder')])
+            self.assertEquals(sorted(schema['see_also'].subjects()), ['EmailThread', 'Folder'])
+            self.assertEquals(sorted(schema['see_also'].objects()), ['EmailThread', 'Folder'])
+            from eemail.__pkginfo__ import version as eemail_version
+            from efile.__pkginfo__ import version as efile_version
+            self.assertEquals(self.execute('Any V WHERE X value V, X pkey "system.version.eemail"')[0][0],
+                              eemail_version)
+            self.assertEquals(self.execute('Any V WHERE X value V, X pkey "system.version.efile"')[0][0],
+                              efile_version)
+            self.failUnless('eemail' in self.config.cubes())
+            self.failUnless('efile' in self.config.cubes())
+            # trick: overwrite self.maxeid to avoid deletion of just reintroduced
+            #        types (and their associated tables!)
+            self.maxeid = self.execute('Any MAX(X)')[0][0]
+            # why this commit is necessary is unclear to me (though without it
+            # next test may fail complaining of missing tables)
+            self.commit() 
+        
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/unittest_querier.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1284 @@
+# -*- coding: iso-8859-1 -*-
+"""unit tests for modules cubicweb.server.querier and cubicweb.server.querier_steps
+"""
+
+from logilab.common.testlib import TestCase, unittest_main
+from cubicweb.devtools import init_test_database
+from cubicweb.devtools.repotest import tuplify, BaseQuerierTC
+from unittest_session import Variable
+
+from mx.DateTime import today, now, DateTimeType
+from rql import BadRQLQuery, RQLSyntaxError
+from cubicweb import QueryError, Unauthorized
+from cubicweb.server.utils import crypt_password
+from cubicweb.server.sources.native import make_schema
+
+
+# register priority/severity sorting registered procedure
+from rql.utils import register_function, FunctionDescr
+
+class group_sort_value(FunctionDescr):
+    supported_backends = ('sqlite',)
+    rtype = 'Int'
+try:
+    register_function(group_sort_value)
+except AssertionError:
+    pass
+from cubicweb.server.sqlutils import SQL_CONNECT_HOOKS
+def init_sqlite_connexion(cnx):
+    def group_sort_value(text):
+        return {"managers": "3", "users": "2", "guests":  "1", "owners": "0"}[text]
+    cnx.create_function("GROUP_SORT_VALUE", 1, group_sort_value)
+SQL_CONNECT_HOOKS['sqlite'].append(init_sqlite_connexion)
+
+
+from logilab.common.adbh import _GenericAdvFuncHelper
+TYPEMAP = _GenericAdvFuncHelper.TYPE_MAPPING
+
+class MakeSchemaTC(TestCase):
+    def test_known_values(self):
+        solution = {'A': 'String', 'B': 'EUser'}
+        self.assertEquals(make_schema((Variable('A'), Variable('B')), solution, 
+                                      'table0', TYPEMAP),
+                          ('C0 text,C1 integer', {'A': 'table0.C0', 'B': 'table0.C1'}))
+        
+
+repo, cnx = init_test_database('sqlite')
+
+
+
+class UtilsTC(BaseQuerierTC):
+    repo = repo
+    
+    def get_max_eid(self):
+        # max-eid tracking is not needed for these tests; returning None disables it
+        return None
+    def cleanup(self):
+        # no need for cleanup here
+        pass
+    
+    def test_preprocess_1(self):
+        reid = self.execute('Any X WHERE X is ERType, X name "owned_by"')[0][0]
+        rqlst = self._prepare('Any COUNT(RDEF) WHERE RDEF relation_type X, X eid %(x)s', {'x': reid})
+        self.assertEquals(rqlst.solutions, [{'RDEF': 'EFRDef'}, {'RDEF': 'ENFRDef'}])
+        
+    def test_preprocess_2(self):
+        teid = self.execute("INSERT Tag X: X name 'tag'")[0][0]
+        #geid = self.execute("EGroup G WHERE G name 'users'")[0][0]
+        #self.execute("SET X tags Y WHERE X eid %(t)s, Y eid %(g)s",
+        #             {'g': geid, 't': teid}, 'g')
+        rqlst = self._prepare('Any X WHERE E eid %(x)s, E tags X', {'x': teid})
+        # the query may be optimized, should keep only one solution
+        # (any one, etype will be discarded)
+        self.assertEquals(len(rqlst.solutions), 1)
+        
+    def test_preprocess_security(self):
+        plan = self._prepare_plan('Any ETN,COUNT(X) GROUPBY ETN '
+                                  'WHERE X is ET, ET name ETN')
+        plan.session = self._user_session(('users',))[1]
+        union = plan.rqlst
+        plan.preprocess(union)
+        self.assertEquals(len(union.children), 1)
+        self.assertEquals(len(union.children[0].with_), 1)
+        subq = union.children[0].with_[0].query
+        self.assertEquals(len(subq.children), 3)
+        self.assertEquals([t.as_string() for t in union.children[0].selection],
+                          ['ETN','COUNT(X)'])
+        self.assertEquals([t.as_string() for t in union.children[0].groupby],
+                          ['ETN'])
+        partrqls = sorted(((rqlst.as_string(), rqlst.solutions) for rqlst in subq.children))
+        rql, solutions = partrqls[0]
+        self.assertEquals(rql,
+                          'Any ETN,X WHERE X is ET, ET name ETN, (EXISTS(X owned_by %(B)s))'
+                          ' OR ((((EXISTS(D concerne C?, C owned_by %(B)s, X identity D, C is Division, D is Affaire))'
+                          ' OR (EXISTS(H concerne G?, G owned_by %(B)s, G is SubDivision, X identity H, H is Affaire)))'
+                          ' OR (EXISTS(I concerne F?, F owned_by %(B)s, F is Societe, X identity I, I is Affaire)))'
+                          ' OR (EXISTS(J concerne E?, E owned_by %(B)s, E is Note, X identity J, J is Affaire)))'
+                          ', ET is EEType, X is Affaire')
+        self.assertEquals(solutions, [{'C': 'Division',
+                                       'D': 'Affaire',
+                                       'E': 'Note',
+                                       'F': 'Societe',
+                                       'G': 'SubDivision',
+                                       'H': 'Affaire',
+                                       'I': 'Affaire',
+                                       'J': 'Affaire',
+                                       'X': 'Affaire',
+                                       'ET': 'EEType', 'ETN': 'String'}])
+        rql, solutions = partrqls[1]
+        self.assertEquals(rql,  'Any ETN,X WHERE X is ET, ET name ETN, ET is EEType, '
+                          'X is IN(Bookmark, Card, Comment, Division, EConstraint, EConstraintType, EEType, EFRDef, EGroup, ENFRDef, EPermission, EProperty, ERType, EUser, Email, EmailAddress, EmailPart, EmailThread, File, Folder, Image, Note, Personne, RQLExpression, Societe, State, SubDivision, Tag, TrInfo, Transition)')
+        self.assertListEquals(sorted(solutions),
+                              sorted([{'X': 'Bookmark', 'ETN': 'String', 'ET': 'EEType'},
+                                      {'X': 'Card', 'ETN': 'String', 'ET': 'EEType'},
+                                      {'X': 'Comment', 'ETN': 'String', 'ET': 'EEType'},
+                                      {'X': 'Division', 'ETN': 'String', 'ET': 'EEType'},
+                                      {'X': 'EConstraint', 'ETN': 'String', 'ET': 'EEType'},
+                                      {'X': 'EConstraintType', 'ETN': 'String', 'ET': 'EEType'},
+                                      {'X': 'EEType', 'ETN': 'String', 'ET': 'EEType'},
+                                      {'X': 'EFRDef', 'ETN': 'String', 'ET': 'EEType'},
+                                      {'X': 'EGroup', 'ETN': 'String', 'ET': 'EEType'},
+                                      {'X': 'Email', 'ETN': 'String', 'ET': 'EEType'},
+                                      {'X': 'EmailAddress', 'ETN': 'String', 'ET': 'EEType'},
+                                      {'X': 'EmailPart', 'ETN': 'String', 'ET': 'EEType'},
+                                      {'X': 'EmailThread', 'ETN': 'String', 'ET': 'EEType'},
+                                      {'X': 'ENFRDef', 'ETN': 'String', 'ET': 'EEType'},
+                                      {'X': 'EPermission', 'ETN': 'String', 'ET': 'EEType'},
+                                      {'X': 'EProperty', 'ETN': 'String', 'ET': 'EEType'},
+                                      {'X': 'ERType', 'ETN': 'String', 'ET': 'EEType'},
+                                      {'X': 'EUser', 'ETN': 'String', 'ET': 'EEType'},
+                                      {'X': 'File', 'ETN': 'String', 'ET': 'EEType'},
+                                      {'X': 'Folder', 'ETN': 'String', 'ET': 'EEType'},
+                                      {'X': 'Image', 'ETN': 'String', 'ET': 'EEType'},
+                                      {'X': 'Note', 'ETN': 'String', 'ET': 'EEType'},
+                                      {'X': 'Personne', 'ETN': 'String', 'ET': 'EEType'},
+                                      {'X': 'RQLExpression', 'ETN': 'String', 'ET': 'EEType'},
+                                      {'X': 'Societe', 'ETN': 'String', 'ET': 'EEType'},
+                                      {'X': 'State', 'ETN': 'String', 'ET': 'EEType'},
+                                      {'X': 'SubDivision', 'ETN': 'String', 'ET': 'EEType'},
+                                      {'X': 'Tag', 'ETN': 'String', 'ET': 'EEType'},
+                                      {'X': 'Transition', 'ETN': 'String', 'ET': 'EEType'},
+                                      {'X': 'TrInfo', 'ETN': 'String', 'ET': 'EEType'}]))
+        rql, solutions = partrqls[2]
+        self.assertEquals(rql,
+                          'Any ETN,X WHERE X is ET, ET name ETN, EXISTS(X owned_by %(C)s), '
+                          'ET is EEType, X is Basket')
+        self.assertEquals(solutions, [{'ET': 'EEType',
+                                       'X': 'Basket',
+                                       'ETN': 'String',
+                                       }])
+
+    def test_preprocess_security_aggregat(self):
+        plan = self._prepare_plan('Any MAX(X)')
+        plan.session = self._user_session(('users',))[1]
+        union = plan.rqlst
+        plan.preprocess(union)
+        self.assertEquals(len(union.children), 1)
+        self.assertEquals(len(union.children[0].with_), 1)
+        subq = union.children[0].with_[0].query
+        self.assertEquals(len(subq.children), 3)
+        self.assertEquals([t.as_string() for t in union.children[0].selection],
+                          ['MAX(X)'])
+        
+    def test_preprocess_nonregr(self):
+        rqlst = self._prepare('Any S ORDERBY SI WHERE NOT S ecrit_par O, S para SI')
+        self.assertEquals(len(rqlst.solutions), 1)
+    
+    def test_build_description(self):
+        # should return an empty result set
+        rset = self.execute('Any X WHERE X eid %(x)s', {'x': self.session.user.eid})
+        self.assertEquals(rset.description[0][0], 'EUser')
+        rset = self.execute('Any 1')
+        self.assertEquals(rset.description[0][0], 'Int')
+        rset = self.execute('Any TRUE')
+        self.assertEquals(rset.description[0][0], 'Boolean')
+        rset = self.execute('Any "hop"')
+        self.assertEquals(rset.description[0][0], 'String')
+        rset = self.execute('Any TODAY')
+        self.assertEquals(rset.description[0][0], 'Date')
+        rset = self.execute('Any NOW')
+        self.assertEquals(rset.description[0][0], 'Datetime')
+        rset = self.execute('Any %(x)s', {'x': 1})
+        self.assertEquals(rset.description[0][0], 'Int')
+        rset = self.execute('Any %(x)s', {'x': 1L})
+        self.assertEquals(rset.description[0][0], 'Int')
+        rset = self.execute('Any %(x)s', {'x': True})
+        self.assertEquals(rset.description[0][0], 'Boolean')
+        rset = self.execute('Any %(x)s', {'x': 1.0})
+        self.assertEquals(rset.description[0][0], 'Float')
+        rset = self.execute('Any %(x)s', {'x': now()})
+        self.assertEquals(rset.description[0][0], 'Datetime')
+        rset = self.execute('Any %(x)s', {'x': 'str'})
+        self.assertEquals(rset.description[0][0], 'String')
+        rset = self.execute('Any %(x)s', {'x': u'str'})
+        self.assertEquals(rset.description[0][0], 'String')
+
+
+class QuerierTC(BaseQuerierTC):
+    repo = repo
+
+    def test_encoding_pb(self):
+        self.assertRaises(RQLSyntaxError, self.execute,
+                          'Any X WHERE X is ERType, X name "öwned_by"')
+
+    def test_unknown_eid(self):
+        # should return an empty result set
+        self.failIf(self.execute('Any X WHERE X eid 99999999'))
+        
+    # selection queries tests #################################################
+    
+    def test_select_1(self):
+        rset = self.execute('Any X ORDERBY X WHERE X is EGroup')
+        result, descr = rset.rows, rset.description
+        self.assertEquals(tuplify(result), [(1,), (2,), (3,), (4,)])
+        self.assertEquals(descr, [('EGroup',), ('EGroup',), ('EGroup',), ('EGroup',)])
+        
+    def test_select_2(self):
+        rset = self.execute('Any X ORDERBY N WHERE X is EGroup, X name N')
+        self.assertEquals(tuplify(rset.rows), [(3,), (1,), (4,), (2,)])
+        self.assertEquals(rset.description, [('EGroup',), ('EGroup',), ('EGroup',), ('EGroup',)])
+        rset = self.execute('Any X ORDERBY N DESC WHERE X is EGroup, X name N')
+        self.assertEquals(tuplify(rset.rows), [(2,), (4,), (1,), (3,)])
+        
+    def test_select_3(self):
+        rset = self.execute('Any N GROUPBY N WHERE X is EGroup, X name N')
+        result, descr = rset.rows, rset.description
+        result.sort()
+        self.assertEquals(tuplify(result), [('guests',), ('managers',), ('owners',), ('users',)])
+        self.assertEquals(descr, [('String',), ('String',), ('String',), ('String',)])
+        
+    def test_select_is(self):
+        rset = self.execute('Any X, TN ORDERBY TN LIMIT 10 WHERE X is T, T name TN')
+        result, descr = rset.rows, rset.description
+        self.assertEquals(result[0][1], descr[0][0])
+        
+    def test_select_is_aggr(self):
+        rset = self.execute('Any TN, COUNT(X) GROUPBY TN ORDERBY 2 DESC WHERE X is T, T name TN')
+        result, descr = rset.rows, rset.description
+        self.assertEquals(descr[0][0], 'String')
+        self.assertEquals(descr[0][1], 'Int')
+        self.assertEquals(result[0][0], 'ENFRDef')
+        
+    def test_select_groupby_orderby(self):
+        rset = self.execute('Any N GROUPBY N ORDERBY N WHERE X is EGroup, X name N')
+        self.assertEquals(tuplify(rset.rows), [('guests',), ('managers',), ('owners',), ('users',)])
+        self.assertEquals(rset.description, [('String',), ('String',), ('String',), ('String',)])
+        
+    def test_select_complex_groupby(self):
+        rset = self.execute('Any N GROUPBY N WHERE X name N')
+        rset = self.execute('Any N,MAX(D) GROUPBY N LIMIT 5 WHERE X name N, X creation_date D')
+        
+    def test_select_inlined_groupby(self):
+        seid = self.execute('State X WHERE X name "deactivated"')[0][0]
+        rset = self.execute('Any U,L,S GROUPBY U,L,S WHERE X in_state S, U login L, S eid %s' % seid)
+        
+    def test_select_complex_orderby(self):
+        rset1 = self.execute('Any N ORDERBY N WHERE X name N')
+        self.assertEquals(sorted(rset1.rows), rset1.rows)
+        rset = self.execute('Any N ORDERBY N LIMIT 5 OFFSET 1 WHERE X name N')
+        self.assertEquals(rset.rows[0][0], rset1.rows[1][0]) 
+        self.assertEquals(len(rset), 5)
+        
+    def test_select_5(self):
+        rset = self.execute('Any X, TMP ORDERBY TMP WHERE X name TMP, X is EGroup')
+        self.assertEquals(tuplify(rset.rows), [(3, 'guests',), (1, 'managers',), (4, 'owners',), (2, 'users',)])
+        self.assertEquals(rset.description, [('EGroup', 'String',), ('EGroup', 'String',), ('EGroup', 'String',), ('EGroup', 'String',)])
+        
+    def test_select_6(self):
+        self.execute("INSERT Personne X: X nom 'bidule'")[0]
+        rset = self.execute('Any Y where X name TMP, Y nom in (TMP, "bidule")')
+        #self.assertEquals(rset.description, [('Personne',), ('Personne',)])
+        self.assert_(('Personne',) in rset.description)
+        rset = self.execute('DISTINCT Any Y where X name TMP, Y nom in (TMP, "bidule")')
+        self.assert_(('Personne',) in rset.description)
+        
+    def test_select_not_attr(self):
+        self.execute("INSERT Personne X: X nom 'bidule'")
+        self.execute("INSERT Societe X: X nom 'chouette'")
+        rset = self.execute('Personne X WHERE NOT X nom "bidule"')
+        self.assertEquals(len(rset.rows), 0, rset.rows)
+        rset = self.execute('Personne X WHERE NOT X nom "bid"')
+        self.assertEquals(len(rset.rows), 1, rset.rows)
+        self.execute("SET P travaille S WHERE P nom 'bidule', S nom 'chouette'")
+        rset = self.execute('Personne X WHERE NOT X travaille S')
+        self.assertEquals(len(rset.rows), 0, rset.rows)
+        
+    def test_select_is_in(self):
+        self.execute("INSERT Personne X: X nom 'bidule'")
+        self.execute("INSERT Societe X: X nom 'chouette'")
+        self.assertEquals(len(self.execute("Any X WHERE X is IN (Personne, Societe)")),
+                          2)
+        
+    def test_select_not_rel(self):
+        self.execute("INSERT Personne X: X nom 'bidule'")
+        self.execute("INSERT Societe X: X nom 'chouette'")
+        self.execute("INSERT Personne X: X nom 'autre'")
+        self.execute("SET P travaille S WHERE P nom 'bidule', S nom 'chouette'")
+        rset = self.execute('Personne X WHERE NOT X travaille S')
+        self.assertEquals(len(rset.rows), 1, rset.rows)
+        rset = self.execute('Personne X WHERE NOT X travaille S, S nom "chouette"')
+        self.assertEquals(len(rset.rows), 1, rset.rows)
+        
+    def test_select_nonregr_inlined(self):
+        self.execute("INSERT Note X: X para 'bidule'")
+        self.execute("INSERT Personne X: X nom 'chouette'")
+        self.execute("INSERT Personne X: X nom 'autre'")
+        self.execute("SET X ecrit_par P WHERE X para 'bidule', P nom 'chouette'")
+        rset = self.execute('Any U,T ORDERBY T DESC WHERE U is EUser, '
+                            'N ecrit_par U, N type T')#, {'x': self.ueid})
+        self.assertEquals(len(rset.rows), 0)
+        
+    def test_select_nonregr_edition_not(self):
+        groupeids = set((1, 2, 3))
+        groupreadperms = set(r[0] for r in self.execute('Any Y WHERE X name "EGroup", Y eid IN(1, 2, 3), X read_permission Y'))
+        rset = self.execute('DISTINCT Any Y WHERE X is EEType, X name "EGroup", Y eid IN(1, 2, 3), NOT X read_permission Y')
+        self.assertEquals(sorted(r[0] for r in rset.rows), sorted(groupeids - groupreadperms))
+        rset = self.execute('DISTINCT Any Y WHERE X name "EGroup", Y eid IN(1, 2, 3), NOT X read_permission Y')
+        self.assertEquals(sorted(r[0] for r in rset.rows), sorted(groupeids - groupreadperms))
+                     
+    def test_select_outer_join(self):
+        peid1 = self.execute("INSERT Personne X: X nom 'bidule'")[0][0]
+        peid2 = self.execute("INSERT Personne X: X nom 'autre'")[0][0]
+        seid1 = self.execute("INSERT Societe X: X nom 'chouette'")[0][0]
+        seid2 = self.execute("INSERT Societe X: X nom 'chouetos'")[0][0]
+        rset = self.execute('Any X,S ORDERBY X WHERE X travaille S?')
+        self.assertEquals(rset.rows, [[peid1, None], [peid2, None]])
+        self.execute("SET P travaille S WHERE P nom 'bidule', S nom 'chouette'")
+        rset = self.execute('Any X,S ORDERBY X WHERE X travaille S?')
+        self.assertEquals(rset.rows, [[peid1, seid1], [peid2, None]])
+        rset = self.execute('Any S,X ORDERBY S WHERE X? travaille S')
+        self.assertEquals(rset.rows, [[seid1, peid1], [seid2, None]])
+        
+    def test_select_outer_join_optimized(self):
+        peid1 = self.execute("INSERT Personne X: X nom 'bidule'")[0][0]
+        rset = self.execute('Any X WHERE X eid %(x)s, P? connait X', {'x':peid1}, 'x')
+        self.assertEquals(rset.rows, [[peid1]])
+        rset = self.execute('Any X WHERE X eid %(x)s, X require_permission P?', {'x':peid1}, 'x')
+        self.assertEquals(rset.rows, [[peid1]])
+
+    def test_select_left_outer_join(self):
+        ueid = self.execute("INSERT EUser X: X login 'bob', X upassword 'toto', X in_group G "
+                            "WHERE G name 'users'")[0][0]
+        self.commit()
+        try:
+            rset = self.execute('Any FS,TS,C,D,U ORDERBY D DESC '
+                                'WHERE WF wf_info_for X,'
+                                'WF from_state FS?, WF to_state TS, WF comment C,'
+                                'WF creation_date D, WF owned_by U, X eid %(x)s',
+                                {'x': ueid}, 'x')
+            self.assertEquals(len(rset), 1)
+            self.execute('SET X in_state S WHERE X eid %(x)s, S name "deactivated"',
+                         {'x': ueid}, 'x')
+            rset = self.execute('Any FS,TS,C,D,U ORDERBY D DESC '
+                                'WHERE WF wf_info_for X,'
+                                'WF from_state FS?, WF to_state TS, WF comment C,'
+                                'WF creation_date D, WF owned_by U, X eid %(x)s',
+                                {'x': ueid}, 'x')
+            self.assertEquals(len(rset), 2)
+        finally:
+            self.execute('DELETE EUser X WHERE X eid %s' % ueid)
+            self.commit()
+
+    def test_select_ambigous_outer_join(self):
+        teid = self.execute("INSERT Tag X: X name 'tag'")[0][0]
+        self.execute("INSERT Tag X: X name 'tagbis'")[0][0]
+        geid = self.execute("EGroup G WHERE G name 'users'")[0][0]
+        self.execute("SET X tags Y WHERE X eid %(t)s, Y eid %(g)s",
+                     {'g': geid, 't': teid}, 'g')
+        rset = self.execute("Any GN,TN ORDERBY GN WHERE T? tags G, T name TN, G name GN")
+        self.failUnless(['users', 'tag'] in rset.rows)
+        self.failUnless(['activated', None] in rset.rows)
+        rset = self.execute("Any GN,TN ORDERBY GN WHERE T tags G?, T name TN, G name GN")
+        self.assertEquals(rset.rows, [[None, 'tagbis'], ['users', 'tag']])            
+        
+    def test_select_not_inline_rel(self):
+        self.execute("INSERT Personne X: X nom 'bidule'")
+        self.execute("INSERT Note X: X type 'a'")
+        self.execute("INSERT Note X: X type 'b'")
+        self.execute("SET X ecrit_par Y WHERE X type 'a', Y nom 'bidule'")
+        rset = self.execute('Note X WHERE NOT X ecrit_par P')
+        self.assertEquals(len(rset.rows), 1, rset.rows)
+        
+    def test_select_not_unlinked_multiple_solutions(self):
+        self.execute("INSERT Personne X: X nom 'bidule'")
+        self.execute("INSERT Note X: X type 'a'")
+        self.execute("INSERT Note X: X type 'b'")
+        self.execute("SET Y evaluee X WHERE X type 'a', Y nom 'bidule'")
+        rset = self.execute('Note X WHERE NOT Y evaluee X')
+        self.assertEquals(len(rset.rows), 1, rset.rows)
+
+    def test_select_aggregat_count(self):
+        rset = self.execute('Any COUNT(X)')
+        self.assertEquals(len(rset.rows), 1)
+        self.assertEquals(len(rset.rows[0]), 1)
+        self.assertEquals(rset.description, [('Int',)])
+        
+    def test_select_aggregat_sum(self):
+        rset = self.execute('Any SUM(O) WHERE X ordernum O')
+        self.assertEquals(len(rset.rows), 1)
+        self.assertEquals(len(rset.rows[0]), 1)
+        self.assertEquals(rset.description, [('Int',)])
+        
+    def test_select_aggregat_min(self):
+        rset = self.execute('Any MIN(X) WHERE X is Personne')
+        self.assertEquals(len(rset.rows), 1)
+        self.assertEquals(len(rset.rows[0]), 1)
+        self.assertEquals(rset.description, [('Personne',)])
+        rset = self.execute('Any MIN(O) WHERE X ordernum O')
+        self.assertEquals(len(rset.rows), 1)
+        self.assertEquals(len(rset.rows[0]), 1)
+        self.assertEquals(rset.description, [('Int',)])
+        
+    def test_select_aggregat_max(self):
+        rset = self.execute('Any MAX(X) WHERE X is Personne')
+        self.assertEquals(len(rset.rows), 1)
+        self.assertEquals(len(rset.rows[0]), 1)
+        self.assertEquals(rset.description, [('Personne',)])
+        rset = self.execute('Any MAX(O) WHERE X ordernum O')
+        self.assertEquals(len(rset.rows), 1)
+        self.assertEquals(len(rset.rows[0]), 1)
+        self.assertEquals(rset.description, [('Int',)])
+
+    def test_select_custom_aggregat_concat_string(self):
+        rset = self.execute('Any CONCAT_STRINGS(N) WHERE X is EGroup, X name N')
+        self.failUnless(rset)
+        self.failUnlessEqual(sorted(rset[0][0].split(', ')), ['guests', 'managers',
+                                                             'owners', 'users'])
+
+    def test_select_custom_regproc_limit_size(self):
+        rset = self.execute('Any TEXT_LIMIT_SIZE(N, 3) WHERE X is EGroup, X name N, X name "managers"')
+        self.failUnless(rset)
+        self.failUnlessEqual(rset[0][0], 'man...')
+        self.execute("INSERT Basket X: X name 'bidule', X description '<b>hop hop</b>', X description_format 'text/html'")
+        rset = self.execute('Any LIMIT_SIZE(D, DF, 3) WHERE X is Basket, X description D, X description_format DF')
+        self.failUnless(rset)
+        self.failUnlessEqual(rset[0][0], 'hop...')
+
+    def test_select_regproc_orderby(self):
+        rset = self.execute('DISTINCT Any X,N ORDERBY GROUP_SORT_VALUE(N) WHERE X is EGroup, X name N, X name "managers"')
+        self.failUnlessEqual(len(rset), 1)
+        self.failUnlessEqual(rset[0][1], 'managers')
+        rset = self.execute('Any X,N ORDERBY GROUP_SORT_VALUE(N) WHERE X is EGroup, X name N, NOT U in_group X, U login "admin"')
+        self.failUnlessEqual(len(rset), 3)
+        self.failUnlessEqual(rset[0][1], 'owners')
+        
+    def test_select_aggregat_sort(self):
+        rset = self.execute('Any G, COUNT(U) GROUPBY G ORDERBY 2 WHERE U in_group G')
+        self.assertEquals(len(rset.rows), 2)
+        self.assertEquals(len(rset.rows[0]), 2)
+        self.assertEquals(rset.description[0], ('EGroup', 'Int',))
+
+    def test_select_aggregat_having(self):
+        rset = self.execute('Any N,COUNT(RDEF) GROUPBY N ORDERBY 2,N '
+                            'WHERE RT name N, RDEF relation_type RT '
+                            'HAVING COUNT(RDEF) > 10')
+        self.assertListEquals(rset.rows,
+                              [[u'description', 11], ['in_basket', 11],
+                               [u'name', 12], [u'created_by', 32],
+                               [u'creation_date', 32], [u'is', 32], [u'is_instance_of', 32],
+                               [u'modification_date', 32], [u'owned_by', 32]])
+
+    def test_select_aggregat_having_dumb(self):
+        # dumb but should not raise an error
+        rset = self.execute('Any U,COUNT(X) GROUPBY U '
+                            'WHERE U eid %(x)s, X owned_by U '
+                            'HAVING COUNT(X) > 10', {'x': self.ueid})
+        self.assertEquals(len(rset.rows), 1)
+        self.assertEquals(rset.rows[0][0], self.ueid)
+
+    def test_select_complex_sort(self):
+        rset = self.execute('Any X ORDERBY X,D LIMIT 5 WHERE X creation_date D')
+        result = rset.rows
+        result.sort()
+        self.assertEquals(tuplify(result), [(1,), (2,), (3,), (4,), (5,)])
+        
+    def test_select_upper(self):
+        rset = self.execute('Any X, UPPER(L) ORDERBY L WHERE X is EUser, X login L')
+        self.assertEquals(len(rset.rows), 2)
+        self.assertEquals(rset.rows[0][1], 'ADMIN')
+        self.assertEquals(rset.description[0], ('EUser', 'String',))
+        self.assertEquals(rset.rows[1][1], 'ANON')
+        self.assertEquals(rset.description[1], ('EUser', 'String',))
+        eid = rset.rows[0][0]
+        rset = self.execute('Any UPPER(L) WHERE X eid %s, X login L'%eid)
+        self.assertEquals(rset.rows[0][0], 'ADMIN')
+        self.assertEquals(rset.description, [('String',)])
+
+##     def test_select_simplified(self):
+##         ueid = self.session.user.eid
+##         rset = self.execute('Any L WHERE %s login L'%ueid)
+##         self.assertEquals(rset.rows[0][0], 'admin')
+##         rset = self.execute('Any L WHERE %(x)s login L', {'x':ueid})
+##         self.assertEquals(rset.rows[0][0], 'admin')
+        
+    def test_select_searchable_text_1(self):
+        rset = self.execute(u"INSERT Personne X: X nom 'bidüle'")
+        rset = self.execute(u"INSERT Societe X: X nom 'bidüle'")
+        rset = self.execute("INSERT Societe X: X nom 'chouette'")
+        self.commit()
+        rset = self.execute('Any X where X has_text %(text)s', {'text': u'bidüle'})
+        self.assertEquals(len(rset.rows), 2, rset.rows)
+        rset = self.execute(u'Any N where N has_text "bidüle"')
+        self.assertEquals(len(rset.rows), 2, rset.rows)
+        biduleeids = [r[0] for r in rset.rows]
+        rset = self.execute(u'Any N where NOT N has_text "bidüle"')
+        self.failIf([r[0] for r in rset.rows if r[0] in biduleeids])
+        # non-regression: a full-text search on accented/non-ascii text must not raise
+        rset = self.execute('Any X WHERE X has_text %(text)s', {'text': u'ça'})
+        
+    def test_select_searchable_text_2(self):
+        rset = self.execute("INSERT Personne X: X nom 'bidule'")
+        rset = self.execute("INSERT Personne X: X nom 'chouette'")
+        rset = self.execute("INSERT Societe X: X nom 'bidule'")
+        self.commit()
+        rset = self.execute('Personne N where N has_text "bidule"')
+        self.assertEquals(len(rset.rows), 1, rset.rows)
+        
+    def test_select_searchable_text_3(self):
+        rset = self.execute("INSERT Personne X: X nom 'bidule', X sexe 'M'")
+        rset = self.execute("INSERT Personne X: X nom 'bidule', X sexe 'F'")
+        rset = self.execute("INSERT Societe X: X nom 'bidule'")
+        self.commit()
+        rset = self.execute('Any X where X has_text "bidule" and X sexe "M"')
+        self.assertEquals(len(rset.rows), 1, rset.rows)
+        
+    def test_select_multiple_searchable_text(self):
+        self.execute(u"INSERT Personne X: X nom 'bidüle'")
+        self.execute("INSERT Societe X: X nom 'chouette', S travaille X")
+        self.execute(u"INSERT Personne X: X nom 'bidüle'")
+        self.commit()
+        rset = self.execute('Personne X WHERE X has_text %(text)s, X travaille S, S has_text %(text2)s',
+                            {'text': u'bidüle',
+                             'text2': u'chouette',}
+                            )
+        self.assertEquals(len(rset.rows), 1, rset.rows)
+        
+    def test_select_no_descr(self):
+        rset = self.execute('Any X WHERE X is EGroup', build_descr=0)
+        rset.rows.sort()
+        self.assertEquals(tuplify(rset.rows), [(1,), (2,), (3,), (4,)])
+        self.assertEquals(rset.description, ())
+
+    def test_select_limit_offset(self):
+        rset = self.execute('EGroup X ORDERBY N LIMIT 2 WHERE X name N')
+        self.assertEquals(tuplify(rset.rows), [(3,), (1,)])
+        self.assertEquals(rset.description, [('EGroup',), ('EGroup',)])
+        rset = self.execute('EGroup X ORDERBY N LIMIT 2 OFFSET 2 WHERE X name N')
+        self.assertEquals(tuplify(rset.rows), [(4,), (2,)])
+        
+    def test_select_symetric(self):
+        # 'connait' is a symmetric relation: the same link is found whether
+        # queried as "P connait P2" or "P2 connait P" (see paired asserts below).
+        self.execute("INSERT Personne X: X nom 'machin'")
+        self.execute("INSERT Personne X: X nom 'bidule'")
+        self.execute("INSERT Personne X: X nom 'chouette'")
+        self.execute("INSERT Personne X: X nom 'trucmuche'")
+        self.execute("SET X connait Y WHERE X nom 'chouette', Y nom 'bidule'")
+        self.execute("SET X connait Y WHERE X nom 'machin', Y nom 'chouette'")
+        rset = self.execute('Any P where P connait P2')
+        self.assertEquals(len(rset.rows), 3, rset.rows)
+        rset = self.execute('Any P where NOT P connait P2')
+        self.assertEquals(len(rset.rows), 1, rset.rows) # trucmuche
+        rset = self.execute('Any P where P connait P2, P2 nom "bidule"')
+        self.assertEquals(len(rset.rows), 1, rset.rows)
+        rset = self.execute('Any P where P2 connait P, P2 nom "bidule"')
+        self.assertEquals(len(rset.rows), 1, rset.rows)
+        rset = self.execute('Any P where P connait P2, P2 nom "chouette"')
+        self.assertEquals(len(rset.rows), 2, rset.rows)
+        rset = self.execute('Any P where P2 connait P, P2 nom "chouette"')
+        self.assertEquals(len(rset.rows), 2, rset.rows)
+        
+    def test_select_inline(self):
+        # 'ecrit_par' is traversed from the Note side (inlined relation storage).
+        self.execute("INSERT Personne X: X nom 'bidule'")
+        self.execute("INSERT Note X: X type 'a'")
+        self.execute("SET X ecrit_par Y WHERE X type 'a', Y nom 'bidule'")
+        rset = self.execute('Any N where N ecrit_par X, X nom "bidule"')
+        self.assertEquals(len(rset.rows), 1, rset.rows)
+        
+    def test_select_creation_date(self):
+        self.execute("INSERT Personne X: X nom 'bidule'")
+        rset = self.execute('Any D WHERE X nom "bidule", X creation_date D')
+        self.assertEqual(len(rset.rows), 1)
+
+    def test_select_or_relation(self):
+        self.execute("INSERT Personne X: X nom 'bidule'")
+        self.execute("INSERT Personne X: X nom 'chouette'")
+        self.execute("INSERT Societe X: X nom 'logilab'")
+        self.execute("INSERT Societe X: X nom 'caesium'")
+        self.execute("SET P travaille S WHERE P nom 'bidule', S nom 'logilab'")
+        rset = self.execute('DISTINCT Any P WHERE P travaille S1 OR P travaille S2, S1 nom "logilab", S2 nom "caesium"')
+        self.assertEqual(len(rset.rows), 1)
+        self.execute("SET P travaille S WHERE P nom 'chouette', S nom 'caesium'")
+        rset = self.execute('DISTINCT Any P WHERE P travaille S1 OR P travaille S2, S1 nom "logilab", S2 nom "caesium"')
+        self.assertEqual(len(rset.rows), 2)
+        
+    def test_select_or_sym_relation(self):
+        # combination of OR with a symmetric relation: "P connait S or S connait P"
+        # must not return more rows than either branch alone.
+        self.execute("INSERT Personne X: X nom 'bidule'")
+        self.execute("INSERT Personne X: X nom 'chouette'")
+        self.execute("INSERT Personne X: X nom 'truc'")
+        self.execute("SET P connait S WHERE P nom 'bidule', S nom 'chouette'")
+        rset = self.execute('DISTINCT Any P WHERE S connait P, S nom "chouette"')
+        self.assertEqual(len(rset.rows), 1, rset.rows)
+        rset = self.execute('DISTINCT Any P WHERE P connait S or S connait P, S nom "chouette"')
+        self.assertEqual(len(rset.rows), 1, rset.rows)
+        self.execute("SET P connait S WHERE P nom 'chouette', S nom 'truc'")
+        rset = self.execute('DISTINCT Any P WHERE S connait P, S nom "chouette"')
+        self.assertEqual(len(rset.rows), 2, rset.rows)
+        rset = self.execute('DISTINCT Any P WHERE P connait S OR S connait P, S nom "chouette"')
+        self.assertEqual(len(rset.rows), 2, rset.rows)
+            
+    def test_select_follow_relation(self):
+        self.execute("INSERT Affaire X: X sujet 'cool'")
+        self.execute("INSERT Societe X: X nom 'chouette'")
+        self.execute("SET A concerne S WHERE A is Affaire, S is Societe")
+        self.execute("INSERT Note X: X para 'truc'")
+        self.execute("SET S evaluee N WHERE S is Societe, N is Note")
+        self.execute("INSERT Societe X: X nom 'bidule'")
+        self.execute("INSERT Note X: X para 'troc'")
+        self.execute("SET S evaluee N WHERE S nom 'bidule', N para 'troc'")
+        rset = self.execute('DISTINCT Any A,N WHERE A concerne S, S evaluee N')
+        self.assertEqual(len(rset.rows), 1, rset.rows)
+
+    def test_select_ordered_distinct_1(self):
+        # DISTINCT on S while ordering on R (not selected): duplicates collapse.
+        self.execute("INSERT Affaire X: X sujet 'cool', X ref '1'")
+        self.execute("INSERT Affaire X: X sujet 'cool', X ref '2'")
+        rset = self.execute('DISTINCT Any S ORDERBY R WHERE A is Affaire, A sujet S, A ref R')
+        self.assertEqual(rset.rows, [['cool']])
+
+    def test_select_ordered_distinct_2(self):
+        self.execute("INSERT Affaire X: X sujet 'minor'")
+        self.execute("INSERT Affaire X: X sujet 'important'")
+        self.execute("INSERT Affaire X: X sujet 'normal'")
+        self.execute("INSERT Affaire X: X sujet 'zou'")
+        self.execute("INSERT Affaire X: X sujet 'abcd'")
+        rset = self.execute('DISTINCT Any S ORDERBY S WHERE A is Affaire, A sujet S')
+        self.assertEqual(rset.rows, [['abcd'], ['important'], ['minor'], ['normal'], ['zou']])
+        
+    def test_select_ordered_distinct_3(self):
+        # DISTINCT combined with ordering by a function of the selected variable
+        rset = self.execute('DISTINCT Any N ORDERBY GROUP_SORT_VALUE(N) WHERE X is EGroup, X name N')
+        self.assertEqual(rset.rows, [['owners'], ['guests'], ['users'], ['managers']])
+
+    def test_select_or_value(self):
+        rset = self.execute('Any U WHERE U in_group G, G name "owners" OR G name "users"')
+        self.assertEqual(len(rset.rows), 0)
+        rset = self.execute('Any U WHERE U in_group G, G name "guests" OR G name "managers"')
+        self.assertEqual(len(rset.rows), 2)
+
+    def test_select_explicit_eid(self):
+        rset = self.execute('Any X,E WHERE X owned_by U, X eid E, U eid %(u)s', {'u': self.session.user.eid})
+        self.failUnless(rset)
+        self.assertEquals(rset.description[0][1], 'Int')
+        
+#     def test_select_rewritten_optional(self):
+#         eid = self.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
+#         rset = self.execute('Any X WHERE X eid %(x)s, EXISTS(X owned_by U) OR EXISTS(X concerne S?, S owned_by U)',
+#                             {'x': eid}, 'x')
+#         self.assertEquals(rset.rows, [[eid]])
+        
+    def test_today_bug(self):
+        # TODAY both as an insert value and as a restriction; MAX() on dates
+        # must still yield a date object (not a string), hence the type checks.
+        self.execute("INSERT Tag X: X name 'bidule', X creation_date TODAY")
+        self.execute("INSERT Tag Y: Y name 'toto'")
+        rset = self.execute("Any D WHERE X name in ('bidule', 'toto') , X creation_date D")
+        self.assert_(isinstance(rset.rows[0][0], DateTimeType), rset.rows)
+        rset = self.execute('Tag X WHERE X creation_date TODAY')
+        self.assertEqual(len(rset.rows), 2)
+        rset = self.execute('Any MAX(D) WHERE X is Tag, X creation_date D')
+        self.failUnless(isinstance(rset[0][0], DateTimeType), type(rset[0][0]))
+
+    def test_today(self):
+        self.execute("INSERT Tag X: X name 'bidule', X creation_date TODAY")
+        self.execute("INSERT Tag Y: Y name 'toto'")
+        rset = self.execute('Tag X WHERE X creation_date TODAY')
+        self.assertEqual(len(rset.rows), 2)
+
+    def test_select_boolean(self):
+        # boolean restriction given both as a query argument and as the
+        # TRUE keyword must produce the same result
+        rset = self.execute('Any N WHERE X is EEType, X name N, X final %(val)s',
+                            {'val': True})
+        self.assertEquals(sorted(r[0] for r in rset.rows), ['Boolean', 'Bytes',
+                                                            'Date', 'Datetime',
+                                                            'Decimal', 'Float',
+                                                            'Int', 'Interval',
+                                                            'Password', 'String',
+                                                            'Time'])
+        rset = self.execute('Any N WHERE X is EEType, X name N, X final TRUE')
+        self.assertEquals(sorted(r[0] for r in rset.rows), ['Boolean', 'Bytes',
+                                                            'Date', 'Datetime',
+                                                            'Decimal', 'Float',
+                                                            'Int', 'Interval',
+                                                            'Password', 'String',
+                                                            'Time'])
+        
+    def test_select_constant(self):
+        # selecting a constant (literal or parameter) alongside entities:
+        # the constant is repeated on every row and described as String
+        rset = self.execute('Any X, "toto" ORDERBY X WHERE X is EGroup')
+        self.assertEquals(rset.rows,
+                          map(list, zip((1,2,3,4), ('toto','toto','toto','toto',))))
+        self.assertIsInstance(rset[0][1], unicode)
+        self.assertEquals(rset.description,
+                          zip(('EGroup', 'EGroup', 'EGroup', 'EGroup'),
+                              ('String', 'String', 'String', 'String',)))
+        rset = self.execute('Any X, %(value)s ORDERBY X WHERE X is EGroup', {'value': 'toto'})
+        self.assertEquals(rset.rows,
+                          map(list, zip((1,2,3,4), ('toto','toto','toto','toto',))))
+        self.assertIsInstance(rset[0][1], unicode)
+        self.assertEquals(rset.description,
+                          zip(('EGroup', 'EGroup', 'EGroup', 'EGroup'),
+                              ('String', 'String', 'String', 'String',)))
+        rset = self.execute('Any X,GN WHERE X is EUser, G is EGroup, X login "syt", X in_group G, G name GN')
+
+    def test_select_union(self):
+        rset = self.execute('Any X,N ORDERBY N WITH X,N BEING '
+                            '((Any X,N WHERE X name N, X transition_of E, E name %(name)s)'
+                            ' UNION '
+                            '(Any X,N WHERE X name N, X state_of E, E name %(name)s))',
+                            {'name': 'EUser'})
+        self.assertEquals([x[1] for x in rset.rows],
+                          ['activate', 'activated', 'deactivate', 'deactivated'])
+        self.assertEquals(rset.description,
+                          [('Transition', 'String'), ('State', 'String'),
+                           ('Transition', 'String'), ('State', 'String')])
+        
+    def test_select_union_aggregat(self):
+        # meaningless, the goal is to have group by done on a different attribute
+        # for each sub-query
+        self.execute('(Any N,COUNT(X) GROUPBY N WHERE X name N, X is State)'
+                     ' UNION '
+                     '(Any N,COUNT(X) GROUPBY N ORDERBY 2 WHERE X login N)')
+        
+    def test_select_union_aggregat_independant_group(self):
+        # each sub-query carries its own GROUPBY/HAVING clause
+        self.execute('INSERT State X: X name "hop"')
+        self.execute('INSERT State X: X name "hop"')
+        self.execute('INSERT Transition X: X name "hop"')
+        self.execute('INSERT Transition X: X name "hop"')
+        rset = self.execute('Any N,NX ORDERBY 2 WITH N,NX BEING '
+                            '((Any N,COUNT(X) GROUPBY N WHERE X name N, X is State HAVING COUNT(X)>1)'
+                            ' UNION '
+                            '(Any N,COUNT(X) GROUPBY N WHERE X name N, X is Transition HAVING COUNT(X)>1))')
+        self.assertEquals(rset.rows, [[u'hop', 2], [u'hop', 2]])
+        
+    def test_select_union_selection_with_diff_variables(self):
+        rset = self.execute('(Any N WHERE X name N, X is State)'
+                            ' UNION '
+                            '(Any NN WHERE XX name NN, XX is Transition)')
+        self.assertEquals(sorted(r[0] for r in rset.rows),
+                          ['abort', 'activate', 'activated', 'ben non',
+                           'deactivate', 'deactivated', 'done', 'en cours',
+                           'end', 'finie', 'markasdone', 'pitetre', 'redoit',
+                           'start', 'todo'])
+        
+    def test_exists(self):
+        geid = self.execute("INSERT EGroup X: X name 'lulufanclub'")[0][0]
+        self.execute("SET U in_group G WHERE G name 'lulufanclub'")
+        peid = self.execute("INSERT Personne X: X prenom 'lulu', X nom 'petit'")[0][0]
+        rset = self.execute("Any X WHERE X prenom 'lulu',"
+                            "EXISTS (U in_group G, G name 'lulufanclub' OR G name 'managers');")
+        self.assertEquals(rset.rows, [[peid]])
+
+    def test_identity(self):
+        # 'identity' is usable in both directions and chains through attributes
+        eid = self.execute('Any X WHERE X identity Y, Y eid 1')[0][0]
+        self.assertEquals(eid, 1)
+        eid = self.execute('Any X WHERE Y identity X, Y eid 1')[0][0]
+        self.assertEquals(eid, 1)
+        login = self.execute('Any L WHERE X login "admin", X identity Y, Y login L')[0][0]
+        self.assertEquals(login, 'admin')
+
+    def test_select_date_mathexp(self):
+        # date arithmetic (TODAY - D, NOW - D) should be described as Interval
+        rset = self.execute('Any X, TODAY - CD WHERE X is EUser, X creation_date CD')
+        self.failUnless(rset)
+        self.failUnlessEqual(rset.description[0][1], 'Interval')
+        eid, = self.execute("INSERT Personne X: X nom 'bidule'")[0]
+        rset = self.execute('Any X, NOW - CD WHERE X is Personne, X creation_date CD')
+        self.failUnlessEqual(rset.description[0][1], 'Interval')
+        # sqlite bug
+        #from mx.DateTime import DateTimeDeltaType
+        #self.assertIsInstance(rset[0][1], DateTimeDeltaType) 
+        #self.failUnless(rset[0][1].seconds > 0)
+
+    def test_select_subquery_aggregat(self):
+        # percent users by groups
+        self.execute('SET X in_group G WHERE G name "users"')
+        rset = self.execute('Any GN, COUNT(X)*100/T GROUPBY GN ORDERBY 2,1'
+                            ' WHERE G name GN, X in_group G'
+                            ' WITH T BEING (Any COUNT(U) WHERE U is EUser)')
+        self.assertEquals(rset.rows, [[u'guests', 50], [u'managers', 50], [u'users', 100]])
+        self.assertEquals(rset.description, [('String', 'Int'), ('String', 'Int'), ('String', 'Int')])
+
+    def test_select_subquery_const(self):
+        # NULL yields a None row with a None description entry
+        rset = self.execute('Any X WITH X BEING ((Any NULL) UNION (Any "toto"))')
+        self.assertEquals(rset.rows, [[None], ['toto']])
+        self.assertEquals(rset.description, [(None,), ('String',)])
+                          
+    # insertion queries tests #################################################
+    
+    def test_insert_is(self):
+        eid, = self.execute("INSERT Personne X: X nom 'bidule'")[0]
+        etype, = self.execute("Any TN WHERE X is T, X eid %s, T name TN" % eid)[0]
+        self.assertEquals(etype, 'Personne')
+        self.execute("INSERT Personne X: X nom 'managers'")
+    
+    def test_insert_1(self):
+        rset = self.execute("INSERT Personne X: X nom 'bidule'")
+        self.assertEquals(len(rset.rows), 1)
+        self.assertEquals(rset.description, [('Personne',)])
+        rset = self.execute('Personne X WHERE X nom "bidule"')
+        self.assert_(rset.rows)
+        self.assertEquals(rset.description, [('Personne',)])
+
+    def test_insert_1_multiple(self):
+        # WHERE clause matching 2 entities: one Societe inserted per match
+        self.execute("INSERT Personne X: X nom 'bidule'")
+        self.execute("INSERT Personne X: X nom 'chouette'")
+        rset = self.execute("INSERT Societe Y: Y nom N, P travaille Y WHERE P nom N")
+        self.assertEquals(len(rset.rows), 2)
+        self.assertEquals(rset.description, [('Societe',), ('Societe',)])
+
+    def test_insert_2(self):
+        rset = self.execute("INSERT Personne X, Personne Y: X nom 'bidule', Y nom 'tutu'")
+        self.assertEquals(rset.description, [('Personne', 'Personne')])
+        rset = self.execute('Personne X WHERE X nom "bidule" or X nom "tutu"')
+        self.assert_(rset.rows)
+        self.assertEquals(rset.description, [('Personne',), ('Personne',)])
+
+    def test_insert_3(self):
+        # attribute value taken from another entity through the WHERE clause
+        self.execute("INSERT Personne X: X nom Y WHERE U login 'admin', U login Y")
+        rset = self.execute('Personne X WHERE X nom "admin"')
+        self.assert_(rset.rows)
+        self.assertEquals(rset.description, [('Personne',)])        
+
+    def test_insert_4(self):
+        self.execute("INSERT Societe Y: Y nom 'toto'")
+        self.execute("INSERT Personne X: X nom 'bidule', X travaille Y WHERE Y nom 'toto'")
+        rset = self.execute('Any X, Y WHERE X nom "bidule", Y nom "toto", X travaille Y')
+        self.assert_(rset.rows)
+        self.assertEquals(rset.description, [('Personne', 'Societe',)])
+        
+    def test_insert_4bis(self):
+        # same as test_insert_4ter but eids passed as str instead of unicode
+        peid = self.execute("INSERT Personne X: X nom 'bidule'")[0][0]
+        seid = self.execute("INSERT Societe Y: Y nom 'toto', X travaille Y WHERE X eid %(x)s",
+                             {'x': str(peid)})[0][0]
+        self.assertEqual(len(self.execute('Any X, Y WHERE X travaille Y')), 1)
+        self.execute("INSERT Personne X: X nom 'chouette', X travaille Y WHERE Y eid %(x)s",
+                      {'x': str(seid)})
+        self.assertEqual(len(self.execute('Any X, Y WHERE X travaille Y')), 2)
+        
+    def test_insert_4ter(self):
+        # same as test_insert_4bis but eids passed as unicode instead of str
+        peid = self.execute("INSERT Personne X: X nom 'bidule'")[0][0]
+        seid = self.execute("INSERT Societe Y: Y nom 'toto', X travaille Y WHERE X eid %(x)s",
+                             {'x': unicode(peid)})[0][0]
+        self.assertEqual(len(self.execute('Any X, Y WHERE X travaille Y')), 1)
+        self.execute("INSERT Personne X: X nom 'chouette', X travaille Y WHERE Y eid %(x)s",
+                      {'x': unicode(seid)})
+        self.assertEqual(len(self.execute('Any X, Y WHERE X travaille Y')), 2)
+
+    def test_insert_5(self):
+        self.execute("INSERT Personne X: X nom 'bidule'")
+        self.execute("INSERT Societe Y: Y nom 'toto', X travaille Y WHERE X nom 'bidule'")
+        rset = self.execute('Any X, Y WHERE X nom "bidule", Y nom "toto", X travaille Y')
+        self.assert_(rset.rows)
+        self.assertEquals(rset.description, [('Personne', 'Societe',)])
+
+    def test_insert_6(self):
+        self.execute("INSERT Personne X, Societe Y: X nom 'bidule', Y nom 'toto', X travaille Y")
+        rset = self.execute('Any X, Y WHERE X nom "bidule", Y nom "toto", X travaille Y')
+        self.assert_(rset.rows)
+        self.assertEquals(rset.description, [('Personne', 'Societe',)])
+
+    def test_insert_7(self):
+        self.execute("INSERT Personne X, Societe Y: X nom N, Y nom 'toto', X travaille Y WHERE U login 'admin', U login N")
+        rset = self.execute('Any X, Y WHERE X nom "admin", Y nom "toto", X travaille Y')
+        self.assert_(rset.rows)
+        self.assertEquals(rset.description, [('Personne', 'Societe',)])
+
+    def test_insert_8(self):
+        self.execute("INSERT Societe Y, Personne X: Y nom N, X nom 'toto', X travaille Y WHERE U login 'admin', U login N")
+        rset = self.execute('Any X, Y WHERE X nom "toto", Y nom "admin", X travaille Y')
+        self.assert_(rset.rows)
+        self.assertEquals(rset.description, [('Personne', 'Societe',)])
+
+    def test_insert_query_error(self):
+        # 'is', 'is_instance_of', 'has_text' and 'eid' can not be set manually
+        self.assertRaises(Exception,
+                          self.execute,
+                          "INSERT Personne X: X nom 'toto', X is Personne")
+        self.assertRaises(Exception,
+                          self.execute,
+                          "INSERT Personne X: X nom 'toto', X is_instance_of Personne")
+        self.assertRaises(QueryError,
+                          self.execute,
+                          "INSERT Personne X: X nom 'toto', X has_text 'tutu'")
+
+        self.assertRaises(QueryError,
+                          self.execute,
+                          "INSERT EUser X: X login 'toto', X eid %s" % cnx.user(self.session).eid)
+
+    def test_insertion_description_with_where(self):
+        rset = self.execute('INSERT EUser E, EmailAddress EM: E login "X", E upassword "X", '
+                            'E primary_email EM, EM address "X", E in_group G '
+                            'WHERE G name "managers"')
+        self.assertEquals(list(rset.description[0]), ['EUser', 'EmailAddress'])
+    
+    # deletion queries tests ##################################################
+
+    def test_delete_1(self):
+        self.execute("INSERT Personne Y: Y nom 'toto'")
+        rset = self.execute('Personne X WHERE X nom "toto"')
+        self.assertEqual(len(rset.rows), 1)
+        self.execute("DELETE Personne Y WHERE Y nom 'toto'")
+        rset = self.execute('Personne X WHERE X nom "toto"')
+        self.assertEqual(len(rset.rows), 0)
+        
+    def test_delete_2(self):
+        rset = self.execute("INSERT Personne X, Personne Y, Societe Z : X nom 'syt', Y nom 'adim', Z nom 'Logilab', X travaille Z, Y travaille Z")
+        self.assertEquals(len(rset), 1)
+        self.assertEquals(len(rset[0]), 3)
+        self.assertEquals(rset.description[0], ('Personne', 'Personne', 'Societe'))
+        self.assertEquals(self.execute('Any N WHERE X nom N, X eid %s'% rset[0][0])[0][0], 'syt')
+        rset = self.execute('Personne X WHERE X travaille Y, Y nom "Logilab"')
+        self.assertEqual(len(rset.rows), 2, rset.rows)
+        # deliberate typo 'Logilabo': a DELETE matching nothing deletes nothing
+        self.execute("DELETE X travaille Y WHERE X is Personne, Y nom 'Logilabo'")
+        rset = self.execute('Personne X WHERE X travaille Y, Y nom "Logilab"')
+        self.assertEqual(len(rset.rows), 2, rset.rows)
+        self.execute("DELETE X travaille Y WHERE X is Personne, Y nom 'Logilab'")
+        rset = self.execute('Personne X WHERE X travaille Y, Y nom "Logilab"')
+        self.assertEqual(len(rset.rows), 0, rset.rows)
+
+    def test_delete_3(self):
+        # same scenario but going through a plain 'users' session
+        u, s = self._user_session(('users',))
+        peid, = self.o.execute(s, "INSERT Personne P: P nom 'toto'")[0]
+        seid, = self.o.execute(s, "INSERT Societe S: S nom 'logilab'")[0]
+        self.o.execute(s, "SET P travaille S")
+        rset = self.execute('Personne P WHERE P travaille S')
+        self.assertEqual(len(rset.rows), 1)
+        self.execute("DELETE X travaille Y WHERE X eid %s, Y eid %s" % (peid, seid))
+        rset = self.execute('Personne P WHERE P travaille S')
+        self.assertEqual(len(rset.rows), 0)
+
+    def test_delete_symetric(self):
+        # deleting a symmetric relation in either direction (1->2 or 2->1)
+        # must remove the link seen from both sides
+        teid1 = self.execute("INSERT Folder T: T name 'toto'")[0][0]
+        teid2 = self.execute("INSERT Folder T: T name 'tutu'")[0][0]
+        self.execute('SET X see_also Y WHERE X eid %s, Y eid %s' % (teid1, teid2))
+        rset = self.execute('Any X,Y WHERE X see_also Y')
+        self.assertEquals(len(rset) , 2, rset.rows)
+        self.execute('DELETE X see_also Y WHERE X eid %s, Y eid %s' % (teid1, teid2))
+        rset = self.execute('Any X,Y WHERE X see_also Y')
+        self.assertEquals(len(rset) , 0)
+        self.execute('SET X see_also Y WHERE X eid %s, Y eid %s' % (teid1, teid2))
+        rset = self.execute('Any X,Y WHERE X see_also Y')
+        self.assertEquals(len(rset) , 2)
+        self.execute('DELETE X see_also Y WHERE X eid %s, Y eid %s' % (teid2, teid1))
+        rset = self.execute('Any X,Y WHERE X see_also Y')
+        self.assertEquals(len(rset) , 0)
+
+    def test_nonregr_delete_cache(self):
+        """test that relations are properly cleaned when an entity is deleted
+        (using cachekey on sql generation always returned the same query for an eid,
+        whatever the relation)
+        """
+        u, s = self._user_session(('users',))
+        aeid, = self.o.execute(s, 'INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"')[0]
+        # XXX would be nice if the rql below was enough...
+        #'INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, X recipients Y'
+        eeid, = self.o.execute(s, 'INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, X recipients Y WHERE Y is EmailAddress')[0]
+        self.o.execute(s, "DELETE Email X")
+        # check at the sql level that relation rows are gone as well
+        sqlc = s.pool['system']
+        sqlc.execute('SELECT * FROM recipients_relation')
+        self.assertEquals(len(sqlc.fetchall()), 0)
+        sqlc.execute('SELECT * FROM owned_by_relation WHERE eid_from=%s'%eeid)
+        self.assertEquals(len(sqlc.fetchall()), 0)
+            
+    def test_nonregr_delete_cache2(self):
+        eid = self.execute("INSERT Folder T: T name 'toto'")[0][0]
+        self.commit()
+        # fill the cache
+        self.execute("Any X WHERE X eid %(x)s", {'x': eid}, 'x')
+        self.execute("Any X WHERE X eid %s" %eid)
+        self.execute("Folder X WHERE X eid %(x)s", {'x': eid}, 'x')
+        self.execute("Folder X WHERE X eid %s" %eid)
+        self.execute("DELETE Folder T WHERE T eid %s"%eid)
+        self.commit()
+        # cached queries must not return the now-deleted entity
+        rset = self.execute("Any X WHERE X eid %(x)s", {'x': eid}, 'x')
+        self.assertEquals(rset.rows, [])
+        rset = self.execute("Any X WHERE X eid %s" %eid)
+        self.assertEquals(rset.rows, [])
+        rset = self.execute("Folder X WHERE X eid %(x)s", {'x': eid}, 'x')
+        self.assertEquals(rset.rows, [])
+        rset = self.execute("Folder X WHERE X eid %s" %eid)
+        self.assertEquals(rset.rows, [])
+        
+    # update queries tests ####################################################
+
+    def test_update_1(self):
+        self.execute("INSERT Personne Y: Y nom 'toto'")
+        rset = self.execute('Personne X WHERE X nom "toto"')
+        self.assertEqual(len(rset.rows), 1)
+        self.execute("SET X nom 'tutu', X prenom 'original' WHERE X is Personne, X nom 'toto'")
+        rset = self.execute('Any Y, Z WHERE X is Personne, X nom Y, X prenom Z')
+        self.assertEqual(tuplify(rset.rows), [('tutu', 'original')])
+        
+    def test_update_2(self):
+        self.execute("INSERT Personne X, Societe Y: X nom 'bidule', Y nom 'toto'")
+        #rset = self.execute('Any X, Y WHERE X nom "bidule", Y nom "toto"')
+        #self.assertEqual(len(rset.rows), 1)
+        #rset = self.execute('Any X, Y WHERE X travaille Y')
+        #self.assertEqual(len(rset.rows), 0)
+        self.execute("SET X travaille Y WHERE X nom 'bidule', Y nom 'toto'")
+        rset = self.execute('Any X, Y WHERE X travaille Y')
+        self.assertEqual(len(rset.rows), 1)
+        
+    def test_update_2bis(self):
+        # same as test_update_2ter but eids passed as str instead of unicode
+        rset = self.execute("INSERT Personne X, Societe Y: X nom 'bidule', Y nom 'toto'")
+        eid1, eid2 = rset[0][0], rset[0][1]
+        self.execute("SET X travaille Y WHERE X eid %(x)s, Y eid %(y)s",
+                      {'x': str(eid1), 'y': str(eid2)})
+        rset = self.execute('Any X, Y WHERE X travaille Y')
+        self.assertEqual(len(rset.rows), 1)
+        
+    def test_update_2ter(self):
+        # same as test_update_2bis but eids passed as unicode instead of str
+        rset = self.execute("INSERT Personne X, Societe Y: X nom 'bidule', Y nom 'toto'")
+        eid1, eid2 = rset[0][0], rset[0][1]
+        self.execute("SET X travaille Y WHERE X eid %(x)s, Y eid %(y)s",
+                      {'x': unicode(eid1), 'y': unicode(eid2)})
+        rset = self.execute('Any X, Y WHERE X travaille Y')
+        self.assertEqual(len(rset.rows), 1)
+        
+##     def test_update_4(self):
+##         self.execute("SET X know Y WHERE X ami Y")
+        
+    def test_update_multiple1(self):
+        # swap two names in a single SET: both updates apply to the pre-update
+        # state, not sequentially
+        peid1 = self.execute("INSERT Personne Y: Y nom 'tutu'")[0][0]
+        peid2 = self.execute("INSERT Personne Y: Y nom 'toto'")[0][0]
+        self.execute("SET X nom 'tutu', Y nom 'toto' WHERE X nom 'toto', Y nom 'tutu'")
+        self.assertEquals(self.execute('Any X WHERE X nom "toto"').rows, [[peid1]])
+        self.assertEquals(self.execute('Any X WHERE X nom "tutu"').rows, [[peid2]])
+
+    def test_update_multiple2(self):
+        ueid = self.execute("INSERT EUser X: X login 'bob', X upassword 'toto'")[0][0]
+        peid1 = self.execute("INSERT Personne Y: Y nom 'turlu'")[0][0]
+        peid2 = self.execute("INSERT Personne Y: Y nom 'tutu'")[0][0]
+        self.execute('SET P1 owned_by U, P2 owned_by U '
+                     'WHERE P1 eid %s, P2 eid %s, U eid %s' % (peid1, peid2, ueid))
+        self.failUnless(self.execute('Any X WHERE X eid %s, X owned_by U, U eid %s'
+                                       % (peid1, ueid)))
+        self.failUnless(self.execute('Any X WHERE X eid %s, X owned_by U, U eid %s'
+                                       % (peid2, ueid)))
+
+    def test_update_math_expr(self):
+        orders = [r[0] for r in self.execute('Any O ORDERBY O WHERE ST name "Personne", X from_entity ST, X ordernum O')]
+        # find the first index where ordernum differs from the smallest value
+        for i,v in enumerate(orders):
+            if v != orders[0]:
+                splitidx = i
+                break
+        # shift all ordernums >= the split value by one using Y+1 in SET
+        self.execute('SET X ordernum Y+1 WHERE X from_entity SE, SE name "Personne", X ordernum Y, X ordernum >= %(order)s',
+                     {'order': orders[splitidx]})
+        orders2 = [r[0] for r in self.execute('Any O ORDERBY O WHERE ST name "Personne", X from_entity ST, X ordernum O')]
+        orders = orders[:splitidx] + [o+1 for o in orders[splitidx:]]
+        self.assertEquals(orders2, orders)
+
+    def test_update_string_concat(self):
+        beid = self.execute("INSERT Bookmark Y: Y title 'toto', Y path '/view'")[0][0]
+        self.execute('SET X title XN + %(suffix)s WHERE X is Bookmark, X title XN', {'suffix': u'-moved'})
+        newname = self.execute('Any XN WHERE X eid %(x)s, X title XN', {'x': beid}, 'x')[0][0]
+        self.assertEquals(newname, 'toto-moved')
+                       
+    def test_update_query_error(self):
+        # 'is', 'has_text' and 'eid' can not be updated
+        self.execute("INSERT Personne Y: Y nom 'toto'")
+        self.assertRaises(Exception, self.execute, "SET X nom 'toto', X is Personne")
+        self.assertRaises(QueryError, self.execute, "SET X nom 'toto', X has_text 'tutu' WHERE X is Personne")
+        self.assertRaises(QueryError, self.execute, "SET X login 'tutu', X eid %s" % cnx.user(self.session).eid)
+
+       
+    # upassword encryption tests #################################################
+    
+    def test_insert_upassword(self):
+        rset = self.execute("INSERT EUser X: X login 'bob', X upassword 'toto'")
+        self.assertEquals(len(rset.rows), 1)
+        self.assertEquals(rset.description, [('EUser',)])
+        # reading the password attribute through RQL is forbidden
+        self.assertRaises(Unauthorized,
+                          self.execute, "Any P WHERE X is EUser, X login 'bob', X upassword P")
+        # check at the sql level that the stored value is crypted
+        # (passwd[:2] is passed as the salt argument to crypt_password)
+        cursor = self.pool['system']
+        cursor.execute("SELECT upassword from EUser WHERE login='bob'")
+        passwd = cursor.fetchone()[0].getvalue()
+        self.assertEquals(passwd, crypt_password('toto', passwd[:2])) 
+        rset = self.execute("Any X WHERE X is EUser, X login 'bob', X upassword '%s'" % passwd)
+        self.assertEquals(len(rset.rows), 1)
+        self.assertEquals(rset.description, [('EUser',)])
+        
+    def test_update_upassword(self):
+        cursor = self.pool['system']
+        rset = self.execute("INSERT EUser X: X login 'bob', X upassword %(pwd)s", {'pwd': 'toto'})
+        self.assertEquals(rset.description[0][0], 'EUser')
+        rset = self.execute("SET X upassword %(pwd)s WHERE X is EUser, X login 'bob'",
+                            {'pwd': 'tutu'})
+        # updated value must be crypted as well
+        cursor.execute("SELECT upassword from EUser WHERE login='bob'")
+        passwd = cursor.fetchone()[0].getvalue()
+        self.assertEquals(passwd, crypt_password('tutu', passwd[:2])) 
+        rset = self.execute("Any X WHERE X is EUser, X login 'bob', X upassword '%s'" % passwd)
+        self.assertEquals(len(rset.rows), 1)
+        self.assertEquals(rset.description, [('EUser',)])
+
+    # non regression tests ####################################################
+    
+    def test_nonregr_1(self):
+        teid = self.execute("INSERT Tag X: X name 'tag'")[0][0]
+        self.execute("SET X tags Y WHERE X name 'tag', Y is State, Y name 'activated'")
+        rset = self.execute('Any X WHERE T tags X')
+        self.assertEquals(len(rset.rows), 1, rset.rows)
+        rset = self.execute('Any T WHERE T tags X, X is State')
+        self.assertEquals(rset.rows, [[teid]])
+        rset = self.execute('Any T WHERE T tags X')
+        self.assertEquals(rset.rows, [[teid]])
+
+    def test_nonregr_2(self):
+        teid = self.execute("INSERT Tag X: X name 'tag'")[0][0]
+        geid = self.execute("EGroup G WHERE G name 'users'")[0][0]
+        self.execute("SET X tags Y WHERE X eid %(t)s, Y eid %(g)s",
+                       {'g': geid, 't': teid})
+        rset = self.execute('Any X WHERE E eid %(x)s, E tags X',
+                              {'x': teid})
+        self.assertEquals(rset.rows, [[geid]])
+        
+    def test_nonregr_3(self):
+        """bad sql generated on the second query (destination_state is not
+        detected as an inlined relation)
+        """
+        rset = self.execute('Any S,ES,T WHERE S state_of ET, ET name "EUser",'
+                             'ES allowed_transition T, T destination_state S')
+        self.assertEquals(len(rset.rows), 2)
+
+    def test_nonregr_4(self):
+        # fix variables' type, else we get (nb of entity types with a 'name' attribute)**3
+        # union queries and that makes for instance a 266Ko sql query which is refused
+        # by the server (or client lib)
+        rset = self.execute('Any ER,SE,OE WHERE SE name "Comment", ER name "comments", OE name "Comment",'
+                            'ER is ERType, SE is EEType, OE is EEType')
+        self.assertEquals(len(rset), 1)
+
+    def test_nonregr_5(self):
+        # jpl #15505: equivalent queries returning different result sets
+        teid1 = self.execute("INSERT Folder X: X name 'hop'")[0][0]
+        teid2 = self.execute("INSERT Folder X: X name 'hip'")[0][0]
+        neid = self.execute("INSERT Note X: X todo_by U, X filed_under T WHERE U login 'admin', T name 'hop'")[0][0]
+        weid = self.execute("INSERT Affaire X: X concerne N, X filed_under T WHERE N is Note, T name 'hip'")[0][0]
+        # four formulations of the same query (with/without 'W is Affaire',
+        # different relation ordering) must all return the same rows
+        rset1 = self.execute('Any N,U WHERE N filed_under T, T eid %s,'
+                             'N todo_by U, W concerne N,'
+                             'W is Affaire, W filed_under A, A eid %s' % (teid1, teid2))
+        rset2 = self.execute('Any N,U WHERE N filed_under T, T eid %s,'
+                             'N todo_by U, W concerne N,'
+                             'W filed_under A, A eid %s' % (teid1, teid2))
+        rset3 = self.execute('Any N,U WHERE N todo_by U, T eid %s,'
+                             'N filed_under T, W concerne N,'
+                             'W is Affaire, W filed_under A, A eid %s' % (teid1, teid2))
+        rset4 = self.execute('Any N,U WHERE N todo_by U, T eid %s,'
+                             'N filed_under T, W concerne N,'
+                             'W filed_under A, A eid %s' % (teid1, teid2))
+        self.assertEquals(rset1.rows, rset2.rows)
+        self.assertEquals(rset1.rows, rset3.rows)
+        self.assertEquals(rset1.rows, rset4.rows)
+        
+    def test_nonregr_6(self):
+        self.execute('Any N,COUNT(S) GROUPBY N ORDERBY COUNT(N) WHERE S name N, S is State')
+        
+    def test_sqlite_encoding(self):
+        """XXX this test was trying to show a bug on use of lower which only
+        occurs with non ascii string and misconfigured locale
+        """
+        self.execute("INSERT Tag X: X name %(name)s,"
+                       "X modification_date %(modification_date)s,"
+                       "X creation_date %(creation_date)s",
+                       {'name': u'éname0',
+                        'modification_date': '2003/03/12 11:00',
+                        'creation_date': '2000/07/03 11:00'})
+        rset = self.execute('Any lower(N) ORDERBY LOWER(N) WHERE X is Tag, X name N,'
+                            'X owned_by U, U eid %(x)s',
+                            {'x':self.session.user.eid}, 'x')
+        self.assertEquals(rset.rows, [[u'\xe9name0']])
+
+
+    def test_nonregr_description(self):
+        """check that a correct description is built in case where infered
+        solutions may be "fusionned" into one by the querier while all solutions
+        are needed to build the result's description
+        """
+        self.execute("INSERT Personne X: X nom 'bidule'")
+        self.execute("INSERT Societe Y: Y nom 'toto'")
+        beid = self.execute("INSERT Basket B: B name 'mybasket'")[0][0]
+        self.execute("SET X in_basket B WHERE X is Personne")
+        self.execute("SET X in_basket B WHERE X is Societe")
+        rset = self.execute('Any X WHERE X in_basket B, B eid %s' % beid)
+        self.assertEquals(len(rset), 2)
+        self.assertEquals(rset.description, [('Personne',), ('Societe',)])
+
+
+    def test_nonregr_cache_1(self):
+        peid = self.execute("INSERT Personne X: X nom 'bidule'")[0][0]
+        beid = self.execute("INSERT Basket X: X name 'tag'")[0][0]
+        self.execute("SET X in_basket Y WHERE X is Personne, Y eid %(y)s",
+                       {'y': beid})
+        rset = self.execute("Any X WHERE X in_basket B, B eid %(x)s",
+                       {'x': beid})
+        self.assertEquals(rset.rows, [[peid]])
+        rset = self.execute("Any X WHERE X in_basket B, B eid %(x)s",
+                       {'x': beid})
+        self.assertEquals(rset.rows, [[peid]])
+
+    def test_nonregr_has_text_cache(self):
+        eid1 = self.execute("INSERT Personne X: X nom 'bidule'")[0][0]
+        eid2 = self.execute("INSERT Personne X: X nom 'tag'")[0][0]
+        self.commit()
+        rset = self.execute("Any X WHERE X has_text %(text)s", {'text': 'bidule'})
+        self.assertEquals(rset.rows, [[eid1]])
+        rset = self.execute("Any X WHERE X has_text %(text)s", {'text': 'tag'})
+        self.assertEquals(rset.rows, [[eid2]])
+
+    def test_nonregr_sortterm_management(self):
+        """Error: Variable has no attribute 'sql' in rql2sql.py (visit_variable)
+
+        cause: old variable ref inserted into a fresh rqlst copy
+        (in RQLSpliter._complex_select_plan)
+        """
+        self.execute('Any X ORDERBY D DESC WHERE X creation_date D')
+    
+    def test_nonregr_extra_joins(self):
+        ueid = self.session.user.eid
+        teid1 = self.execute("INSERT Folder X: X name 'folder1'")[0][0]
+        teid2 = self.execute("INSERT Folder X: X name 'folder2'")[0][0]
+        neid1 = self.execute("INSERT Note X: X para 'note1'")[0][0]
+        neid2 = self.execute("INSERT Note X: X para 'note2'")[0][0]
+        self.execute("SET X filed_under Y WHERE X eid %s, Y eid %s"
+                       % (neid1, teid1))
+        self.execute("SET X filed_under Y WHERE X eid %s, Y eid %s"
+                       % (neid2, teid2))
+        self.execute("SET X todo_by Y WHERE X is Note, Y eid %s" % ueid)
+        rset = self.execute('Any N WHERE N todo_by U, N is Note, U eid %s, N filed_under T, T eid %s'
+                             % (ueid, teid1))
+        self.assertEquals(len(rset), 1)
+
+    def test_nonregr_XXX(self):
+        teid = self.execute('Transition S WHERE S name "deactivate"')[0][0]
+        rset = self.execute('Any O WHERE O is State, '
+                             'S eid %(x)s, S transition_of ET, O state_of ET', {'x': teid})
+        self.assertEquals(len(rset), 2)
+        rset = self.execute('Any O WHERE O is State, NOT S destination_state O, '
+                             'S eid %(x)s, S transition_of ET, O state_of ET', {'x': teid})
+        self.assertEquals(len(rset), 1)
+
+
+    def test_nonregr_set_datetime(self):
+        # huum, psycopg specific
+        self.execute('SET X creation_date %(date)s WHERE X eid 1', {'date': today()})
+
+    def test_nonregr_set_query(self):
+        ueid = self.execute("INSERT EUser X: X login 'bob', X upassword 'toto'")[0][0]
+        self.execute("SET E in_group G, E in_state S, "
+                      "E firstname %(firstname)s, E surname %(surname)s "
+                      "WHERE E eid %(x)s, G name 'users', S name 'activated'",
+                      {'x':ueid, 'firstname': u'jean', 'surname': u'paul'}, 'x')
+        
+    def test_nonregr_u_owned_by_u(self):
+        ueid = self.execute("INSERT EUser X: X login 'bob', X upassword 'toto', X in_group G "
+                             "WHERE G name 'users'")[0][0]
+        rset = self.execute("EUser U")
+        self.assertEquals(len(rset), 3) # bob + admin + anon
+        rset = self.execute("Any U WHERE NOT U owned_by U")
+        self.assertEquals(len(rset), 0) # even admin created at repo initialization time should belong to itself
+
+    def test_nonreg_update_index(self):
+        # this is the kind of queries generated by "cubicweb-ctl db-check -ry"
+        self.execute("SET X description D WHERE X is State, X description D")
+
+    def test_nonregr_is(self):
+        uteid = self.execute('Any ET WHERE ET name "EUser"')[0][0]
+        self.execute('Any X, ET WHERE X is ET, ET eid %s' % uteid)
+
+    def test_nonregr_orderby(self):
+        seid = self.execute('Any X WHERE X name "activated"')[0][0]
+        self.execute('Any X,S, MAX(T) GROUPBY X,S ORDERBY S WHERE X is EUser, T tags X, S eid IN(%s), X in_state S' % seid)
+
+    def test_nonregr_solution_cache(self):
+        self.skip('XXX should be fixed or documented') # (doesn't occur if cache key is provided.)
+        rset = self.execute('Any X WHERE X is EUser, X eid %(x)s', {'x':self.ueid})
+        self.assertEquals(len(rset), 1)
+        rset = self.execute('Any X WHERE X is EUser, X eid %(x)s', {'x':12345})
+        self.assertEquals(len(rset), 0)
+
+    def test_nonregr_final_norestr(self):
+        self.assertRaises(BadRQLQuery, self.execute, 'Date X')
+
+
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/unittest_repository.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,478 @@
+# -*- coding: iso-8859-1 -*-
+"""unit tests for module cubicweb.server.repository"""
+
+import os
+import sys
+import threading
+import time
+from copy import deepcopy
+
+from mx.DateTime import DateTimeType, now
+from logilab.common.testlib import TestCase, unittest_main
+from cubicweb.devtools.apptest import RepositoryBasedTC
+from cubicweb.devtools.repotest import tuplify
+
+from yams.constraints import UniqueConstraint
+
+from cubicweb import BadConnectionId, RepositoryError, ValidationError, UnknownEid, AuthenticationError
+from cubicweb.schema import CubicWebSchema, RQLConstraint
+from cubicweb.dbapi import connect, repo_connect
+
+from cubicweb.server import repository 
+
+
+# start name server anyway, process will fail if already running
+os.system('pyro-ns >/dev/null 2>/dev/null &')
+
+
+class RepositoryTC(RepositoryBasedTC):
+    """ singleton providing access to a persistent storage for entities
+    and relation
+    """
+    
+#     def setUp(self):
+#         pass
+    
+#     def tearDown(self):
+#         self.repo.config.db_perms = True
+#         cnxid = self.repo.connect(*self.default_user_password())
+#         for etype in ('Affaire', 'Note', 'Societe', 'Personne'):
+#             self.repo.execute(cnxid, 'DELETE %s X' % etype)
+#             self.repo.commit(cnxid)
+#         self.repo.close(cnxid)
+
+    def test_fill_schema(self):
+        self.repo.schema = CubicWebSchema(self.repo.config.appid)
+        self.repo.config._cubes = None # avoid assertion error
+        self.repo.fill_schema()
+        pool = self.repo._get_pool()
+        try:
+            sqlcursor = pool['system']
+            sqlcursor.execute('SELECT name FROM EEType WHERE final is NULL')
+            self.assertEquals(sqlcursor.fetchall(), [])
+            sqlcursor.execute('SELECT name FROM EEType WHERE final=%(final)s ORDER BY name', {'final': 'TRUE'})
+            self.assertEquals(sqlcursor.fetchall(), [(u'Boolean',), (u'Bytes',),
+                                                     (u'Date',), (u'Datetime',),
+                                                     (u'Decimal',),(u'Float',),
+                                                     (u'Int',),
+                                                     (u'Interval',), (u'Password',),
+                                                     (u'String',), (u'Time',)])
+        finally:
+            self.repo._free_pool(pool)
+            
+    def test_schema_has_owner(self):
+        repo = self.repo
+        cnxid = repo.connect(*self.default_user_password())
+        self.failIf(repo.execute(cnxid, 'EEType X WHERE NOT X owned_by U'))
+        self.failIf(repo.execute(cnxid, 'ERType X WHERE NOT X owned_by U'))
+        self.failIf(repo.execute(cnxid, 'EFRDef X WHERE NOT X owned_by U'))
+        self.failIf(repo.execute(cnxid, 'ENFRDef X WHERE NOT X owned_by U'))
+        self.failIf(repo.execute(cnxid, 'EConstraint X WHERE NOT X owned_by U'))
+        self.failIf(repo.execute(cnxid, 'EConstraintType X WHERE NOT X owned_by U'))
+        
+    def test_connect(self):
+        login, passwd = self.default_user_password()
+        self.assert_(self.repo.connect(login, passwd))
+        self.assertRaises(AuthenticationError,
+                          self.repo.connect, login, 'nimportnawak')
+        self.assertRaises(AuthenticationError,
+                          self.repo.connect, login, None)
+        self.assertRaises(AuthenticationError,
+                          self.repo.connect, None, None)
+    
+    def test_execute(self):
+        repo = self.repo
+        cnxid = repo.connect(*self.default_user_password())
+        repo.execute(cnxid, 'Any X')
+        repo.execute(cnxid, 'Any X where X is Personne')
+        repo.execute(cnxid, 'Any X where X is Personne, X nom ~= "to"')
+        repo.execute(cnxid, 'Any X WHERE X has_text %(text)s', {'text': u'\xe7a'})
+        repo.close(cnxid)
+        
+    def test_login_upassword_accent(self):
+        repo = self.repo
+        cnxid = repo.connect(*self.default_user_password())
+        repo.execute(cnxid, 'INSERT EUser X: X login %(login)s, X upassword %(passwd)s, X in_state S, X in_group G WHERE S name "activated", G name "users"',
+                     {'login': u"barnabé", 'passwd': u"héhéhé".encode('UTF8')})
+        repo.commit(cnxid)
+        repo.close(cnxid)
+        self.assert_(repo.connect(u"barnabé", u"héhéhé".encode('UTF8')))
+    
+    def test_invalid_entity_rollback(self):
+        repo = self.repo
+        cnxid = repo.connect(*self.default_user_password())
+        repo.execute(cnxid, 'INSERT EUser X: X login %(login)s, X upassword %(passwd)s, X in_state S WHERE S name "activated"',
+                     {'login': u"tutetute", 'passwd': 'tutetute'})
+        self.assertRaises(ValidationError, repo.commit, cnxid)
+        rset = repo.execute(cnxid, 'EUser X WHERE X login "tutetute"')
+        self.assertEquals(rset.rowcount, 0)
+        
+    def test_close(self):
+        repo = self.repo
+        cnxid = repo.connect(*self.default_user_password())
+        self.assert_(cnxid)
+        repo.close(cnxid)
+        self.assertRaises(BadConnectionId, repo.execute, cnxid, 'Any X')
+    
+    def test_invalid_cnxid(self):
+        self.assertRaises(BadConnectionId, self.repo.execute, 0, 'Any X')
+        self.assertRaises(BadConnectionId, self.repo.close, None)
+    
+    def test_shared_data(self):
+        repo = self.repo
+        cnxid = repo.connect(*self.default_user_password())
+        repo.set_shared_data(cnxid, 'data', 4)
+        cnxid2 = repo.connect(*self.default_user_password())
+        self.assertEquals(repo.get_shared_data(cnxid, 'data'), 4)
+        self.assertEquals(repo.get_shared_data(cnxid2, 'data'), None)
+        repo.set_shared_data(cnxid2, 'data', 5)
+        self.assertEquals(repo.get_shared_data(cnxid, 'data'), 4)
+        self.assertEquals(repo.get_shared_data(cnxid2, 'data'), 5)
+        repo.get_shared_data(cnxid2, 'data', pop=True)
+        self.assertEquals(repo.get_shared_data(cnxid, 'data'), 4)
+        self.assertEquals(repo.get_shared_data(cnxid2, 'data'), None)
+        repo.close(cnxid)
+        repo.close(cnxid2)
+        self.assertRaises(BadConnectionId, repo.get_shared_data, cnxid, 'data')
+        self.assertRaises(BadConnectionId, repo.get_shared_data, cnxid2, 'data')
+        self.assertRaises(BadConnectionId, repo.set_shared_data, cnxid, 'data', 1)
+        self.assertRaises(BadConnectionId, repo.set_shared_data, cnxid2, 'data', 1)
+
+    def test_check_session(self):
+        repo = self.repo
+        cnxid = repo.connect(*self.default_user_password())
+        self.assertEquals(repo.check_session(cnxid), None)
+        repo.close(cnxid)
+        self.assertRaises(BadConnectionId, repo.check_session, cnxid)
+
+    def test_transaction_base(self):
+        repo = self.repo
+        cnxid = repo.connect(*self.default_user_password())
+        # check db state
+        result = repo.execute(cnxid, 'Personne X')
+        self.assertEquals(result.rowcount, 0)
+        # rollback entity insertion
+        repo.execute(cnxid, "INSERT Personne X: X nom 'bidule'")
+        result = repo.execute(cnxid, 'Personne X')
+        self.assertEquals(result.rowcount, 1)
+        repo.rollback(cnxid)
+        result = repo.execute(cnxid, 'Personne X')
+        self.assertEquals(result.rowcount, 0, result.rows)
+        # commit
+        repo.execute(cnxid, "INSERT Personne X: X nom 'bidule'")
+        repo.commit(cnxid)
+        result = repo.execute(cnxid, 'Personne X')
+        self.assertEquals(result.rowcount, 1)
+
+    def test_transaction_base2(self):
+        repo = self.repo
+        cnxid = repo.connect(*self.default_user_password())
+        # rollback relation insertion
+        repo.execute(cnxid, "SET U in_group G WHERE U login 'admin', G name 'guests'")
+        result = repo.execute(cnxid, "Any U WHERE U in_group G, U login 'admin', G name 'guests'")
+        self.assertEquals(result.rowcount, 1)
+        repo.rollback(cnxid)
+        result = repo.execute(cnxid, "Any U WHERE U in_group G, U login 'admin', G name 'guests'")
+        self.assertEquals(result.rowcount, 0, result.rows)
+        
+    def test_transaction_base3(self):
+        repo = self.repo
+        cnxid = repo.connect(*self.default_user_password())
+        # rollback state change which trigger TrInfo insertion
+        ueid = repo._get_session(cnxid).user.eid
+        rset = repo.execute(cnxid, 'TrInfo T WHERE T wf_info_for X, X eid %(x)s', {'x': ueid})
+        self.assertEquals(len(rset), 1)
+        repo.execute(cnxid, 'SET X in_state S WHERE X eid %(x)s, S name "deactivated"',
+                     {'x': ueid}, 'x')
+        rset = repo.execute(cnxid, 'TrInfo T WHERE T wf_info_for X, X eid %(x)s', {'x': ueid})
+        self.assertEquals(len(rset), 2)
+        repo.rollback(cnxid)
+        rset = repo.execute(cnxid, 'TrInfo T WHERE T wf_info_for X, X eid %(x)s', {'x': ueid})
+        self.assertEquals(len(rset), 1)
+        
+    def test_transaction_interleaved(self):
+        self.skip('implement me')
+
+    def test_initial_schema(self):
+        schema = self.repo.schema
+        # check order of attributes is respected
+        self.assertListEquals([r.type for r in schema.eschema('EFRDef').ordered_relations()
+                               if not r.type in ('eid', 'is', 'is_instance_of', 'identity', 
+                                                 'creation_date', 'modification_date',
+                                                 'owned_by', 'created_by')],
+                              ['relation_type', 'from_entity', 'to_entity', 'constrained_by',
+                               'cardinality', 'ordernum', 
+                               'indexed', 'fulltextindexed', 'internationalizable',
+                               'defaultval', 'description_format', 'description'])
+
+        self.assertEquals(schema.eschema('EEType').main_attribute(), 'name')
+        self.assertEquals(schema.eschema('State').main_attribute(), 'name')
+
+        constraints = schema.rschema('name').rproperty('EEType', 'String', 'constraints')
+        self.assertEquals(len(constraints), 2)
+        for cstr in constraints[:]:
+            if isinstance(cstr, UniqueConstraint):
+                constraints.remove(cstr)
+                break
+        else:
+            self.fail('unique constraint not found')
+        sizeconstraint = constraints[0]
+        self.assertEquals(sizeconstraint.min, None)
+        self.assertEquals(sizeconstraint.max, 64)
+
+        constraints = schema.rschema('relation_type').rproperty('EFRDef', 'ERType', 'constraints')
+        self.assertEquals(len(constraints), 1)
+        cstr = constraints[0]
+        self.assert_(isinstance(cstr, RQLConstraint))
+        self.assertEquals(cstr.restriction, 'O final TRUE')
+
+        ownedby = schema.rschema('owned_by')
+        self.assertEquals(ownedby.objects('EEType'), ('EUser',))
+
+    def test_pyro(self):
+        import Pyro
+        Pyro.config.PYRO_MULTITHREADED = 0
+        lock = threading.Lock()
+        # the client part has to be in the thread due to sqlite limitations
+        t = threading.Thread(target=self._pyro_client, args=(lock,))
+        try:
+            daemon = self.repo.pyro_register()
+            t.start()
+            # connection
+            daemon.handleRequests(1.0)
+            daemon.handleRequests(1.0)
+            daemon.handleRequests(1.0)
+            # get schema
+            daemon.handleRequests(1.0)
+            # execute
+            daemon.handleRequests(1.0)
+            t.join()
+        finally:
+            repository.pyro_unregister(self.repo.config)
+            
+    def _pyro_client(self, lock):
+        cnx = connect(self.repo.config.appid, u'admin', 'gingkow')
+        # check we can get the schema
+        schema = cnx.get_schema()
+        self.assertEquals(schema.__hashmode__, None)
+        rset = cnx.cursor().execute('Any U,G WHERE U in_group G')
+        
+
+    def test_internal_api(self):
+        repo = self.repo
+        cnxid = repo.connect(*self.default_user_password())
+        session = repo._get_session(cnxid, setpool=True)
+        self.assertEquals(repo.type_and_source_from_eid(1, session), ('EGroup', 'system', None))
+        self.assertEquals(repo.type_from_eid(1, session), 'EGroup')
+        self.assertEquals(repo.source_from_eid(1, session).uri, 'system')
+        self.assertEquals(repo.eid2extid(repo.system_source, 1, session), None)
+        class dummysource: uri = 'toto'
+        self.assertRaises(UnknownEid, repo.eid2extid, dummysource, 1, session)
+
+    def test_public_api(self):
+        self.assertEquals(self.repo.get_schema(), self.repo.schema)
+        self.assertEquals(self.repo.source_defs(), {'system': {'adapter': 'native', 'uri': 'system'}})
+        # .properties() return a result set
+        self.assertEquals(self.repo.properties().rql, 'Any K,V WHERE P is EProperty,P pkey K, P value V, NOT P for_user U')
+
+    def test_session_api(self):
+        repo = self.repo
+        cnxid = repo.connect(*self.default_user_password())
+        self.assertEquals(repo.user_info(cnxid), (5, 'admin', set([u'managers']), {}))
+        self.assertEquals(repo.describe(cnxid, 1), (u'EGroup', u'system', None))
+        repo.close(cnxid)
+        self.assertRaises(BadConnectionId, repo.user_info, cnxid)
+        self.assertRaises(BadConnectionId, repo.describe, cnxid, 1)
+
+    def test_shared_data_api(self):
+        repo = self.repo
+        cnxid = repo.connect(*self.default_user_password())
+        self.assertEquals(repo.get_shared_data(cnxid, 'data'), None)
+        repo.set_shared_data(cnxid, 'data', 4)
+        self.assertEquals(repo.get_shared_data(cnxid, 'data'), 4)
+        repo.get_shared_data(cnxid, 'data', pop=True)
+        repo.get_shared_data(cnxid, 'whatever', pop=True)
+        self.assertEquals(repo.get_shared_data(cnxid, 'data'), None)
+        repo.close(cnxid)
+        self.assertRaises(BadConnectionId, repo.set_shared_data, cnxid, 'data', 0)
+        self.assertRaises(BadConnectionId, repo.get_shared_data, cnxid, 'data')
+        
+
+class DataHelpersTC(RepositoryBasedTC):
+    
+    def setUp(self):
+        """ called before each test from this class """
+        cnxid = self.repo.connect(*self.default_user_password())
+        self.session = self.repo._sessions[cnxid]
+        self.session.set_pool()
+
+    def tearDown(self):
+        self.session.rollback()
+        
+    def test_create_eid(self):
+        self.assert_(self.repo.system_source.create_eid(self.session))
+
+    def test_source_from_eid(self):
+        self.assertEquals(self.repo.source_from_eid(1, self.session),
+                          self.repo.sources_by_uri['system'])
+
+    def test_source_from_eid_raise(self):
+        self.assertRaises(UnknownEid, self.repo.source_from_eid, -2, self.session)
+
+    def test_type_from_eid(self):
+        self.assertEquals(self.repo.type_from_eid(1, self.session), 'EGroup')
+        
+    def test_type_from_eid_raise(self):
+        self.assertRaises(UnknownEid, self.repo.type_from_eid, -2, self.session)
+        
+    def test_add_delete_info(self):
+        entity = self.repo.vreg.etype_class('Personne')(self.session, None, None)
+        entity.eid = -1
+        entity.complete = lambda x: None
+        self.repo.add_info(self.session, entity, self.repo.sources_by_uri['system'])
+        cursor = self.session.pool['system']
+        cursor.execute('SELECT * FROM entities WHERE eid = -1')
+        data = cursor.fetchall()
+        self.assertIsInstance(data[0][3], DateTimeType)
+        data[0] = list(data[0])
+        data[0][3] = None
+        self.assertEquals(tuplify(data), [(-1, 'Personne', 'system', None, None)])
+        self.repo.delete_info(self.session, -1)
+        #self.repo.commit()
+        cursor.execute('SELECT * FROM entities WHERE eid = -1')
+        data = cursor.fetchall()
+        self.assertEquals(data, [])
+
+
+class FTITC(RepositoryBasedTC):
+    
+    def test_reindex_and_modified_since(self):
+        cursor = self.session.pool['system']
+        eidp = self.execute('INSERT Personne X: X nom "toto", X prenom "tutu"')[0][0]
+        self.commit()
+        ts = now()
+        self.assertEquals(len(self.execute('Personne X WHERE X has_text "tutu"')), 1)
+        cursor.execute('SELECT mtime, eid FROM entities WHERE eid = %s' % eidp)
+        omtime = cursor.fetchone()[0]
+        # our sqlite datetime adapter is ignore seconds fraction, so we have to
+        # ensure update is done the next seconds
+        time.sleep(1 - (ts.second - int(ts.second)))
+        self.execute('SET X nom "tata" WHERE X eid %(x)s', {'x': eidp}, 'x')
+        self.commit()
+        self.assertEquals(len(self.execute('Personne X WHERE X has_text "tutu"')), 1)
+        cursor.execute('SELECT mtime FROM entities WHERE eid = %s' % eidp)
+        mtime = cursor.fetchone()[0]
+        self.failUnless(omtime < mtime)
+        self.commit()
+        date, modified, deleted = self.repo.entities_modified_since(('Personne',), omtime)
+        self.assertEquals(modified, [('Personne', eidp)])
+        self.assertEquals(deleted, [])
+        date, modified, deleted = self.repo.entities_modified_since(('Personne',), mtime)
+        self.assertEquals(modified, [])
+        self.assertEquals(deleted, [])
+        self.execute('DELETE Personne X WHERE X eid %(x)s', {'x': eidp})
+        self.commit()
+        date, modified, deleted = self.repo.entities_modified_since(('Personne',), omtime)
+        self.assertEquals(modified, [])
+        self.assertEquals(deleted, [('Personne', eidp)])
+
+    def test_composite_entity(self):
+        assert self.schema.rschema('use_email').fulltext_container == 'subject'
+        eid = self.add_entity('EmailAddress', address=u'toto@logilab.fr').eid
+        self.commit()
+        rset = self.execute('Any X WHERE X has_text %(t)s', {'t': 'toto'})
+        self.assertEquals(rset.rows, [[eid]])
+        self.execute('SET X use_email Y WHERE X login "admin", Y eid %(y)s', {'y': eid})
+        self.commit()
+        rset = self.execute('Any X WHERE X has_text %(t)s', {'t': 'toto'})
+        self.assertEquals(rset.rows, [[self.session.user.eid]])
+        self.execute('DELETE X use_email Y WHERE X login "admin", Y eid %(y)s', {'y': eid})
+        self.commit()
+        rset = self.execute('Any X WHERE X has_text %(t)s', {'t': 'toto'})
+        self.assertEquals(rset.rows, [])
+        eid = self.add_entity('EmailAddress', address=u'tutu@logilab.fr').eid
+        self.execute('SET X use_email Y WHERE X login "admin", Y eid %(y)s', {'y': eid})
+        self.commit()
+        rset = self.execute('Any X WHERE X has_text %(t)s', {'t': 'tutu'})
+        self.assertEquals(rset.rows, [[self.session.user.eid]])
+        
+        
+class DBInitTC(RepositoryBasedTC):
+    
+    def test_versions_inserted(self):
+        inserted = [r[0] for r in self.execute('Any K ORDERBY K WHERE P pkey K, P pkey ~= "system.version.%"')]
+        self.assertEquals(inserted,
+                          [u'system.version.ebasket', u'system.version.eclassfolders',
+                           u'system.version.eclasstags', u'system.version.ecomment',
+                           u'system.version.eemail', u'system.version.efile',
+                           u'system.version.cubicweb'])
+
+        
+class InlineRelHooksTC(RepositoryBasedTC):
+    """test relation hooks are called for inlined relations
+    """
+    def setUp(self):
+        RepositoryBasedTC.setUp(self)
+        self.hm = self.repo.hm
+        self.called = []
+    
+    def _before_relation_hook(self, pool, fromeid, rtype, toeid):
+        self.called.append((fromeid, rtype, toeid))
+
+    def _after_relation_hook(self, pool, fromeid, rtype, toeid):
+        self.called.append((fromeid, rtype, toeid))
+        
+    def test_before_add_inline_relation(self):
+        """make sure before_<event>_relation hooks are called directly"""
+        self.hm.register_hook(self._before_relation_hook,
+                             'before_add_relation', 'ecrit_par')
+        eidp = self.execute('INSERT Personne X: X nom "toto"')[0][0]
+        eidn = self.execute('INSERT Note X: X type "T"')[0][0]
+        self.execute('SET N ecrit_par Y WHERE N type "T", Y nom "toto"')
+        self.assertEquals(self.called, [(eidn, 'ecrit_par', eidp)])
+        
+    def test_after_add_inline_relation(self):
+        """make sure after_<event>_relation hooks are deferred"""
+        self.hm.register_hook(self._after_relation_hook,
+                             'after_add_relation', 'ecrit_par')
+        eidp = self.execute('INSERT Personne X: X nom "toto"')[0][0]
+        eidn = self.execute('INSERT Note X: X type "T"')[0][0]
+        self.assertEquals(self.called, [])
+        self.execute('SET N ecrit_par Y WHERE N type "T", Y nom "toto"')
+        self.assertEquals(self.called, [(eidn, 'ecrit_par', eidp,)])
+        
+    def test_after_add_inline(self):
+        """make sure after_<event>_relation hooks are deferred"""
+        self.hm.register_hook(self._after_relation_hook,
+                             'after_add_relation', 'in_state')
+        eidp = self.execute('INSERT EUser X: X login "toto", X upassword "tutu", X in_state S WHERE S name "activated"')[0][0]
+        eids = self.execute('State X WHERE X name "activated"')[0][0]
+        self.assertEquals(self.called, [(eidp, 'in_state', eids,)])
+    
+    def test_before_delete_inline_relation(self):
+        """make sure before_<event>_relation hooks are called directly"""
+        self.hm.register_hook(self._before_relation_hook,
+                             'before_delete_relation', 'ecrit_par')
+        eidp = self.execute('INSERT Personne X: X nom "toto"')[0][0]
+        eidn = self.execute('INSERT Note X: X type "T"')[0][0]
+        self.execute('SET N ecrit_par Y WHERE N type "T", Y nom "toto"')
+        self.execute('DELETE N ecrit_par Y WHERE N type "T", Y nom "toto"')
+        self.assertEquals(self.called, [(eidn, 'ecrit_par', eidp)])
+        rset = self.execute('Any Y where N ecrit_par Y, N type "T", Y nom "toto"')
+        # make sure the relation is really deleted
+        self.failUnless(len(rset) == 0, "failed to delete inline relation")
+
+    def test_after_delete_inline_relation(self):
+        """make sure after_<event>_relation hooks are deferred"""
+        self.hm.register_hook(self._after_relation_hook,
+                             'after_delete_relation', 'ecrit_par')
+        eidp = self.execute('INSERT Personne X: X nom "toto"')[0][0]
+        eidn = self.execute('INSERT Note X: X type "T"')[0][0]
+        self.execute('SET N ecrit_par Y WHERE N type "T", Y nom "toto"')
+        self.assertEquals(self.called, [])
+        self.execute('DELETE N ecrit_par Y WHERE N type "T", Y nom "toto"')
+        self.assertEquals(self.called, [(eidn, 'ecrit_par', eidp,)])
+
+    
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/unittest_rql2sql.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1559 @@
+"""unit tests for module cubicweb.server.sources.rql2sql"""
+
+import sys
+from mx.DateTime import today
+
+from logilab.common.testlib import TestCase, unittest_main
+
+from rql import BadRQLQuery
+from indexer import get_indexer
+
+#from cubicweb.server.sources.native import remove_unused_solutions
+from cubicweb.server.sources.rql2sql import SQLGenerator
+
+from rql.utils import register_function, FunctionDescr
+# add a dumb registered procedure
+class stockproc(FunctionDescr):
+    supported_backends = ('postgres', 'sqlite', 'mysql')
+try:
+    register_function(stockproc)
+except AssertionError, ex:
+    pass # already registered
+
+from cubicweb.devtools import TestServerConfiguration
+from cubicweb.devtools.repotest import RQLGeneratorTC
+
+config = TestServerConfiguration('data')
+config.bootstrap_cubes()
+schema = config.load_schema()
+schema['in_state'].inlined = True
+schema['comments'].inlined = False
+
+PARSER = [
+    (r"Personne P WHERE P nom 'Zig\'oto';",
+     '''SELECT P.eid
+FROM Personne AS P
+WHERE P.nom=Zig\'oto'''),
+
+    (r'Personne P WHERE P nom ~= "Zig\"oto%";',
+     '''SELECT P.eid
+FROM Personne AS P
+WHERE P.nom ILIKE Zig"oto%'''),
+    ]
+
+BASIC = [
+    
+    ("Any X WHERE X is Affaire",
+     '''SELECT X.eid
+FROM Affaire AS X'''),
+    
+    ("Any X WHERE X eid 0",
+     '''SELECT 0'''),
+    
+    ("Personne P",
+     '''SELECT P.eid
+FROM Personne AS P'''),
+
+    ("Personne P WHERE P test TRUE",
+     '''SELECT P.eid
+FROM Personne AS P
+WHERE P.test=True'''),
+
+    ("Personne P WHERE P test false",
+     '''SELECT P.eid
+FROM Personne AS P
+WHERE P.test=False'''),
+
+    ("Personne P WHERE P eid -1",
+     '''SELECT -1'''),
+
+    ("Personne P LIMIT 20 OFFSET 10",
+     '''SELECT P.eid
+FROM Personne AS P
+LIMIT 20
+OFFSET 10'''),
+
+    ("Personne P WHERE S is Societe, P travaille S, S nom 'Logilab';",
+     '''SELECT rel_travaille0.eid_from
+FROM Societe AS S, travaille_relation AS rel_travaille0
+WHERE rel_travaille0.eid_to=S.eid AND S.nom=Logilab'''),
+
+    ("Personne P WHERE P concerne A, A concerne S, S nom 'Logilab', S is Societe;",
+     '''SELECT rel_concerne0.eid_from
+FROM Societe AS S, concerne_relation AS rel_concerne0, concerne_relation AS rel_concerne1
+WHERE rel_concerne0.eid_to=rel_concerne1.eid_from AND rel_concerne1.eid_to=S.eid AND S.nom=Logilab'''),
+
+    ("Note N WHERE X evaluee N, X nom 'Logilab';",
+     '''SELECT rel_evaluee0.eid_to
+FROM Division AS X, evaluee_relation AS rel_evaluee0
+WHERE rel_evaluee0.eid_from=X.eid AND X.nom=Logilab
+UNION ALL
+SELECT rel_evaluee0.eid_to
+FROM Personne AS X, evaluee_relation AS rel_evaluee0
+WHERE rel_evaluee0.eid_from=X.eid AND X.nom=Logilab
+UNION ALL
+SELECT rel_evaluee0.eid_to
+FROM Societe AS X, evaluee_relation AS rel_evaluee0
+WHERE rel_evaluee0.eid_from=X.eid AND X.nom=Logilab
+UNION ALL
+SELECT rel_evaluee0.eid_to
+FROM SubDivision AS X, evaluee_relation AS rel_evaluee0
+WHERE rel_evaluee0.eid_from=X.eid AND X.nom=Logilab'''),
+
+    ("Note N WHERE X evaluee N, X nom in ('Logilab', 'Caesium');",
+     '''SELECT rel_evaluee0.eid_to
+FROM Division AS X, evaluee_relation AS rel_evaluee0
+WHERE rel_evaluee0.eid_from=X.eid AND X.nom IN(Logilab, Caesium)
+UNION ALL
+SELECT rel_evaluee0.eid_to
+FROM Personne AS X, evaluee_relation AS rel_evaluee0
+WHERE rel_evaluee0.eid_from=X.eid AND X.nom IN(Logilab, Caesium)
+UNION ALL
+SELECT rel_evaluee0.eid_to
+FROM Societe AS X, evaluee_relation AS rel_evaluee0
+WHERE rel_evaluee0.eid_from=X.eid AND X.nom IN(Logilab, Caesium)
+UNION ALL
+SELECT rel_evaluee0.eid_to
+FROM SubDivision AS X, evaluee_relation AS rel_evaluee0
+WHERE rel_evaluee0.eid_from=X.eid AND X.nom IN(Logilab, Caesium)'''),
+
+    ("Any X WHERE X creation_date TODAY, X is Affaire",
+     '''SELECT X.eid
+FROM Affaire AS X
+WHERE DATE(X.creation_date)=CURRENT_DATE'''),
+
+    ("Any N WHERE G is EGroup, G name N, E eid 12, E read_permission G",
+     '''SELECT G.name
+FROM EGroup AS G, read_permission_relation AS rel_read_permission0
+WHERE rel_read_permission0.eid_from=12 AND rel_read_permission0.eid_to=G.eid'''),
+
+    ('Any Y WHERE U login "admin", U login Y', # stupid but valid...
+     """SELECT U.login
+FROM EUser AS U
+WHERE U.login=admin"""),
+
+    ('Any T WHERE T tags X, X is State',
+     '''SELECT rel_tags0.eid_from
+FROM State AS X, tags_relation AS rel_tags0
+WHERE rel_tags0.eid_to=X.eid'''),
+
+    ('Any X,Y WHERE X eid 0, Y eid 1, X concerne Y',
+     '''SELECT 0, 1
+FROM concerne_relation AS rel_concerne0
+WHERE rel_concerne0.eid_from=0 AND rel_concerne0.eid_to=1'''),
+
+    ("Any X WHERE X prenom 'lulu',"
+     "EXISTS(X owned_by U, U in_group G, G name 'lulufanclub' OR G name 'managers');",
+     '''SELECT X.eid
+FROM Personne AS X
+WHERE X.prenom=lulu AND EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by0, in_group_relation AS rel_in_group1, EGroup AS G WHERE rel_owned_by0.eid_from=X.eid AND rel_in_group1.eid_from=rel_owned_by0.eid_to AND rel_in_group1.eid_to=G.eid AND ((G.name=lulufanclub) OR (G.name=managers)))'''),
+
+    ("Any X WHERE X prenom 'lulu',"
+     "NOT EXISTS(X owned_by U, U in_group G, G name 'lulufanclub' OR G name 'managers');",
+     '''SELECT X.eid
+FROM Personne AS X
+WHERE X.prenom=lulu AND NOT EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by0, in_group_relation AS rel_in_group1, EGroup AS G WHERE rel_owned_by0.eid_from=X.eid AND rel_in_group1.eid_from=rel_owned_by0.eid_to AND rel_in_group1.eid_to=G.eid AND ((G.name=lulufanclub) OR (G.name=managers)))'''),
+]
+
+ADVANCED= [
+    ('Any X WHERE X is ET, ET eid 2',
+     '''SELECT rel_is0.eid_from
+FROM is_relation AS rel_is0
+WHERE rel_is0.eid_to=2'''),
+
+
+    ("Societe S WHERE S nom 'Logilab' OR S nom 'Caesium'",
+     '''SELECT S.eid
+FROM Societe AS S
+WHERE ((S.nom=Logilab) OR (S.nom=Caesium))'''),
+    
+    ('Any X WHERE X nom "toto", X eid IN (9700, 9710, 1045, 674)',
+    '''SELECT X.eid
+FROM Division AS X
+WHERE X.nom=toto AND X.eid IN(9700, 9710, 1045, 674)
+UNION ALL
+SELECT X.eid
+FROM Personne AS X
+WHERE X.nom=toto AND X.eid IN(9700, 9710, 1045, 674)
+UNION ALL
+SELECT X.eid
+FROM Societe AS X
+WHERE X.nom=toto AND X.eid IN(9700, 9710, 1045, 674)
+UNION ALL
+SELECT X.eid
+FROM SubDivision AS X
+WHERE X.nom=toto AND X.eid IN(9700, 9710, 1045, 674)'''),
+
+    ('Any Y, COUNT(N) GROUPBY Y WHERE Y evaluee N;',
+     '''SELECT rel_evaluee0.eid_from, COUNT(rel_evaluee0.eid_to)
+FROM evaluee_relation AS rel_evaluee0
+GROUP BY rel_evaluee0.eid_from'''),
+
+    ("Any X WHERE X concerne B or C concerne X",
+     '''SELECT X.eid
+FROM Affaire AS X, concerne_relation AS rel_concerne0, concerne_relation AS rel_concerne1
+WHERE ((rel_concerne0.eid_from=X.eid) OR (rel_concerne1.eid_to=X.eid))'''),
+
+    ("Any X WHERE X travaille S or X concerne A",
+     '''SELECT X.eid
+FROM Personne AS X, concerne_relation AS rel_concerne1, travaille_relation AS rel_travaille0
+WHERE ((rel_travaille0.eid_from=X.eid) OR (rel_concerne1.eid_from=X.eid))'''),
+
+    ("Any N WHERE A evaluee N or N ecrit_par P",
+     '''SELECT N.eid
+FROM Note AS N, evaluee_relation AS rel_evaluee0
+WHERE ((rel_evaluee0.eid_to=N.eid) OR (N.ecrit_par IS NOT NULL))'''),
+
+    ("Any N WHERE A evaluee N or EXISTS(N todo_by U)",
+     '''SELECT N.eid
+FROM Note AS N, evaluee_relation AS rel_evaluee0
+WHERE ((rel_evaluee0.eid_to=N.eid) OR (EXISTS(SELECT 1 FROM todo_by_relation AS rel_todo_by1 WHERE rel_todo_by1.eid_from=N.eid)))'''),
+
+    ("Any N WHERE A evaluee N or N todo_by U",
+     '''SELECT N.eid
+FROM Note AS N, evaluee_relation AS rel_evaluee0, todo_by_relation AS rel_todo_by1
+WHERE ((rel_evaluee0.eid_to=N.eid) OR (rel_todo_by1.eid_from=N.eid))'''),
+    
+    ("Any X WHERE X concerne B or C concerne X, B eid 12, C eid 13",
+     '''SELECT X.eid
+FROM Affaire AS X, concerne_relation AS rel_concerne0, concerne_relation AS rel_concerne1
+WHERE ((rel_concerne0.eid_from=X.eid AND rel_concerne0.eid_to=12) OR (rel_concerne1.eid_from=13 AND rel_concerne1.eid_to=X.eid))'''),
+
+    ('Any X WHERE X created_by U, X concerne B OR C concerne X, B eid 12, C eid 13',
+     '''SELECT rel_created_by0.eid_from
+FROM concerne_relation AS rel_concerne1, concerne_relation AS rel_concerne2, created_by_relation AS rel_created_by0
+WHERE ((rel_concerne1.eid_from=rel_created_by0.eid_from AND rel_concerne1.eid_to=12) OR (rel_concerne2.eid_from=13 AND rel_concerne2.eid_to=rel_created_by0.eid_from))'''),
+
+    ('Any P WHERE P travaille_subdivision S1 OR P travaille_subdivision S2, S1 nom "logilab", S2 nom "caesium"',
+     '''SELECT P.eid
+FROM Personne AS P, SubDivision AS S1, SubDivision AS S2, travaille_subdivision_relation AS rel_travaille_subdivision0, travaille_subdivision_relation AS rel_travaille_subdivision1
+WHERE ((rel_travaille_subdivision0.eid_from=P.eid AND rel_travaille_subdivision0.eid_to=S1.eid) OR (rel_travaille_subdivision1.eid_from=P.eid AND rel_travaille_subdivision1.eid_to=S2.eid)) AND S1.nom=logilab AND S2.nom=caesium'''),
+
+    ('Any X WHERE T tags X',
+     '''SELECT rel_tags0.eid_to
+FROM tags_relation AS rel_tags0'''),
+    
+    ('Any X WHERE X in_basket B, B eid 12',
+     '''SELECT rel_in_basket0.eid_from
+FROM in_basket_relation AS rel_in_basket0
+WHERE rel_in_basket0.eid_to=12'''),
+    
+    ('Any SEN,RN,OEN WHERE X from_entity SE, SE eid 44, X relation_type R, R eid 139, X to_entity OE, OE eid 42, R name RN, SE name SEN, OE name OEN',
+     '''SELECT SE.name, R.name, OE.name
+FROM EEType AS OE, EEType AS SE, EFRDef AS X, ERType AS R
+WHERE X.from_entity=44 AND SE.eid=44 AND X.relation_type=139 AND R.eid=139 AND X.to_entity=42 AND OE.eid=42
+UNION ALL
+SELECT SE.name, R.name, OE.name
+FROM EEType AS OE, EEType AS SE, ENFRDef AS X, ERType AS R
+WHERE X.from_entity=44 AND SE.eid=44 AND X.relation_type=139 AND R.eid=139 AND X.to_entity=42 AND OE.eid=42'''),
+
+    # Any O WHERE NOT S corrected_in O, S eid %(x)s, S concerns P, O version_of P, O in_state ST, NOT ST name "published", O modification_date MTIME ORDERBY MTIME DESC LIMIT 9
+    ('Any O WHERE NOT S ecrit_par O, S eid 1, S inline1 P, O inline2 P',
+     '''SELECT DISTINCT O.eid
+FROM Note AS S, Personne AS O
+WHERE (S.ecrit_par IS NULL OR S.ecrit_par!=O.eid) AND S.eid=1 AND O.inline2=S.inline1'''),
+
+    ('DISTINCT Any S ORDERBY stockproc(SI) WHERE NOT S ecrit_par O, S para SI',
+     '''SELECT T1.C0 FROM (SELECT DISTINCT S.eid AS C0, STOCKPROC(S.para) AS C1
+FROM Note AS S
+WHERE S.ecrit_par IS NULL
+ORDER BY 2) AS T1'''),
+
+    ('Any N WHERE N todo_by U, N is Note, U eid 2, N filed_under T, T eid 3',
+     # N would actually be invariant if U eid 2 had given a specific type to U
+     '''SELECT N.eid
+FROM Note AS N, filed_under_relation AS rel_filed_under1, todo_by_relation AS rel_todo_by0
+WHERE rel_todo_by0.eid_from=N.eid AND rel_todo_by0.eid_to=2 AND rel_filed_under1.eid_from=N.eid AND rel_filed_under1.eid_to=3'''),
+
+    ('Any N WHERE N todo_by U, U eid 2, P evaluee N, P eid 3',
+     '''SELECT rel_evaluee1.eid_to
+FROM evaluee_relation AS rel_evaluee1, todo_by_relation AS rel_todo_by0
+WHERE rel_evaluee1.eid_to=rel_todo_by0.eid_from AND rel_todo_by0.eid_to=2 AND rel_evaluee1.eid_from=3'''),
+
+    
+    (' Any X,U WHERE C owned_by U, NOT X owned_by U, C eid 1, X eid 2',
+     '''SELECT 2, rel_owned_by0.eid_to
+FROM owned_by_relation AS rel_owned_by0
+WHERE rel_owned_by0.eid_from=1 AND NOT EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by1 WHERE rel_owned_by1.eid_from=2 AND rel_owned_by0.eid_to=rel_owned_by1.eid_to)'''),
+
+    ('Any GN WHERE X in_group G, G name GN, (G name "managers" OR EXISTS(X copain T, T login in ("comme", "cochon")))',
+     '''SELECT G.name
+FROM EGroup AS G, in_group_relation AS rel_in_group0
+WHERE rel_in_group0.eid_to=G.eid AND ((G.name=managers) OR (EXISTS(SELECT 1 FROM copain_relation AS rel_copain1, EUser AS T WHERE rel_copain1.eid_from=rel_in_group0.eid_from AND rel_copain1.eid_to=T.eid AND T.login IN(comme, cochon))))'''),
+
+    ('Any C WHERE C is Card, EXISTS(X documented_by C)',
+      """SELECT C.eid
+FROM Card AS C
+WHERE EXISTS(SELECT 1 FROM documented_by_relation AS rel_documented_by0 WHERE rel_documented_by0.eid_to=C.eid)"""),
+    
+    ('Any C WHERE C is Card, EXISTS(X documented_by C, X eid 12)',
+      """SELECT C.eid
+FROM Card AS C
+WHERE EXISTS(SELECT 1 FROM documented_by_relation AS rel_documented_by0 WHERE rel_documented_by0.eid_from=12 AND rel_documented_by0.eid_to=C.eid)"""),
+
+    ('Any T WHERE C is Card, C title T, EXISTS(X documented_by C, X eid 12)',
+      """SELECT C.title
+FROM Card AS C
+WHERE EXISTS(SELECT 1 FROM documented_by_relation AS rel_documented_by0 WHERE rel_documented_by0.eid_from=12 AND rel_documented_by0.eid_to=C.eid)"""),
+
+    ('Any GN,L WHERE X in_group G, X login L, G name GN, EXISTS(X copain T, T login L, T login IN("comme", "cochon"))',
+     '''SELECT G.name, X.login
+FROM EGroup AS G, EUser AS X, in_group_relation AS rel_in_group0
+WHERE rel_in_group0.eid_from=X.eid AND rel_in_group0.eid_to=G.eid AND EXISTS(SELECT 1 FROM copain_relation AS rel_copain1, EUser AS T WHERE rel_copain1.eid_from=X.eid AND rel_copain1.eid_to=T.eid AND T.login=X.login AND T.login IN(comme, cochon))'''),
+
+    ('Any X,S, MAX(T) GROUPBY X,S ORDERBY S WHERE X is EUser, T tags X, S eid IN(32), X in_state S',
+     '''SELECT X.eid, 32, MAX(rel_tags0.eid_from)
+FROM EUser AS X, tags_relation AS rel_tags0
+WHERE rel_tags0.eid_to=X.eid AND X.in_state=32
+GROUP BY X.eid'''),
+
+    ('Any COUNT(S),CS GROUPBY CS ORDERBY 1 DESC LIMIT 10 WHERE S is Affaire, C is Societe, S concerne C, C nom CS, (EXISTS(S owned_by 1)) OR (EXISTS(S documented_by N, N title "published"))',
+     '''SELECT COUNT(rel_concerne0.eid_from), C.nom
+FROM Societe AS C, concerne_relation AS rel_concerne0
+WHERE rel_concerne0.eid_to=C.eid AND ((EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by1 WHERE rel_concerne0.eid_from=rel_owned_by1.eid_from AND rel_owned_by1.eid_to=1)) OR (EXISTS(SELECT 1 FROM documented_by_relation AS rel_documented_by2, Card AS N WHERE rel_concerne0.eid_from=rel_documented_by2.eid_from AND rel_documented_by2.eid_to=N.eid AND N.title=published)))
+GROUP BY C.nom
+ORDER BY 1 DESC
+LIMIT 10'''),
+
+    ('Any X WHERE Y evaluee X, Y is EUser',
+     '''SELECT rel_evaluee0.eid_to
+FROM EUser AS Y, evaluee_relation AS rel_evaluee0
+WHERE rel_evaluee0.eid_from=Y.eid'''),
+
+    ('Any L WHERE X login "admin", X identity Y, Y login L',
+     '''SELECT Y.login
+FROM EUser AS X, EUser AS Y
+WHERE X.login=admin AND X.eid=Y.eid'''),
+
+    ('Any L WHERE X login "admin", NOT X identity Y, Y login L',
+     '''SELECT Y.login
+FROM EUser AS X, EUser AS Y
+WHERE X.login=admin AND NOT X.eid=Y.eid'''),
+    
+    ('Any L WHERE X login "admin", X identity Y?, Y login L',
+     '''SELECT Y.login
+FROM EUser AS X LEFT OUTER JOIN EUser AS Y ON (X.eid=Y.eid)
+WHERE X.login=admin'''),
+
+    ('Any XN ORDERBY XN WHERE X name XN',
+     '''SELECT X.name
+FROM Basket AS X
+UNION ALL
+SELECT X.name
+FROM EConstraintType AS X
+UNION ALL
+SELECT X.name
+FROM EEType AS X
+UNION ALL
+SELECT X.name
+FROM EGroup AS X
+UNION ALL
+SELECT X.name
+FROM EPermission AS X
+UNION ALL
+SELECT X.name
+FROM ERType AS X
+UNION ALL
+SELECT X.name
+FROM File AS X
+UNION ALL
+SELECT X.name
+FROM Folder AS X
+UNION ALL
+SELECT X.name
+FROM Image AS X
+UNION ALL
+SELECT X.name
+FROM State AS X
+UNION ALL
+SELECT X.name
+FROM Tag AS X
+UNION ALL
+SELECT X.name
+FROM Transition AS X
+ORDER BY 1'''),
+
+#    ('Any XN WHERE X name XN GROUPBY XN',
+#     ''''''),
+#    ('Any XN, COUNT(X) WHERE X name XN GROUPBY XN',
+#     ''''''),
+
+    # DISTINCT, can use relation under EXISTS scope as principal
+    ('DISTINCT Any X,Y WHERE X name "EGroup", Y eid IN(1, 2, 3), EXISTS(X read_permission Y)',
+     '''SELECT DISTINCT X.eid, rel_read_permission0.eid_to
+FROM EEType AS X, read_permission_relation AS rel_read_permission0
+WHERE X.name=EGroup AND rel_read_permission0.eid_to IN(1, 2, 3) AND EXISTS(SELECT 1 WHERE rel_read_permission0.eid_from=X.eid)
+UNION
+SELECT DISTINCT X.eid, rel_read_permission0.eid_to
+FROM ERType AS X, read_permission_relation AS rel_read_permission0
+WHERE X.name=EGroup AND rel_read_permission0.eid_to IN(1, 2, 3) AND EXISTS(SELECT 1 WHERE rel_read_permission0.eid_from=X.eid)'''),
+
+    # no distinct, Y can't be invariant
+    ('Any X,Y WHERE X name "EGroup", Y eid IN(1, 2, 3), EXISTS(X read_permission Y)',
+     '''SELECT X.eid, Y.eid
+FROM EEType AS X, EGroup AS Y
+WHERE X.name=EGroup AND Y.eid IN(1, 2, 3) AND EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=X.eid AND rel_read_permission0.eid_to=Y.eid)
+UNION ALL
+SELECT X.eid, Y.eid
+FROM EEType AS X, RQLExpression AS Y
+WHERE X.name=EGroup AND Y.eid IN(1, 2, 3) AND EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=X.eid AND rel_read_permission0.eid_to=Y.eid)
+UNION ALL
+SELECT X.eid, Y.eid
+FROM EGroup AS Y, ERType AS X
+WHERE X.name=EGroup AND Y.eid IN(1, 2, 3) AND EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=X.eid AND rel_read_permission0.eid_to=Y.eid)
+UNION ALL
+SELECT X.eid, Y.eid
+FROM ERType AS X, RQLExpression AS Y
+WHERE X.name=EGroup AND Y.eid IN(1, 2, 3) AND EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=X.eid AND rel_read_permission0.eid_to=Y.eid)'''),
+
+    # DISTINCT but NEGATED exists, can't be invariant
+    ('DISTINCT Any X,Y WHERE X name "EGroup", Y eid IN(1, 2, 3), NOT EXISTS(X read_permission Y)',
+     '''SELECT DISTINCT X.eid, Y.eid
+FROM EEType AS X, EGroup AS Y
+WHERE X.name=EGroup AND Y.eid IN(1, 2, 3) AND NOT EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=X.eid AND rel_read_permission0.eid_to=Y.eid)
+UNION
+SELECT DISTINCT X.eid, Y.eid
+FROM EEType AS X, RQLExpression AS Y
+WHERE X.name=EGroup AND Y.eid IN(1, 2, 3) AND NOT EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=X.eid AND rel_read_permission0.eid_to=Y.eid)
+UNION
+SELECT DISTINCT X.eid, Y.eid
+FROM EGroup AS Y, ERType AS X
+WHERE X.name=EGroup AND Y.eid IN(1, 2, 3) AND NOT EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=X.eid AND rel_read_permission0.eid_to=Y.eid)
+UNION
+SELECT DISTINCT X.eid, Y.eid
+FROM ERType AS X, RQLExpression AS Y
+WHERE X.name=EGroup AND Y.eid IN(1, 2, 3) AND NOT EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=X.eid AND rel_read_permission0.eid_to=Y.eid)'''),
+
+    # should generate the same query as above
+    ('DISTINCT Any X,Y WHERE X name "EGroup", Y eid IN(1, 2, 3), NOT X read_permission Y',
+     '''SELECT DISTINCT X.eid, Y.eid
+FROM EEType AS X, EGroup AS Y
+WHERE X.name=EGroup AND Y.eid IN(1, 2, 3) AND NOT EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=X.eid AND rel_read_permission0.eid_to=Y.eid)
+UNION
+SELECT DISTINCT X.eid, Y.eid
+FROM EEType AS X, RQLExpression AS Y
+WHERE X.name=EGroup AND Y.eid IN(1, 2, 3) AND NOT EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=X.eid AND rel_read_permission0.eid_to=Y.eid)
+UNION
+SELECT DISTINCT X.eid, Y.eid
+FROM EGroup AS Y, ERType AS X
+WHERE X.name=EGroup AND Y.eid IN(1, 2, 3) AND NOT EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=X.eid AND rel_read_permission0.eid_to=Y.eid)
+UNION
+SELECT DISTINCT X.eid, Y.eid
+FROM ERType AS X, RQLExpression AS Y
+WHERE X.name=EGroup AND Y.eid IN(1, 2, 3) AND NOT EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=X.eid AND rel_read_permission0.eid_to=Y.eid)'''),
+    
+    # negated relation, can't be invariant
+    ('Any X,Y WHERE X name "EGroup", Y eid IN(1, 2, 3), NOT X read_permission Y',
+     '''SELECT X.eid, Y.eid
+FROM EEType AS X, EGroup AS Y
+WHERE X.name=EGroup AND Y.eid IN(1, 2, 3) AND NOT EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=X.eid AND rel_read_permission0.eid_to=Y.eid)
+UNION ALL
+SELECT X.eid, Y.eid
+FROM EEType AS X, RQLExpression AS Y
+WHERE X.name=EGroup AND Y.eid IN(1, 2, 3) AND NOT EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=X.eid AND rel_read_permission0.eid_to=Y.eid)
+UNION ALL
+SELECT X.eid, Y.eid
+FROM EGroup AS Y, ERType AS X
+WHERE X.name=EGroup AND Y.eid IN(1, 2, 3) AND NOT EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=X.eid AND rel_read_permission0.eid_to=Y.eid)
+UNION ALL
+SELECT X.eid, Y.eid
+FROM ERType AS X, RQLExpression AS Y
+WHERE X.name=EGroup AND Y.eid IN(1, 2, 3) AND NOT EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=X.eid AND rel_read_permission0.eid_to=Y.eid)'''),
+
+    ('Any MAX(X)+MIN(X), N GROUPBY N WHERE X name N;',
+     '''SELECT (MAX(T1.C0) + MIN(T1.C0)), T1.C1 FROM (SELECT X.eid AS C0, X.name AS C1
+FROM Basket AS X
+UNION ALL
+SELECT X.eid AS C0, X.name AS C1
+FROM EConstraintType AS X
+UNION ALL
+SELECT X.eid AS C0, X.name AS C1
+FROM EEType AS X
+UNION ALL
+SELECT X.eid AS C0, X.name AS C1
+FROM EGroup AS X
+UNION ALL
+SELECT X.eid AS C0, X.name AS C1
+FROM EPermission AS X
+UNION ALL
+SELECT X.eid AS C0, X.name AS C1
+FROM ERType AS X
+UNION ALL
+SELECT X.eid AS C0, X.name AS C1
+FROM File AS X
+UNION ALL
+SELECT X.eid AS C0, X.name AS C1
+FROM Folder AS X
+UNION ALL
+SELECT X.eid AS C0, X.name AS C1
+FROM Image AS X
+UNION ALL
+SELECT X.eid AS C0, X.name AS C1
+FROM State AS X
+UNION ALL
+SELECT X.eid AS C0, X.name AS C1
+FROM Tag AS X
+UNION ALL
+SELECT X.eid AS C0, X.name AS C1
+FROM Transition AS X) AS T1
+GROUP BY T1.C1'''),
+    
+    ('Any MAX(X)+MIN(LENGTH(D)), N GROUPBY N ORDERBY 1, N, DF WHERE X name N, X data D, X data_format DF;',
+     '''SELECT (MAX(T1.C1) + MIN(LENGTH(T1.C0))), T1.C2 FROM (SELECT X.data AS C0, X.eid AS C1, X.name AS C2, X.data_format AS C3
+FROM File AS X
+UNION ALL
+SELECT X.data AS C0, X.eid AS C1, X.name AS C2, X.data_format AS C3
+FROM Image AS X) AS T1
+GROUP BY T1.C2
+ORDER BY 1,2,T1.C3'''),
+
+    ('DISTINCT Any S ORDERBY R WHERE A is Affaire, A sujet S, A ref R',
+     '''SELECT T1.C0 FROM (SELECT DISTINCT A.sujet AS C0, A.ref AS C1
+FROM Affaire AS A
+ORDER BY 2) AS T1'''),
+    
+    ('DISTINCT Any MAX(X)+MIN(LENGTH(D)), N GROUPBY N ORDERBY 2, DF WHERE X name N, X data D, X data_format DF;',
+     '''SELECT T1.C0,T1.C1 FROM (SELECT DISTINCT (MAX(T1.C1) + MIN(LENGTH(T1.C0))) AS C0, T1.C2 AS C1, T1.C3 AS C2 FROM (SELECT DISTINCT X.data AS C0, X.eid AS C1, X.name AS C2, X.data_format AS C3
+FROM File AS X
+UNION
+SELECT DISTINCT X.data AS C0, X.eid AS C1, X.name AS C2, X.data_format AS C3
+FROM Image AS X) AS T1
+GROUP BY T1.C2,T1.C3
+ORDER BY 2,3) AS T1
+'''),
+
+    # ambiguity in EXISTS() -> should union the sub-query
+    ('Any T WHERE T is Tag, NOT T name in ("t1", "t2"), EXISTS(T tags X, X is IN (EUser, EGroup))',
+     '''SELECT T.eid
+FROM Tag AS T
+WHERE NOT (T.name IN(t1, t2)) AND EXISTS(SELECT 1 FROM tags_relation AS rel_tags0, EGroup AS X WHERE rel_tags0.eid_from=T.eid AND rel_tags0.eid_to=X.eid UNION SELECT 1 FROM tags_relation AS rel_tags1, EUser AS X WHERE rel_tags1.eid_from=T.eid AND rel_tags1.eid_to=X.eid)'''),
+
+    # must not use a relation in EXISTS scope to inline a variable 
+    ('Any U WHERE U eid IN (1,2), EXISTS(X owned_by U)',
+     '''SELECT U.eid
+FROM EUser AS U
+WHERE U.eid IN(1, 2) AND EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by0 WHERE rel_owned_by0.eid_to=U.eid)'''),
+
+    ('Any U WHERE EXISTS(U eid IN (1,2), X owned_by U)',
+     '''SELECT U.eid
+FROM EUser AS U
+WHERE EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by0 WHERE U.eid IN(1, 2) AND rel_owned_by0.eid_to=U.eid)'''),
+
+    ('Any COUNT(U) WHERE EXISTS (P owned_by U, P is IN (Note, Affaire))',
+     '''SELECT COUNT(U.eid)
+FROM EUser AS U
+WHERE EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by0, Affaire AS P WHERE rel_owned_by0.eid_from=P.eid AND rel_owned_by0.eid_to=U.eid UNION SELECT 1 FROM owned_by_relation AS rel_owned_by1, Note AS P WHERE rel_owned_by1.eid_from=P.eid AND rel_owned_by1.eid_to=U.eid)'''),
+
+    ('Any MAX(X)',
+     '''SELECT MAX(X.eid)
+FROM entities AS X'''),
+
+    ('Any MAX(X) WHERE X is Note',
+     '''SELECT MAX(X.eid)
+FROM Note AS X'''),
+    
+    ('Any X WHERE X eid > 12',
+     '''SELECT X.eid
+FROM entities AS X
+WHERE X.eid>12'''),
+    
+    ('Any X WHERE X eid > 12, X is Note',
+     """SELECT X.eid
+FROM entities AS X
+WHERE X.type='Note' AND X.eid>12"""),
+    
+    ('Any X, T WHERE X eid > 12, X title T',
+     """SELECT X.eid, X.title
+FROM Bookmark AS X
+WHERE X.eid>12
+UNION ALL
+SELECT X.eid, X.title
+FROM Card AS X
+WHERE X.eid>12
+UNION ALL
+SELECT X.eid, X.title
+FROM EmailThread AS X
+WHERE X.eid>12"""),
+
+    ('Any X',
+     '''SELECT X.eid
+FROM entities AS X'''),
+
+    ('Any X GROUPBY X WHERE X eid 12',
+     '''SELECT 12'''),
+    
+    ('Any X GROUPBY X ORDERBY Y WHERE X eid 12, X login Y',
+     '''SELECT X.eid
+FROM EUser AS X
+WHERE X.eid=12
+GROUP BY X.eid
+ORDER BY X.login'''),
+    
+    ('Any U,COUNT(X) GROUPBY U WHERE U eid 12, X owned_by U HAVING COUNT(X) > 10',
+     '''SELECT rel_owned_by0.eid_to, COUNT(rel_owned_by0.eid_from)
+FROM owned_by_relation AS rel_owned_by0
+WHERE rel_owned_by0.eid_to=12
+GROUP BY rel_owned_by0.eid_to
+HAVING COUNT(rel_owned_by0.eid_from)>10'''),
+
+    ('DISTINCT Any X ORDERBY stockproc(X) WHERE U login X',
+     '''SELECT T1.C0 FROM (SELECT DISTINCT U.login AS C0, STOCKPROC(U.login) AS C1
+FROM EUser AS U
+ORDER BY 2) AS T1'''),
+    
+    ('DISTINCT Any X ORDERBY Y WHERE B bookmarked_by X, X login Y',
+     '''SELECT T1.C0 FROM (SELECT DISTINCT X.eid AS C0, X.login AS C1
+FROM EUser AS X, bookmarked_by_relation AS rel_bookmarked_by0
+WHERE rel_bookmarked_by0.eid_to=X.eid
+ORDER BY 2) AS T1'''),
+
+    ('DISTINCT Any X ORDERBY SN WHERE X in_state S, S name SN',
+     '''SELECT T1.C0 FROM (SELECT DISTINCT X.eid AS C0, S.name AS C1
+FROM Affaire AS X, State AS S
+WHERE X.in_state=S.eid
+UNION
+SELECT DISTINCT X.eid AS C0, S.name AS C1
+FROM EUser AS X, State AS S
+WHERE X.in_state=S.eid
+UNION
+SELECT DISTINCT X.eid AS C0, S.name AS C1
+FROM Note AS X, State AS S
+WHERE X.in_state=S.eid
+ORDER BY 2) AS T1'''),
+
+    ]
+
+MULTIPLE_SEL = [
+    ("DISTINCT Any X,Y where P is Personne, P nom X , P prenom Y;",
+     '''SELECT DISTINCT P.nom, P.prenom
+FROM Personne AS P'''),
+    ("Any X,Y where P is Personne, P nom X , P prenom Y, not P nom NULL;",
+     '''SELECT P.nom, P.prenom
+FROM Personne AS P
+WHERE NOT (P.nom IS NULL)'''),
+    ("Personne X,Y where X nom NX, Y nom NX, X eid XE, not Y eid XE",
+     '''SELECT X.eid, Y.eid
+FROM Personne AS X, Personne AS Y
+WHERE Y.nom=X.nom AND NOT (Y.eid=X.eid)''')
+    ]
+
+NEGATIONS = [
+    ("Personne X WHERE NOT X evaluee Y;",
+     '''SELECT X.eid
+FROM Personne AS X
+WHERE NOT EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_from=X.eid)'''),
+    
+    ("Note N WHERE NOT X evaluee N, X eid 0",
+     '''SELECT N.eid
+FROM Note AS N
+WHERE NOT EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_from=0 AND rel_evaluee0.eid_to=N.eid)'''),
+    
+    ('Any X WHERE NOT X travaille S, X is Personne',
+     '''SELECT X.eid
+FROM Personne AS X
+WHERE NOT EXISTS(SELECT 1 FROM travaille_relation AS rel_travaille0 WHERE rel_travaille0.eid_from=X.eid)'''),
+    
+    ("Personne P where not P datenaiss TODAY",
+     '''SELECT P.eid
+FROM Personne AS P
+WHERE NOT (DATE(P.datenaiss)=CURRENT_DATE)'''),
+    
+    ("Personne P where NOT P concerne A",
+     '''SELECT P.eid
+FROM Personne AS P
+WHERE NOT EXISTS(SELECT 1 FROM concerne_relation AS rel_concerne0 WHERE rel_concerne0.eid_from=P.eid)'''),
+    
+    ("Affaire A where not P concerne A",
+     '''SELECT A.eid
+FROM Affaire AS A
+WHERE NOT EXISTS(SELECT 1 FROM concerne_relation AS rel_concerne0 WHERE rel_concerne0.eid_to=A.eid)'''),
+    ("Personne P where not P concerne A, A sujet ~= 'TEST%'",
+     '''SELECT P.eid
+FROM Affaire AS A, Personne AS P
+WHERE NOT EXISTS(SELECT 1 FROM concerne_relation AS rel_concerne0 WHERE rel_concerne0.eid_from=P.eid AND rel_concerne0.eid_to=A.eid) AND A.sujet ILIKE TEST%'''),
+
+    ('Any S WHERE NOT T eid 28258, T tags S',
+     '''SELECT rel_tags0.eid_to
+FROM tags_relation AS rel_tags0
+WHERE NOT (rel_tags0.eid_from=28258)'''),
+    
+    ('Any S WHERE T is Tag, T name TN, NOT T eid 28258, T tags S, S name SN',
+     '''SELECT S.eid
+FROM EGroup AS S, Tag AS T, tags_relation AS rel_tags0
+WHERE NOT (T.eid=28258) AND rel_tags0.eid_from=T.eid AND rel_tags0.eid_to=S.eid
+UNION ALL
+SELECT S.eid
+FROM State AS S, Tag AS T, tags_relation AS rel_tags0
+WHERE NOT (T.eid=28258) AND rel_tags0.eid_from=T.eid AND rel_tags0.eid_to=S.eid
+UNION ALL
+SELECT S.eid
+FROM Tag AS S, Tag AS T, tags_relation AS rel_tags0
+WHERE NOT (T.eid=28258) AND rel_tags0.eid_from=T.eid AND rel_tags0.eid_to=S.eid'''),
+
+    
+    ('Any X,Y WHERE X created_by Y, X eid 5, NOT Y eid 6',
+     '''SELECT 5, rel_created_by0.eid_to
+FROM created_by_relation AS rel_created_by0
+WHERE rel_created_by0.eid_from=5 AND NOT (rel_created_by0.eid_to=6)'''),
+
+    ('Note X WHERE NOT Y evaluee X',
+     '''SELECT X.eid
+FROM Note AS X
+WHERE NOT EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_to=X.eid)'''),
+
+    ('Any Y WHERE NOT Y evaluee X',
+     '''SELECT Y.eid
+FROM Division AS Y
+WHERE NOT EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_from=Y.eid)
+UNION ALL
+SELECT Y.eid
+FROM EUser AS Y
+WHERE NOT EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_from=Y.eid)
+UNION ALL
+SELECT Y.eid
+FROM Personne AS Y
+WHERE NOT EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_from=Y.eid)
+UNION ALL
+SELECT Y.eid
+FROM Societe AS Y
+WHERE NOT EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_from=Y.eid)
+UNION ALL
+SELECT Y.eid
+FROM SubDivision AS Y
+WHERE NOT EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_from=Y.eid)'''),
+
+    ('Any X WHERE NOT Y evaluee X, Y is EUser',
+     '''SELECT X.eid
+FROM Note AS X
+WHERE NOT EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0,EUser AS Y WHERE rel_evaluee0.eid_from=Y.eid AND rel_evaluee0.eid_to=X.eid)'''),
+    
+    ('Any X,T WHERE X title T, NOT X is Bookmark',
+     '''SELECT DISTINCT X.eid, X.title
+FROM Card AS X
+UNION
+SELECT DISTINCT X.eid, X.title
+FROM EmailThread AS X'''),
+
+    ('Any K,V WHERE P is EProperty, P pkey K, P value V, NOT P for_user U',
+     '''SELECT DISTINCT P.pkey, P.value
+FROM EProperty AS P
+WHERE P.for_user IS NULL'''),
+
+    ]
+
+OUTER_JOIN = [
+    ('Any X,S WHERE X travaille S?',
+     '''SELECT X.eid, rel_travaille0.eid_to
+FROM Personne AS X LEFT OUTER JOIN travaille_relation AS rel_travaille0 ON (rel_travaille0.eid_from=X.eid)'''
+#SELECT X.eid, S.eid
+#FROM Personne AS X LEFT OUTER JOIN travaille_relation AS rel_travaille0 ON (rel_travaille0.eid_from=X.eid) LEFT OUTER JOIN Societe AS S ON (rel_travaille0.eid_to=S.eid)'''
+    ),
+    ('Any S,X WHERE X? travaille S, S is Societe',
+     '''SELECT S.eid, rel_travaille0.eid_from
+FROM Societe AS S LEFT OUTER JOIN travaille_relation AS rel_travaille0 ON (rel_travaille0.eid_to=S.eid)'''
+#SELECT S.eid, X.eid
+#FROM Societe AS S LEFT OUTER JOIN travaille_relation AS rel_travaille0 ON (rel_travaille0.eid_to=S.eid) LEFT OUTER JOIN Personne AS X ON (rel_travaille0.eid_from=X.eid)'''
+    ),
+
+    ('Any N,A WHERE N inline1 A?',
+     '''SELECT N.eid, N.inline1
+FROM Note AS N'''),
+
+    ('Any SN WHERE X from_state S?, S name SN',
+     '''SELECT S.name
+FROM TrInfo AS X LEFT OUTER JOIN State AS S ON (X.from_state=S.eid)'''
+    ),
+
+    ('Any A,N WHERE N? inline1 A',
+     '''SELECT A.eid, N.eid
+FROM Affaire AS A LEFT OUTER JOIN Note AS N ON (N.inline1=A.eid)'''
+    ),
+
+    ('Any A,B,C,D,E,F,G WHERE A eid 12,A creation_date B,A modification_date C,A comment D,A from_state E?,A to_state F?,A wf_info_for G?',
+    '''SELECT A.eid, A.creation_date, A.modification_date, A.comment, A.from_state, A.to_state, A.wf_info_for
+FROM TrInfo AS A
+WHERE A.eid=12'''),
+
+    ('Any FS,TS,C,D,U ORDERBY D DESC WHERE WF wf_info_for X,WF from_state FS?, WF to_state TS, WF comment C,WF creation_date D, WF owned_by U, X eid 1',
+     '''SELECT WF.from_state, WF.to_state, WF.comment, WF.creation_date, rel_owned_by0.eid_to
+FROM TrInfo AS WF, owned_by_relation AS rel_owned_by0
+WHERE WF.wf_info_for=1 AND WF.to_state IS NOT NULL AND rel_owned_by0.eid_from=WF.eid
+ORDER BY 4 DESC'''),
+
+    ('Any X WHERE X is Affaire, S is Societe, EXISTS(X owned_by U OR (X concerne S?, S owned_by U))',
+     '''SELECT X.eid
+FROM Affaire AS X
+WHERE EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by0, EUser AS U, Affaire AS A LEFT OUTER JOIN concerne_relation AS rel_concerne1 ON (rel_concerne1.eid_from=A.eid) LEFT OUTER JOIN Societe AS S ON (rel_concerne1.eid_to=S.eid), owned_by_relation AS rel_owned_by2 WHERE ((rel_owned_by0.eid_from=A.eid AND rel_owned_by0.eid_to=U.eid) OR (rel_owned_by2.eid_from=S.eid AND rel_owned_by2.eid_to=U.eid)) AND X.eid=A.eid)'''),
+
+    ('Any C,M WHERE C travaille G?, G evaluee M?, G is Societe',
+     '''SELECT C.eid, rel_evaluee1.eid_to
+FROM Personne AS C LEFT OUTER JOIN travaille_relation AS rel_travaille0 ON (rel_travaille0.eid_from=C.eid) LEFT OUTER JOIN Societe AS G ON (rel_travaille0.eid_to=G.eid) LEFT OUTER JOIN evaluee_relation AS rel_evaluee1 ON (rel_evaluee1.eid_from=G.eid)'''
+#SELECT C.eid, M.eid
+#FROM Personne AS C LEFT OUTER JOIN travaille_relation AS rel_travaille0 ON (rel_travaille0.eid_from=C.eid) LEFT OUTER JOIN Societe AS G ON (rel_travaille0.eid_to=G.eid) LEFT OUTER JOIN evaluee_relation AS rel_evaluee1 ON (rel_evaluee1.eid_from=G.eid) LEFT OUTER JOIN Note AS M ON (rel_evaluee1.eid_to=M.eid)'''
+     ),
+
+    ('Any A,C WHERE A documented_by C?, (C is NULL) OR (EXISTS(C require_permission F, '
+     'F name "read", F require_group E, U in_group E)), U eid 1',
+     '''SELECT A.eid, rel_documented_by0.eid_to
+FROM Affaire AS A LEFT OUTER JOIN documented_by_relation AS rel_documented_by0 ON (rel_documented_by0.eid_from=A.eid)
+WHERE ((rel_documented_by0.eid_to IS NULL) OR (EXISTS(SELECT 1 FROM require_permission_relation AS rel_require_permission1, EPermission AS F, require_group_relation AS rel_require_group2, in_group_relation AS rel_in_group3 WHERE rel_documented_by0.eid_to=rel_require_permission1.eid_from AND rel_require_permission1.eid_to=F.eid AND F.name=read AND rel_require_group2.eid_from=F.eid AND rel_in_group3.eid_from=1 AND rel_in_group3.eid_to=rel_require_group2.eid_to)))'''),
+
+    ("Any X WHERE X eid 12, P? connait X",
+     '''SELECT X.eid
+FROM Personne AS X LEFT OUTER JOIN connait_relation AS rel_connait0 ON (rel_connait0.eid_to=12)
+WHERE X.eid=12'''
+#SELECT 12
+#FROM Personne AS X LEFT OUTER JOIN connait_relation AS rel_connait0 ON (rel_connait0.eid_to=12) LEFT OUTER JOIN Personne AS P ON (rel_connait0.eid_from=P.eid)
+#WHERE X.eid=12'''
+    ),
+
+    ('Any GN, TN ORDERBY GN WHERE T tags G?, T name TN, G name GN',
+    '''SELECT _T0.C1, T.name
+FROM Tag AS T LEFT OUTER JOIN tags_relation AS rel_tags0 ON (rel_tags0.eid_from=T.eid) LEFT OUTER JOIN (SELECT G.eid AS C0, G.name AS C1
+FROM EGroup AS G
+UNION ALL
+SELECT G.eid AS C0, G.name AS C1
+FROM State AS G
+UNION ALL
+SELECT G.eid AS C0, G.name AS C1
+FROM Tag AS G) AS _T0 ON (rel_tags0.eid_to=_T0.C0)
+ORDER BY 1'''),
+
+
+    # optional variable with additional restriction
+    ('Any T,G WHERE T tags G?, G name "hop", G is EGroup',
+     '''SELECT T.eid, G.eid
+FROM Tag AS T LEFT OUTER JOIN tags_relation AS rel_tags0 ON (rel_tags0.eid_from=T.eid) LEFT OUTER JOIN EGroup AS G ON (rel_tags0.eid_to=G.eid AND G.name=hop)'''),
+
+    # optional variable with additional invariant restriction
+    ('Any T,G WHERE T tags G?, G eid 12',
+     '''SELECT T.eid, rel_tags0.eid_to
+FROM Tag AS T LEFT OUTER JOIN tags_relation AS rel_tags0 ON (rel_tags0.eid_from=T.eid AND rel_tags0.eid_to=12)'''),
+
+    # optional variable with additional restriction appearing before the relation
+    ('Any T,G WHERE G name "hop", T tags G?, G is EGroup',
+     '''SELECT T.eid, G.eid
+FROM Tag AS T LEFT OUTER JOIN tags_relation AS rel_tags0 ON (rel_tags0.eid_from=T.eid) LEFT OUTER JOIN EGroup AS G ON (rel_tags0.eid_to=G.eid AND G.name=hop)'''),
+
+    # optional variable with additional restriction on inlined relation
+    # XXX the expected result should be as the query below. So what, raise BadRQLQuery ?
+    ('Any T,G,S WHERE T tags G?, G in_state S, S name "hop", G is EUser',
+     '''SELECT T.eid, G.eid, S.eid
+FROM State AS S, Tag AS T LEFT OUTER JOIN tags_relation AS rel_tags0 ON (rel_tags0.eid_from=T.eid) LEFT OUTER JOIN EUser AS G ON (rel_tags0.eid_to=G.eid)
+WHERE G.in_state=S.eid AND S.name=hop
+'''),
+
+    # optional variable with additional invariant restriction on an inlined relation
+    ('Any T,G,S WHERE T tags G, G in_state S?, S eid 1, G is EUser',
+     '''SELECT rel_tags0.eid_from, G.eid, G.in_state
+FROM EUser AS G, tags_relation AS rel_tags0
+WHERE rel_tags0.eid_to=G.eid AND (G.in_state=1 OR G.in_state IS NULL)'''),
+
+    # two optional variables with additional invariant restriction on an inlined relation
+    ('Any T,G,S WHERE T tags G?, G in_state S?, S eid 1, G is EUser',
+     '''SELECT T.eid, G.eid, G.in_state
+FROM Tag AS T LEFT OUTER JOIN tags_relation AS rel_tags0 ON (rel_tags0.eid_from=T.eid) LEFT OUTER JOIN EUser AS G ON (rel_tags0.eid_to=G.eid AND (G.in_state=1 OR G.in_state IS NULL))'''),
+
+    # two optional variables with additional restriction on an inlined relation
+    ('Any T,G,S WHERE T tags G?, G in_state S?, S name "hop", G is EUser',
+     '''SELECT T.eid, G.eid, S.eid
+FROM Tag AS T LEFT OUTER JOIN tags_relation AS rel_tags0 ON (rel_tags0.eid_from=T.eid) LEFT OUTER JOIN EUser AS G ON (rel_tags0.eid_to=G.eid) LEFT OUTER JOIN State AS S ON (G.in_state=S.eid AND S.name=hop)'''),
+    
+    # two optional variables with additional restriction on an ambigous inlined relation
+    ('Any T,G,S WHERE T tags G?, G in_state S?, S name "hop"',
+     '''SELECT T.eid, _T0.C0, _T0.C1
+FROM Tag AS T LEFT OUTER JOIN tags_relation AS rel_tags0 ON (rel_tags0.eid_from=T.eid) LEFT OUTER JOIN (SELECT G.eid AS C0, S.eid AS C1
+FROM Affaire AS G LEFT OUTER JOIN State AS S ON (G.in_state=S.eid AND S.name=hop) 
+UNION ALL
+SELECT G.eid AS C0, S.eid AS C1
+FROM EUser AS G LEFT OUTER JOIN State AS S ON (G.in_state=S.eid AND S.name=hop) 
+UNION ALL
+SELECT G.eid AS C0, S.eid AS C1
+FROM Note AS G LEFT OUTER JOIN State AS S ON (G.in_state=S.eid AND S.name=hop) ) AS _T0 ON (rel_tags0.eid_to=_T0.C0)'''),
+
+    ]
+
+VIRTUAL_VARS = [
+    ("Personne P WHERE P travaille S, S tel T, S fax T, S is Societe;",
+     '''SELECT rel_travaille0.eid_from
+FROM Societe AS S, travaille_relation AS rel_travaille0
+WHERE rel_travaille0.eid_to=S.eid AND S.fax=S.tel'''),
+    
+    ("Personne P where X eid 0, X creation_date D, P datenaiss < D, X is Affaire",
+     '''SELECT P.eid
+FROM Affaire AS X, Personne AS P
+WHERE X.eid=0 AND P.datenaiss<X.creation_date'''),
+
+    ("Any N,T WHERE N is Note, N type T;",
+     '''SELECT N.eid, N.type
+FROM Note AS N'''),
+
+    ("Personne P where X is Personne, X tel T, X fax F, P fax T+F",
+     '''SELECT P.eid
+FROM Personne AS P, Personne AS X
+WHERE P.fax=(X.tel + X.fax)'''),
+
+    ("Personne P where X tel T, X fax F, P fax IN (T,F)",
+     '''SELECT P.eid
+FROM Division AS X, Personne AS P
+WHERE P.fax IN(X.tel, X.fax)
+UNION ALL
+SELECT P.eid
+FROM Personne AS P, Personne AS X
+WHERE P.fax IN(X.tel, X.fax)
+UNION ALL
+SELECT P.eid
+FROM Personne AS P, Societe AS X
+WHERE P.fax IN(X.tel, X.fax)
+UNION ALL
+SELECT P.eid
+FROM Personne AS P, SubDivision AS X
+WHERE P.fax IN(X.tel, X.fax)'''),
+
+    ("Personne P where X tel T, X fax F, P fax IN (T,F,0832542332)",
+     '''SELECT P.eid
+FROM Division AS X, Personne AS P
+WHERE P.fax IN(X.tel, X.fax, 832542332)
+UNION ALL
+SELECT P.eid
+FROM Personne AS P, Personne AS X
+WHERE P.fax IN(X.tel, X.fax, 832542332)
+UNION ALL
+SELECT P.eid
+FROM Personne AS P, Societe AS X
+WHERE P.fax IN(X.tel, X.fax, 832542332)
+UNION ALL
+SELECT P.eid
+FROM Personne AS P, SubDivision AS X
+WHERE P.fax IN(X.tel, X.fax, 832542332)'''),
+    ]
+
+FUNCS = [
+    ("Any COUNT(P) WHERE P is Personne",
+     '''SELECT COUNT(P.eid)
+FROM Personne AS P'''),
+##     ("Personne X where X nom upper('TOTO')",
+##      '''SELECT X.eid\nFROM Personne AS X\nWHERE UPPER(X.nom) = TOTO'''),
+##     ("Personne X where X nom Y, UPPER(X) prenom upper(Y)",
+##      '''SELECT X.eid\nFROM Personne AS X\nWHERE UPPER(X.prenom) = UPPER(X.nom)'''),
+    ]
+
+SYMETRIC = [
+    ('Any P WHERE X eid 0, X connait P',
+     '''SELECT DISTINCT P.eid
+FROM Personne AS P, connait_relation AS rel_connait0
+WHERE (rel_connait0.eid_from=0 AND rel_connait0.eid_to=P.eid OR rel_connait0.eid_to=0 AND rel_connait0.eid_from=P.eid)'''
+#      '''SELECT rel_connait0.eid_to
+# FROM connait_relation AS rel_connait0
+# WHERE rel_connait0.eid_from=0
+# UNION
+# SELECT rel_connait0.eid_from
+# FROM connait_relation AS rel_connait0
+# WHERE rel_connait0.eid_to=0'''
+     ),
+    
+    ('Any P WHERE X connait P',
+    '''SELECT DISTINCT P.eid
+FROM Personne AS P, connait_relation AS rel_connait0
+WHERE (rel_connait0.eid_to=P.eid OR rel_connait0.eid_from=P.eid)'''
+    ),
+    
+    ('Any X WHERE X connait P',
+    '''SELECT DISTINCT X.eid
+FROM Personne AS X, connait_relation AS rel_connait0
+WHERE (rel_connait0.eid_from=X.eid OR rel_connait0.eid_to=X.eid)'''
+     ),
+    
+    ('Any P WHERE X eid 0, NOT X connait P',
+     '''SELECT P.eid
+FROM Personne AS P
+WHERE NOT EXISTS(SELECT 1 FROM connait_relation AS rel_connait0 WHERE (rel_connait0.eid_from=0 AND rel_connait0.eid_to=P.eid OR rel_connait0.eid_to=0 AND rel_connait0.eid_from=P.eid))'''),
+    
+    ('Any P WHERE NOT X connait P',
+    '''SELECT P.eid
+FROM Personne AS P
+WHERE NOT EXISTS(SELECT 1 FROM connait_relation AS rel_connait0 WHERE (rel_connait0.eid_to=P.eid OR rel_connait0.eid_from=P.eid))'''),
+    
+    ('Any X WHERE NOT X connait P',
+    '''SELECT X.eid
+FROM Personne AS X
+WHERE NOT EXISTS(SELECT 1 FROM connait_relation AS rel_connait0 WHERE (rel_connait0.eid_from=X.eid OR rel_connait0.eid_to=X.eid))'''),
+
+    ('Any P WHERE X connait P, P nom "nom"',
+     '''SELECT DISTINCT P.eid
+FROM Personne AS P, connait_relation AS rel_connait0
+WHERE (rel_connait0.eid_to=P.eid OR rel_connait0.eid_from=P.eid) AND P.nom=nom'''),
+    
+    ('Any X WHERE X connait P, P nom "nom"',
+     '''SELECT DISTINCT X.eid
+FROM Personne AS P, Personne AS X, connait_relation AS rel_connait0
+WHERE (rel_connait0.eid_from=X.eid AND rel_connait0.eid_to=P.eid OR rel_connait0.eid_to=X.eid AND rel_connait0.eid_from=P.eid) AND P.nom=nom'''
+    ),
+
+    ('Any X ORDERBY X DESC LIMIT 9 WHERE E eid 0, E connait X',
+    '''SELECT DISTINCT X.eid
+FROM Personne AS X, connait_relation AS rel_connait0
+WHERE (rel_connait0.eid_from=0 AND rel_connait0.eid_to=X.eid OR rel_connait0.eid_to=0 AND rel_connait0.eid_from=X.eid)
+ORDER BY 1 DESC
+LIMIT 9'''
+     ),
+
+    ('DISTINCT Any P WHERE P connait S OR S connait P, S nom "chouette"',
+     '''SELECT DISTINCT P.eid
+FROM Personne AS P, Personne AS S, connait_relation AS rel_connait0
+WHERE (rel_connait0.eid_from=P.eid AND rel_connait0.eid_to=S.eid OR rel_connait0.eid_to=P.eid AND rel_connait0.eid_from=S.eid) AND S.nom=chouette'''
+     )
+    ]
+
+INLINE = [
+    ('Any P, L WHERE N ecrit_par P, P nom L, N eid 0',
+     '''SELECT P.eid, P.nom
+FROM Note AS N, Personne AS P
+WHERE N.ecrit_par=P.eid AND N.eid=0'''),
+    
+    ('Any N WHERE NOT N ecrit_par P, P nom "toto"',
+     '''SELECT DISTINCT N.eid
+FROM Note AS N, Personne AS P
+WHERE (N.ecrit_par IS NULL OR N.ecrit_par!=P.eid) AND P.nom=toto'''),
+    
+    ('Any P WHERE N ecrit_par P, N eid 0',
+    '''SELECT N.ecrit_par
+FROM Note AS N
+WHERE N.ecrit_par IS NOT NULL AND N.eid=0'''),
+
+    ('Any P WHERE N ecrit_par P, P is Personne, N eid 0',
+    '''SELECT P.eid
+FROM Note AS N, Personne AS P
+WHERE N.ecrit_par=P.eid AND N.eid=0'''),
+
+    ('Any P WHERE NOT N ecrit_par P, P is Personne, N eid 512',
+     '''SELECT DISTINCT P.eid
+FROM Note AS N, Personne AS P
+WHERE (N.ecrit_par IS NULL OR N.ecrit_par!=P.eid) AND N.eid=512'''),
+
+    ('Any S,ES,T WHERE S state_of ET, ET name "EUser", ES allowed_transition T, T destination_state S',
+     '''SELECT T.destination_state, rel_allowed_transition1.eid_from, T.eid
+FROM EEType AS ET, Transition AS T, allowed_transition_relation AS rel_allowed_transition1, state_of_relation AS rel_state_of0
+WHERE T.destination_state=rel_state_of0.eid_from AND rel_state_of0.eid_to=ET.eid AND ET.name=EUser AND rel_allowed_transition1.eid_to=T.eid'''),
+    ('Any O WHERE S eid 0, S in_state O',
+     '''SELECT S.in_state
+FROM Affaire AS S
+WHERE S.eid=0 AND S.in_state IS NOT NULL
+UNION ALL
+SELECT S.in_state
+FROM EUser AS S
+WHERE S.eid=0 AND S.in_state IS NOT NULL
+UNION ALL
+SELECT S.in_state
+FROM Note AS S
+WHERE S.eid=0 AND S.in_state IS NOT NULL''')
+    
+    ]
+
+
+from logilab.common.adbh import ADV_FUNC_HELPER_DIRECTORY
+    
+class PostgresSQLGeneratorTC(RQLGeneratorTC):
+    schema = schema
+    
+    #capture = True
+    def setUp(self):
+        RQLGeneratorTC.setUp(self)
+        indexer = get_indexer('postgres', 'utf8')        
+        dbms_helper = ADV_FUNC_HELPER_DIRECTORY['postgres']
+        dbms_helper.fti_uid_attr = indexer.uid_attr
+        dbms_helper.fti_table = indexer.table
+        dbms_helper.fti_restriction_sql = indexer.restriction_sql
+        dbms_helper.fti_need_distinct_query = indexer.need_distinct
+        self.o = SQLGenerator(schema, dbms_helper)
+
+    def _norm_sql(self, sql):
+        return sql.strip()
+    
+    def _check(self, rql, sql, varmap=None):
+        try:
+            union = self._prepare(rql)
+            r, args = self.o.generate(union, {'text': 'hip hop momo'},
+                                      varmap=varmap)
+            self.assertLinesEquals((r % args).strip(), self._norm_sql(sql))
+        except Exception, ex:
+            print rql
+            if 'r' in locals():
+                print (r%args).strip()
+                print '!='
+                print sql.strip()
+            raise
+    
+    def _parse(self, rqls):
+        for rql, sql in rqls:
+            yield self._check, rql, sql
+ 
+    def _checkall(self, rql, sql):
+        try:
+            rqlst = self._prepare(rql)
+            r, args = self.o.generate(rqlst)
+            self.assertEqual((r.strip(), args), sql)
+        except Exception, ex:
+            print rql
+            if 'r' in locals():
+                print r.strip()
+                print '!='
+                print sql[0].strip()
+            raise
+        return
+#         rqlst, solutions = self._prepare(rql)
+#         for i, sol in enumerate(solutions):
+#             try:
+#                 r, args = self.o.generate([(rqlst, sol)])
+#                 self.assertEqual((r.strip(), args), sqls[i])
+#             except Exception, ex:
+#                 print rql
+#                 raise
+
+    def test1(self):
+        self._checkall('Any count(RDEF) WHERE RDEF relation_type X, X eid %(x)s',
+                       ("""SELECT COUNT(T1.C0) FROM (SELECT RDEF.eid AS C0
+FROM EFRDef AS RDEF
+WHERE RDEF.relation_type=%(x)s
+UNION ALL
+SELECT RDEF.eid AS C0
+FROM ENFRDef AS RDEF
+WHERE RDEF.relation_type=%(x)s) AS T1""", {}),
+                       )
+
+    def test2(self):
+        self._checkall('Any X WHERE C comments X, C eid %(x)s',
+                       ('''SELECT rel_comments0.eid_to
+FROM comments_relation AS rel_comments0
+WHERE rel_comments0.eid_from=%(x)s''', {})
+                       )
+
+    def test_cache_1(self):
+        self._check('Any X WHERE X in_basket B, B eid 12',
+                    '''SELECT rel_in_basket0.eid_from
+FROM in_basket_relation AS rel_in_basket0
+WHERE rel_in_basket0.eid_to=12''')
+        
+        self._check('Any X WHERE X in_basket B, B eid 12',
+                    '''SELECT rel_in_basket0.eid_from
+FROM in_basket_relation AS rel_in_basket0
+WHERE rel_in_basket0.eid_to=12''')
+
+    def test_varmap(self):
+        self._check('Any X,L WHERE X is EUser, X in_group G, X login L, G name "users"',
+                    '''SELECT T00.x, T00.l
+FROM EGroup AS G, T00, in_group_relation AS rel_in_group0
+WHERE rel_in_group0.eid_from=T00.x AND rel_in_group0.eid_to=G.eid AND G.name=users''',
+                    varmap={'X': 'T00.x', 'X.login': 'T00.l'})
+        self._check('Any X,L,GN WHERE X is EUser, X in_group G, X login L, G name GN',
+                    '''SELECT T00.x, T00.l, G.name
+FROM EGroup AS G, T00, in_group_relation AS rel_in_group0
+WHERE rel_in_group0.eid_from=T00.x AND rel_in_group0.eid_to=G.eid''',
+                    varmap={'X': 'T00.x', 'X.login': 'T00.l'})
+
+    def test_parser_parse(self):
+        for t in self._parse(PARSER):
+            yield t
+            
+    def test_basic_parse(self):
+        for t in self._parse(BASIC):
+            yield t
+
+    def test_advanced_parse(self):
+        for t in self._parse(ADVANCED):
+            yield t
+
+    def test_outer_join_parse(self):
+        for t in self._parse(OUTER_JOIN):
+            yield t
+
+    def test_virtual_vars_parse(self):
+        for t in self._parse(VIRTUAL_VARS):
+            yield t
+
+    def test_multiple_sel_parse(self):
+        for t in self._parse(MULTIPLE_SEL):
+            yield t
+        
+    def test_functions(self):
+        for t in self._parse(FUNCS):
+            yield t
+        
+    def test_negation(self):
+        for t in self._parse(NEGATIONS):
+            yield t
+
+    def test_union(self):
+        for t in self._parse((
+            ('(Any N ORDERBY 1 WHERE X name N, X is State)'
+             ' UNION '
+             '(Any NN ORDERBY 1 WHERE XX name NN, XX is Transition)',
+             '''(SELECT X.name
+FROM State AS X
+ORDER BY 1)
+UNION ALL
+(SELECT XX.name
+FROM Transition AS XX
+ORDER BY 1)'''),
+            )):
+            yield t
+            
+    def test_subquery(self):
+        for t in self._parse((
+
+            ('Any N ORDERBY 1 WITH N BEING '
+             '((Any N WHERE X name N, X is State)'
+             ' UNION '
+             '(Any NN WHERE XX name NN, XX is Transition))',
+             '''SELECT _T0.C0
+FROM ((SELECT X.name AS C0
+FROM State AS X)
+UNION ALL
+(SELECT XX.name AS C0
+FROM Transition AS XX)) AS _T0
+ORDER BY 1'''),
+            
+            ('Any N,NX ORDERBY NX WITH N,NX BEING '
+             '((Any N,COUNT(X) GROUPBY N WHERE X name N, X is State HAVING COUNT(X)>1)'
+             ' UNION '
+             '(Any N,COUNT(X) GROUPBY N WHERE X name N, X is Transition HAVING COUNT(X)>1))',
+             '''SELECT _T0.C0, _T0.C1
+FROM ((SELECT X.name AS C0, COUNT(X.eid) AS C1
+FROM State AS X
+GROUP BY X.name
+HAVING COUNT(X.eid)>1)
+UNION ALL
+(SELECT X.name AS C0, COUNT(X.eid) AS C1
+FROM Transition AS X
+GROUP BY X.name
+HAVING COUNT(X.eid)>1)) AS _T0
+ORDER BY 2'''),            
+
+            ('Any N,COUNT(X) GROUPBY N HAVING COUNT(X)>1 '
+             'WITH X, N BEING ((Any X, N WHERE X name N, X is State) UNION '
+             '                 (Any X, N WHERE X name N, X is Transition))',
+             '''SELECT _T0.C1, COUNT(_T0.C0)
+FROM ((SELECT X.eid AS C0, X.name AS C1
+FROM State AS X)
+UNION ALL
+(SELECT X.eid AS C0, X.name AS C1
+FROM Transition AS X)) AS _T0
+GROUP BY _T0.C1
+HAVING COUNT(_T0.C0)>1'''),
+
+            ('Any ETN,COUNT(X) GROUPBY ETN WHERE X is ET, ET name ETN '
+             'WITH X BEING ((Any X WHERE X is Societe) UNION (Any X WHERE X is Affaire, (EXISTS(X owned_by 1)) OR ((EXISTS(D concerne B?, B owned_by 1, X identity D, B is Note)) OR (EXISTS(F concerne E?, E owned_by 1, E is Societe, X identity F)))))',
+             '''SELECT ET.name, COUNT(_T0.C0)
+FROM ((SELECT X.eid AS C0
+FROM Societe AS X)
+UNION ALL
+(SELECT X.eid AS C0
+FROM Affaire AS X
+WHERE ((EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by0 WHERE rel_owned_by0.eid_from=X.eid AND rel_owned_by0.eid_to=1)) OR (((EXISTS(SELECT 1 FROM Affaire AS D LEFT OUTER JOIN concerne_relation AS rel_concerne1 ON (rel_concerne1.eid_from=D.eid) LEFT OUTER JOIN Note AS B ON (rel_concerne1.eid_to=B.eid), owned_by_relation AS rel_owned_by2 WHERE rel_owned_by2.eid_from=B.eid AND rel_owned_by2.eid_to=1 AND X.eid=D.eid)) OR (EXISTS(SELECT 1 FROM Affaire AS F LEFT OUTER JOIN concerne_relation AS rel_concerne3 ON (rel_concerne3.eid_from=F.eid) LEFT OUTER JOIN Societe AS E ON (rel_concerne3.eid_to=E.eid), owned_by_relation AS rel_owned_by4 WHERE rel_owned_by4.eid_from=E.eid AND rel_owned_by4.eid_to=1 AND X.eid=F.eid))))))) AS _T0, EEType AS ET, is_relation AS rel_is0
+WHERE rel_is0.eid_from=_T0.C0 AND rel_is0.eid_to=ET.eid
+GROUP BY ET.name'''),
+            )):
+            yield t
+
+            
+    def test_subquery_error(self):
+        rql = ('Any N WHERE X name N WITH X BEING '
+               '((Any X WHERE X is State)'
+               ' UNION '
+               ' (Any X WHERE X is Transition))')
+        rqlst = self._prepare(rql)
+        self.assertRaises(BadRQLQuery, self.o.generate, rqlst)
+            
+    def test_symetric(self):
+        for t in self._parse(SYMETRIC):
+            yield t
+        
+    def test_inline(self):
+        for t in self._parse(INLINE):
+            yield t
+            
+    def test_has_text(self):
+        for t in self._parse((
+            ('Any X WHERE X has_text "toto tata"',
+             """SELECT appears0.uid
+FROM appears AS appears0
+WHERE appears0.words @@ to_tsquery('default', 'toto&tata')"""),
+            
+            ('Personne X WHERE X has_text "toto tata"',
+             """SELECT X.eid
+FROM appears AS appears0, entities AS X
+WHERE appears0.words @@ to_tsquery('default', 'toto&tata') AND appears0.uid=X.eid AND X.type='Personne'"""),
+            
+            ('Personne X WHERE X has_text %(text)s',
+             """SELECT X.eid
+FROM appears AS appears0, entities AS X
+WHERE appears0.words @@ to_tsquery('default', 'hip&hop&momo') AND appears0.uid=X.eid AND X.type='Personne'"""),
+            
+            ('Any X WHERE X has_text "toto tata", X name "tutu"',
+             """SELECT X.eid
+FROM Basket AS X, appears AS appears0
+WHERE appears0.words @@ to_tsquery('default', 'toto&tata') AND appears0.uid=X.eid AND X.name=tutu
+UNION ALL
+SELECT X.eid
+FROM File AS X, appears AS appears0
+WHERE appears0.words @@ to_tsquery('default', 'toto&tata') AND appears0.uid=X.eid AND X.name=tutu
+UNION ALL
+SELECT X.eid
+FROM Folder AS X, appears AS appears0
+WHERE appears0.words @@ to_tsquery('default', 'toto&tata') AND appears0.uid=X.eid AND X.name=tutu
+UNION ALL
+SELECT X.eid
+FROM Image AS X, appears AS appears0
+WHERE appears0.words @@ to_tsquery('default', 'toto&tata') AND appears0.uid=X.eid AND X.name=tutu
+UNION ALL
+SELECT X.eid
+FROM State AS X, appears AS appears0
+WHERE appears0.words @@ to_tsquery('default', 'toto&tata') AND appears0.uid=X.eid AND X.name=tutu
+UNION ALL
+SELECT X.eid
+FROM Tag AS X, appears AS appears0
+WHERE appears0.words @@ to_tsquery('default', 'toto&tata') AND appears0.uid=X.eid AND X.name=tutu
+UNION ALL
+SELECT X.eid
+FROM Transition AS X, appears AS appears0
+WHERE appears0.words @@ to_tsquery('default', 'toto&tata') AND appears0.uid=X.eid AND X.name=tutu"""),
+
+            ('Personne X where X has_text %(text)s, X travaille S, S has_text %(text)s',
+             """SELECT X.eid
+FROM appears AS appears0, appears AS appears2, entities AS X, travaille_relation AS rel_travaille1
+WHERE appears0.words @@ to_tsquery('default', 'hip&hop&momo') AND appears0.uid=X.eid AND X.type='Personne' AND X.eid=rel_travaille1.eid_from AND appears2.uid=rel_travaille1.eid_to AND appears2.words @@ to_tsquery('default', 'hip&hop&momo')"""),
+            )):
+            yield t
+
+
+    def test_from_clause_needed(self):
+        queries = [("Any 1 WHERE EXISTS(T is EGroup, T name 'managers')",
+                    '''SELECT 1
+WHERE EXISTS(SELECT 1 FROM EGroup AS T WHERE T.name=managers)'''),
+                   ('Any X,Y WHERE NOT X created_by Y, X eid 5, Y eid 6',
+                    '''SELECT 5, 6
+WHERE NOT EXISTS(SELECT 1 FROM created_by_relation AS rel_created_by0 WHERE rel_created_by0.eid_from=5 AND rel_created_by0.eid_to=6)'''),
+                   ]
+        for t in self._parse(queries):
+            yield t
+
+    def test_ambigous_exists_no_from_clause(self):
+        self._check('Any COUNT(U) WHERE U eid 1, EXISTS (P owned_by U, P is IN (Note, Affaire))',
+                    '''SELECT COUNT(1)
+WHERE EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by0, Affaire AS P WHERE rel_owned_by0.eid_from=P.eid AND rel_owned_by0.eid_to=1 UNION SELECT 1 FROM owned_by_relation AS rel_owned_by1, Note AS P WHERE rel_owned_by1.eid_from=P.eid AND rel_owned_by1.eid_to=1)''')
+
+
+class SqliteSQLGeneratorTC(PostgresSQLGeneratorTC):
+    
+    def setUp(self):
+        RQLGeneratorTC.setUp(self)
+        indexer = get_indexer('sqlite', 'utf8')        
+        dbms_helper = ADV_FUNC_HELPER_DIRECTORY['sqlite']
+        dbms_helper.fti_uid_attr = indexer.uid_attr
+        dbms_helper.fti_table = indexer.table
+        dbms_helper.fti_restriction_sql = indexer.restriction_sql
+        dbms_helper.fti_need_distinct_query = indexer.need_distinct
+        self.o = SQLGenerator(schema, dbms_helper)
+
+    def _norm_sql(self, sql):
+        return sql.strip().replace(' ILIKE ', ' LIKE ')
+
+    def test_union(self):
+        for t in self._parse((
+            ('(Any N ORDERBY 1 WHERE X name N, X is State)'
+             ' UNION '
+             '(Any NN ORDERBY 1 WHERE XX name NN, XX is Transition)',
+             '''SELECT X.name
+FROM State AS X
+ORDER BY 1
+UNION ALL
+SELECT XX.name
+FROM Transition AS XX
+ORDER BY 1'''),
+            )):
+            yield t
+            
+
+    def test_subquery(self):
+        # NOTE: no paren around UNION with sqlitebackend
+        for t in self._parse((
+
+            ('Any N ORDERBY 1 WITH N BEING '
+             '((Any N WHERE X name N, X is State)'
+             ' UNION '
+             '(Any NN WHERE XX name NN, XX is Transition))',
+             '''SELECT _T0.C0
+FROM (SELECT X.name AS C0
+FROM State AS X
+UNION ALL
+SELECT XX.name AS C0
+FROM Transition AS XX) AS _T0
+ORDER BY 1'''),
+            
+            ('Any N,NX ORDERBY NX WITH N,NX BEING '
+             '((Any N,COUNT(X) GROUPBY N WHERE X name N, X is State HAVING COUNT(X)>1)'
+             ' UNION '
+             '(Any N,COUNT(X) GROUPBY N WHERE X name N, X is Transition HAVING COUNT(X)>1))',
+             '''SELECT _T0.C0, _T0.C1
+FROM (SELECT X.name AS C0, COUNT(X.eid) AS C1
+FROM State AS X
+GROUP BY X.name
+HAVING COUNT(X.eid)>1
+UNION ALL
+SELECT X.name AS C0, COUNT(X.eid) AS C1
+FROM Transition AS X
+GROUP BY X.name
+HAVING COUNT(X.eid)>1) AS _T0
+ORDER BY 2'''),            
+
+            ('Any N,COUNT(X) GROUPBY N HAVING COUNT(X)>1 '
+             'WITH X, N BEING ((Any X, N WHERE X name N, X is State) UNION '
+             '                 (Any X, N WHERE X name N, X is Transition))',
+             '''SELECT _T0.C1, COUNT(_T0.C0)
+FROM (SELECT X.eid AS C0, X.name AS C1
+FROM State AS X
+UNION ALL
+SELECT X.eid AS C0, X.name AS C1
+FROM Transition AS X) AS _T0
+GROUP BY _T0.C1
+HAVING COUNT(_T0.C0)>1'''),
+            )):
+            yield t
+        
+    def test_has_text(self):
+        for t in self._parse((
+            ('Any X WHERE X has_text "toto tata"',
+             """SELECT appears0.uid
+FROM appears AS appears0
+WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata'))"""),
+            
+            ('Any X WHERE X has_text %(text)s',
+             """SELECT appears0.uid
+FROM appears AS appears0
+WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('hip', 'hop', 'momo'))"""),
+            
+            ('Personne X WHERE X has_text "toto tata"',
+             """SELECT X.eid
+FROM appears AS appears0, entities AS X
+WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=X.eid AND X.type='Personne'"""),
+            
+            ('Any X WHERE X has_text "toto tata", X name "tutu"',
+             """SELECT X.eid
+FROM Basket AS X, appears AS appears0
+WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=X.eid AND X.name=tutu
+UNION ALL
+SELECT X.eid
+FROM File AS X, appears AS appears0
+WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=X.eid AND X.name=tutu
+UNION ALL
+SELECT X.eid
+FROM Folder AS X, appears AS appears0
+WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=X.eid AND X.name=tutu
+UNION ALL
+SELECT X.eid
+FROM Image AS X, appears AS appears0
+WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=X.eid AND X.name=tutu
+UNION ALL
+SELECT X.eid
+FROM State AS X, appears AS appears0
+WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=X.eid AND X.name=tutu
+UNION ALL
+SELECT X.eid
+FROM Tag AS X, appears AS appears0
+WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=X.eid AND X.name=tutu
+UNION ALL
+SELECT X.eid
+FROM Transition AS X, appears AS appears0
+WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=X.eid AND X.name=tutu"""),
+            )):
+            yield t
+
+
+
+class MySQLGenerator(PostgresSQLGeneratorTC):
+
+    def setUp(self):
+        RQLGeneratorTC.setUp(self)
+        indexer = get_indexer('mysql', 'utf8')        
+        dbms_helper = ADV_FUNC_HELPER_DIRECTORY['mysql']
+        dbms_helper.fti_uid_attr = indexer.uid_attr
+        dbms_helper.fti_table = indexer.table
+        dbms_helper.fti_restriction_sql = indexer.restriction_sql
+        dbms_helper.fti_need_distinct_query = indexer.need_distinct
+        self.o = SQLGenerator(schema, dbms_helper)
+
+    def _norm_sql(self, sql):
+        return sql.strip().replace(' ILIKE ', ' LIKE ')
+
+    def test_from_clause_needed(self):
+        queries = [("Any 1 WHERE EXISTS(T is EGroup, T name 'managers')",
+                    '''SELECT 1
+FROM (SELECT 1) AS _T
+WHERE EXISTS(SELECT 1 FROM EGroup AS T WHERE T.name=managers)'''),
+                   ('Any X,Y WHERE NOT X created_by Y, X eid 5, Y eid 6',
+                    '''SELECT 5, 6
+FROM (SELECT 1) AS _T
+WHERE NOT EXISTS(SELECT 1 FROM created_by_relation AS rel_created_by0 WHERE rel_created_by0.eid_from=5 AND rel_created_by0.eid_to=6)'''),
+                   ]
+        for t in self._parse(queries):
+            yield t
+
+
+    def test_has_text(self):
+        queries = [
+            ('Any X WHERE X has_text "toto tata"',
+             """SELECT appears0.uid
+FROM appears AS appears0
+WHERE MATCH (appears0.words) AGAINST ('toto tata' IN BOOLEAN MODE)"""),
+            ('Personne X WHERE X has_text "toto tata"',
+             """SELECT X.eid
+FROM appears AS appears0, entities AS X
+WHERE MATCH (appears0.words) AGAINST ('toto tata' IN BOOLEAN MODE) AND appears0.uid=X.eid AND X.type='Personne'"""),
+            ('Personne X WHERE X has_text %(text)s',
+             """SELECT X.eid
+FROM appears AS appears0, entities AS X
+WHERE MATCH (appears0.words) AGAINST ('hip hop momo' IN BOOLEAN MODE) AND appears0.uid=X.eid AND X.type='Personne'"""),
+            ('Any X WHERE X has_text "toto tata", X name "tutu"',
+             """SELECT X.eid
+FROM Basket AS X, appears AS appears0
+WHERE MATCH (appears0.words) AGAINST ('toto tata' IN BOOLEAN MODE) AND appears0.uid=X.eid AND X.name=tutu
+UNION ALL
+SELECT X.eid
+FROM File AS X, appears AS appears0
+WHERE MATCH (appears0.words) AGAINST ('toto tata' IN BOOLEAN MODE) AND appears0.uid=X.eid AND X.name=tutu
+UNION ALL
+SELECT X.eid
+FROM Folder AS X, appears AS appears0
+WHERE MATCH (appears0.words) AGAINST ('toto tata' IN BOOLEAN MODE) AND appears0.uid=X.eid AND X.name=tutu
+UNION ALL
+SELECT X.eid
+FROM Image AS X, appears AS appears0
+WHERE MATCH (appears0.words) AGAINST ('toto tata' IN BOOLEAN MODE) AND appears0.uid=X.eid AND X.name=tutu
+UNION ALL
+SELECT X.eid
+FROM State AS X, appears AS appears0
+WHERE MATCH (appears0.words) AGAINST ('toto tata' IN BOOLEAN MODE) AND appears0.uid=X.eid AND X.name=tutu
+UNION ALL
+SELECT X.eid
+FROM Tag AS X, appears AS appears0
+WHERE MATCH (appears0.words) AGAINST ('toto tata' IN BOOLEAN MODE) AND appears0.uid=X.eid AND X.name=tutu
+UNION ALL
+SELECT X.eid
+FROM Transition AS X, appears AS appears0
+WHERE MATCH (appears0.words) AGAINST ('toto tata' IN BOOLEAN MODE) AND appears0.uid=X.eid AND X.name=tutu""")
+            ]
+        for t in self._parse(queries):
+            yield t
+                             
+
+    def test_ambigous_exists_no_from_clause(self):
+        self._check('Any COUNT(U) WHERE U eid 1, EXISTS (P owned_by U, P is IN (Note, Affaire))',
+                    '''SELECT COUNT(1)
+FROM (SELECT 1) AS _T
+WHERE EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by0, Affaire AS P WHERE rel_owned_by0.eid_from=P.eid AND rel_owned_by0.eid_to=1 UNION SELECT 1 FROM owned_by_relation AS rel_owned_by1, Note AS P WHERE rel_owned_by1.eid_from=P.eid AND rel_owned_by1.eid_to=1)''') 
+           
+
+        
+
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/unittest_rqlannotation.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,291 @@
+# -*- coding: iso-8859-1 -*-
+"""unit tests for module cubicweb.server.rqlannotation
+"""
+
+from cubicweb.devtools import init_test_database
+from cubicweb.devtools.repotest import BaseQuerierTC
+
+repo, cnx = init_test_database('sqlite')
+
+class SQLGenAnnotatorTC(BaseQuerierTC):
+    repo = repo
+    
+    def get_max_eid(self):
+        # no need for cleanup here
+        return None
+    def cleanup(self):
+        # no need for cleanup here
+        pass
+                
+    def test_0_1(self):        
+        rqlst = self._prepare('Any SEN,RN,OEN WHERE X from_entity SE, SE eid 44, X relation_type R, R eid 139, X to_entity OE, OE eid 42, R name RN, SE name SEN, OE name OEN')
+        self.assertEquals(rqlst.defined_vars['SE']._q_invariant, False)
+        self.assertEquals(rqlst.defined_vars['OE']._q_invariant, False)
+        self.assertEquals(rqlst.defined_vars['R']._q_invariant, False)
+        self.assertEquals(rqlst.defined_vars['SE'].stinfo['attrvar'], None)
+        self.assertEquals(rqlst.defined_vars['OE'].stinfo['attrvar'], None)
+        self.assertEquals(rqlst.defined_vars['R'].stinfo['attrvar'], None)
+        
+    def test_0_2(self):        
+        rqlst = self._prepare('Any O WHERE NOT S ecrit_par O, S eid 1, S inline1 P, O inline2 P')
+        self.assertEquals(rqlst.defined_vars['P']._q_invariant, True)
+        self.assertEquals(rqlst.defined_vars['O'].stinfo['attrvar'], None)
+
+    def test_0_4(self):        
+        rqlst = self._prepare('Any A,B,C WHERE A eid 12,A comment B, A ?wf_info_for C')
+        self.assertEquals(rqlst.defined_vars['A']._q_invariant, False)
+        self.assert_(rqlst.defined_vars['B'].stinfo['attrvar'])
+        self.assertEquals(rqlst.defined_vars['C']._q_invariant, False)
+        self.assertEquals(rqlst.solutions, [{'A': 'TrInfo', 'B': 'String', 'C': 'Affaire'},
+                                      {'A': 'TrInfo', 'B': 'String', 'C': 'EUser'},
+                                      {'A': 'TrInfo', 'B': 'String', 'C': 'Note'}])
+
+    def test_0_5(self):        
+        rqlst = self._prepare('Any P WHERE N ecrit_par P, N eid 0')
+        self.assertEquals(rqlst.defined_vars['N']._q_invariant, False)
+        self.assertEquals(rqlst.defined_vars['P']._q_invariant, True)
+
+    def test_0_6(self):        
+        rqlst = self._prepare('Any P WHERE NOT N ecrit_par P, N eid 512')
+        self.assertEquals(rqlst.defined_vars['P']._q_invariant, False)
+
+    def test_0_7(self):        
+        rqlst = self._prepare('Personne X,Y where X nom NX, Y nom NX, X eid XE, not Y eid XE')
+        self.assertEquals(rqlst.defined_vars['X']._q_invariant, False)
+        self.assertEquals(rqlst.defined_vars['Y']._q_invariant, False)
+        self.assert_(rqlst.defined_vars['XE'].stinfo['attrvar'])
+
+    def test_0_8(self):        
+        rqlst = self._prepare('Any P WHERE X eid 0, NOT X connait P')
+        self.assertEquals(rqlst.defined_vars['P']._q_invariant, False)
+        #self.assertEquals(rqlst.defined_vars['X']._q_invariant, True)
+        self.assertEquals(len(rqlst.solutions), 1, rqlst.solutions)
+
+    def test_0_10(self):        
+        rqlst = self._prepare('Any X WHERE X concerne Y, Y is Note')
+        self.assertEquals(rqlst.defined_vars['X']._q_invariant, True)
+        self.assertEquals(rqlst.defined_vars['Y']._q_invariant, False)
+
+    def test_0_11(self):
+        rqlst = self._prepare('Any X WHERE X todo_by Y, X is Affaire')
+        self.assertEquals(rqlst.defined_vars['X']._q_invariant, False)
+        self.assertEquals(rqlst.defined_vars['Y']._q_invariant, True)
+
+    def test_0_12(self):        
+        rqlst = self._prepare('Personne P WHERE P concerne A, A concerne S, S nom "Logilab"')
+        self.assertEquals(rqlst.defined_vars['P']._q_invariant, True)
+        self.assertEquals(rqlst.defined_vars['A']._q_invariant, True)
+        self.assertEquals(rqlst.defined_vars['S']._q_invariant, False)
+        
+    def test_1_0(self):
+        rqlst = self._prepare('Any X,Y WHERE X created_by Y, X eid 5, NOT Y eid 6')
+        self.assertEquals(rqlst.defined_vars['Y']._q_invariant, True)
+                
+    def test_1_1(self):
+        rqlst = self._prepare('Any X,Y WHERE X created_by Y, X eid 5, NOT Y eid IN (6,7)')
+        self.assertEquals(rqlst.defined_vars['Y']._q_invariant, True)
+                
+    def test_2(self):
+        rqlst = self._prepare('Any X WHERE X identity Y, Y eid 1')
+        self.assertEquals(rqlst.defined_vars['X']._q_invariant, False)
+        
+    def test_7(self):
+        rqlst = self._prepare('Personne X,Y where X nom NX, Y nom NX, X eid XE, not Y eid XE')
+        self.assertEquals(rqlst.defined_vars['X']._q_invariant, False)
+        self.assertEquals(rqlst.defined_vars['Y']._q_invariant, False)
+        
+    def test_optional_inlined(self):
+        rqlst = self._prepare('Any X,S where X from_state S?')
+        self.assertEquals(rqlst.defined_vars['X']._q_invariant, False)
+        self.assertEquals(rqlst.defined_vars['S']._q_invariant, True)
+                
+    def test_optional_inlined_2(self):
+        rqlst = self._prepare('Any N,A WHERE N? inline1 A')
+        self.assertEquals(rqlst.defined_vars['N']._q_invariant, False)
+        self.assertEquals(rqlst.defined_vars['A']._q_invariant, False)
+        
+    def test_optional_1(self):
+        rqlst = self._prepare('Any X,S WHERE X travaille S?')
+        self.assertEquals(rqlst.defined_vars['X']._q_invariant, False)
+        self.assertEquals(rqlst.defined_vars['S']._q_invariant, True)
+        
+    def test_greater_eid(self):
+        rqlst = self._prepare('Any X WHERE X eid > 5')
+        self.assertEquals(rqlst.defined_vars['X']._q_invariant, True)
+        
+    def test_greater_eid_typed(self):
+        rqlst = self._prepare('Any X WHERE X eid > 5, X is Note')
+        self.assertEquals(rqlst.defined_vars['X']._q_invariant, True)
+        
+    def test_max_eid(self):
+        rqlst = self._prepare('Any MAX(X)')
+        self.assertEquals(rqlst.defined_vars['X']._q_invariant, True)
+        
+    def test_max_eid_typed(self):
+        rqlst = self._prepare('Any MAX(X) WHERE X is Note')
+        self.assertEquals(rqlst.defined_vars['X']._q_invariant, False)
+        
+    def test_all_entities(self):
+        rqlst = self._prepare('Any X')
+        self.assertEquals(rqlst.defined_vars['X']._q_invariant, True)
+        
+    def test_all_typed_entity(self):
+        rqlst = self._prepare('Any X WHERE X is Note')
+        self.assertEquals(rqlst.defined_vars['X']._q_invariant, False)
+        
+    def test_has_text_1(self):
+        rqlst = self._prepare('Any X WHERE X has_text "toto tata"')
+        self.assertEquals(rqlst.defined_vars['X']._q_invariant, True)
+        self.assertEquals(rqlst.defined_vars['X'].stinfo['principal'].r_type, 'has_text')
+        
+    def test_has_text_2(self):
+        rqlst = self._prepare('Any X WHERE X is Personne, X has_text "coucou"')
+        self.assertEquals(rqlst.defined_vars['X']._q_invariant, True)
+        self.assertEquals(rqlst.defined_vars['X'].stinfo['principal'].r_type, 'has_text')
+        
+    def test_not_relation_1(self):
+        # P can't be invariant since disambiguation caused by "NOT X require_permission P"
+        # is not considered by generated sql (NOT EXISTS(...))
+        rqlst = self._prepare('Any P,G WHERE P require_group G, NOT X require_permission P')
+        self.assertEquals(rqlst.defined_vars['P']._q_invariant, False)
+        self.assertEquals(rqlst.defined_vars['G']._q_invariant, True)
+        self.assertEquals(rqlst.defined_vars['X']._q_invariant, True)
+        
+    def test_not_relation_2(self):
+        rqlst = self._prepare('TrInfo X WHERE X eid 2, NOT X from_state Y, Y is State')
+        self.assertEquals(rqlst.defined_vars['Y']._q_invariant, True)
+        self.assertEquals(rqlst.defined_vars['X']._q_invariant, False)
+                
+    def test_not_relation_3(self):
+        rqlst = self._prepare('Any X, Y WHERE X eid 1, Y eid in (2, 3)')
+        self.assertEquals(rqlst.defined_vars['Y']._q_invariant, False)
+                
+    def test_not_relation_4_1(self):
+        rqlst = self._prepare('Note X WHERE NOT Y evaluee X')
+        self.assertEquals(rqlst.defined_vars['X']._q_invariant, False)
+        self.assertEquals(rqlst.defined_vars['Y']._q_invariant, True)
+        
+    def test_not_relation_4_2(self):
+        rqlst = self._prepare('Any X WHERE NOT Y evaluee X')
+        self.assertEquals(rqlst.defined_vars['X']._q_invariant, False)
+        self.assertEquals(rqlst.defined_vars['Y']._q_invariant, True)
+        
+    def test_not_relation_4_3(self):
+        rqlst = self._prepare('Any Y WHERE NOT Y evaluee X')
+        self.assertEquals(rqlst.defined_vars['X']._q_invariant, True)
+        self.assertEquals(rqlst.defined_vars['Y']._q_invariant, False)
+        
+    def test_not_relation_4_4(self):
+        rqlst = self._prepare('Any X WHERE NOT Y evaluee X, Y is EUser')
+        self.assertEquals(rqlst.defined_vars['X']._q_invariant, False)
+        self.assertEquals(rqlst.defined_vars['Y']._q_invariant, False)
+
+    def test_not_relation_4_5(self):
+        rqlst = self._prepare('Any X WHERE NOT Y evaluee X, Y eid %s, X is Note' % self.ueid)
+        self.assertEquals(rqlst.defined_vars['X']._q_invariant, False)
+        self.assertEquals(rqlst.solutions, [{'X': 'Note'}])
+        
+    def test_not_relation_5_1(self):
+        rqlst = self._prepare('Any X,Y WHERE X name "EGroup", Y eid IN(1, 2, 3), NOT X read_permission Y')
+        self.assertEquals(rqlst.defined_vars['X']._q_invariant, False)
+        self.assertEquals(rqlst.defined_vars['Y']._q_invariant, False)
+
+    def test_not_relation_5_2(self):
+        rqlst = self._prepare('DISTINCT Any X,Y WHERE X name "EGroup", Y eid IN(1, 2, 3), NOT X read_permission Y')
+        self.assertEquals(rqlst.defined_vars['X']._q_invariant, False)
+        self.assertEquals(rqlst.defined_vars['Y']._q_invariant, False)
+
+    def test_not_relation_6(self):
+        rqlst = self._prepare('Personne P where NOT P concerne A')
+        self.assertEquals(rqlst.defined_vars['P']._q_invariant, False)
+        self.assertEquals(rqlst.defined_vars['A']._q_invariant, True)
+
+    def test_not_relation_7(self):
+        rqlst = self._prepare('Any K,V WHERE P is EProperty, P pkey K, P value V, NOT P for_user U') 
+        self.assertEquals(rqlst.defined_vars['P']._q_invariant, False)
+        self.assertEquals(rqlst.defined_vars['U']._q_invariant, True)
+       
+    def test_exists_1(self):        
+        rqlst = self._prepare('Any U WHERE U eid IN (1,2), EXISTS(X owned_by U)')
+        self.assertEquals(rqlst.defined_vars['U']._q_invariant, False)
+        self.assertEquals(rqlst.defined_vars['X']._q_invariant, True)
+       
+    def test_exists_2(self):        
+        rqlst = self._prepare('Any U WHERE EXISTS(U eid IN (1,2), X owned_by U)')
+        self.assertEquals(rqlst.defined_vars['U']._q_invariant, False)
+        self.assertEquals(rqlst.defined_vars['X']._q_invariant, True)
+
+    def test_exists_3(self):        
+        rqlst = self._prepare('Any U WHERE EXISTS(X owned_by U, X bookmarked_by U)')
+        self.assertEquals(rqlst.defined_vars['U']._q_invariant, False)
+        self.assertEquals(rqlst.defined_vars['X']._q_invariant, True)
+        
+    def test_exists_4(self):
+        rqlst = self._prepare('Any X,Y WHERE X name "EGroup", Y eid IN(1, 2, 3), EXISTS(X read_permission Y)')
+        self.assertEquals(rqlst.defined_vars['X']._q_invariant, False)
+        self.assertEquals(rqlst.defined_vars['Y']._q_invariant, False)
+        
+    def test_exists_5(self):
+        rqlst = self._prepare('DISTINCT Any X,Y WHERE X name "EGroup", Y eid IN(1, 2, 3), EXISTS(X read_permission Y)')
+        self.assertEquals(rqlst.defined_vars['X']._q_invariant, False)
+        self.assertEquals(rqlst.defined_vars['Y']._q_invariant, True)
+
+    def test_not_exists_1(self):        
+        rqlst = self._prepare('Any U WHERE NOT EXISTS(X owned_by U, X bookmarked_by U)')
+        self.assertEquals(rqlst.defined_vars['U']._q_invariant, False)
+        self.assertEquals(rqlst.defined_vars['X']._q_invariant, True)        
+
+    def test_not_exists_2(self):        
+        rqlst = self._prepare('Any X,Y WHERE X name "EGroup", Y eid IN(1, 2, 3), NOT EXISTS(X read_permission Y)')
+        self.assertEquals(rqlst.defined_vars['Y']._q_invariant, False)
+
+    def test_not_exists_distinct_1(self):        
+        rqlst = self._prepare('DISTINCT Any X,Y WHERE X name "EGroup", Y eid IN(1, 2, 3), NOT EXISTS(X read_permission Y)')
+        self.assertEquals(rqlst.defined_vars['Y']._q_invariant, False)
+        
+    def test_or_1(self):        
+        rqlst = self._prepare('Any X WHERE X concerne B OR C concerne X, B eid 12, C eid 13')
+        self.assertEquals(rqlst.defined_vars['X']._q_invariant, False)
+
+    def test_or_2(self):        
+        rqlst = self._prepare('Any X WHERE X created_by U, X concerne B OR C concerne X, B eid 12, C eid 13')
+        self.assertEquals(rqlst.defined_vars['X']._q_invariant, True)
+        self.assertEquals(rqlst.defined_vars['U']._q_invariant, True) 
+        self.assertEquals(rqlst.defined_vars['X'].stinfo['principal'].r_type, 'created_by')
+
+    def test_or_3(self):        
+        rqlst = self._prepare('Any N WHERE A evaluee N or EXISTS(N todo_by U)')
+        self.assertEquals(rqlst.defined_vars['N']._q_invariant, False)
+        self.assertEquals(rqlst.defined_vars['A']._q_invariant, True) 
+        self.assertEquals(rqlst.defined_vars['U']._q_invariant, True) 
+        
+    def test_or_exists_1(self):
+        # query generated by security rewriting
+        rqlst = self._prepare('DISTINCT Any A,S WHERE A is Affaire, S nom "chouette", S is IN(Division, Societe, SubDivision),'
+                              '(EXISTS(A owned_by %(D)s)) '
+                              'OR ((((EXISTS(E concerne C?, C owned_by %(D)s, A identity E, C is Note, E is Affaire)) '
+                              'OR (EXISTS(I concerne H?, H owned_by %(D)s, H is Societe, A identity I, I is Affaire))) '
+                              'OR (EXISTS(J concerne G?, G owned_by %(D)s, G is SubDivision, A identity J, J is Affaire))) '
+                              'OR (EXISTS(K concerne F?, F owned_by %(D)s, F is Division, A identity K, K is Affaire)))')
+        self.assertEquals(rqlst.defined_vars['A']._q_invariant, False)
+        self.assertEquals(rqlst.defined_vars['S']._q_invariant, False)
+
+    def test_or_exists_2(self):        
+        rqlst = self._prepare('Any U WHERE EXISTS(U in_group G, G name "managers") OR EXISTS(X owned_by U, X bookmarked_by U)')
+        self.assertEquals(rqlst.defined_vars['U']._q_invariant, False)
+        self.assertEquals(rqlst.defined_vars['G']._q_invariant, False)
+        self.assertEquals(rqlst.defined_vars['X']._q_invariant, True)
+        
+    def test_or_exists_3(self):        
+        rqlst = self._prepare('Any COUNT(S),CS GROUPBY CS ORDERBY 1 DESC LIMIT 10 '
+                              'WHERE C is Societe, S concerne C, C nom CS, '
+                              '(EXISTS(S owned_by 1)) OR (EXISTS(S documented_by N, N title "published"))')
+        self.assertEquals(rqlst.defined_vars['S']._q_invariant, True)
+        rqlst = self._prepare('Any COUNT(S),CS GROUPBY CS ORDERBY 1 DESC LIMIT 10 '
+                              'WHERE S is Affaire, C is Societe, S concerne C, C nom CS, '
+                              '(EXISTS(S owned_by 1)) OR (EXISTS(S documented_by N, N title "published"))')
+        self.assertEquals(rqlst.defined_vars['S']._q_invariant, True)
+
+if __name__ == '__main__':
+    from logilab.common.testlib import unittest_main
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/unittest_rqlrewrite.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,187 @@
+from logilab.common.testlib import unittest_main, TestCase
+from logilab.common.testlib import mock_object
+
+from rql import parse, nodes, RQLHelper
+
+from cubicweb import Unauthorized
+from cubicweb.server.rqlrewrite import RQLRewriter
+from cubicweb.devtools import repotest, TestServerConfiguration
+
+config = TestServerConfiguration('data')
+config.bootstrap_cubes()
+schema = config.load_schema()
+schema.add_relation_def(mock_object(subject='Card', name='in_state', object='State', cardinality='1*'))
+                        
+rqlhelper = RQLHelper(schema, special_relations={'eid': 'uid',
+                                                 'has_text': 'fti'})
+
+def setup_module(*args):
+    repotest.do_monkey_patch()
+
+def teardown_module(*args):
+    repotest.undo_monkey_patch()
+    
+def eid_func_map(eid):
+    return {1: 'EUser',
+            2: 'Card'}[eid]
+
+def rewrite(rqlst, snippets_map, kwargs):
+    class FakeQuerier:
+        schema = schema
+        @staticmethod
+        def solutions(sqlcursor, mainrqlst, kwargs):
+            rqlhelper.compute_solutions(rqlst, {'eid': eid_func_map}, kwargs=kwargs)
+        class _rqlhelper:
+            @staticmethod
+            def annotate(rqlst):
+                rqlhelper.annotate(rqlst)
+            @staticmethod
+            def simplify(mainrqlst, needcopy=False):
+                rqlhelper.simplify(rqlst, needcopy)
+    rewriter = RQLRewriter(FakeQuerier, mock_object(user=(mock_object(eid=1))))
+    for v, snippets in snippets_map.items():
+        snippets_map[v] = [mock_object(snippet_rqlst=parse('Any X WHERE '+snippet).children[0],
+                                       expression='Any X WHERE '+snippet) 
+                           for snippet in snippets]
+    rqlhelper.compute_solutions(rqlst.children[0], {'eid': eid_func_map}, kwargs=kwargs)
+    solutions = rqlst.children[0].solutions
+    rewriter.rewrite(rqlst.children[0], snippets_map.items(), solutions, kwargs)
+    test_vrefs(rqlst.children[0])
+    return rewriter.rewritten
+
+def test_vrefs(node):
+    vrefmap = {}
+    for vref in node.iget_nodes(nodes.VariableRef):
+        vrefmap.setdefault(vref.name, set()).add(vref)
+    for var in node.defined_vars.itervalues():
+        assert not (var.stinfo['references'] ^ vrefmap[var.name])
+        assert (var.stinfo['references'])
+
+class RQLRewriteTC(TestCase):
+    """a faire:
+
+    * optimisation: detecter les relations utilisees dans les rqlexpressions qui
+      sont presentes dans la requete de depart pour les reutiliser si possible
+      
+    * "has_<ACTION>_permission" ?
+    """
+    
+    def test_base_var(self):
+        card_constraint = ('X in_state S, U in_group G, P require_state S,'
+                           'P name "read", P require_group G')
+        rqlst = parse('Card C')
+        rewrite(rqlst, {'C': (card_constraint,)}, {})
+        self.failUnlessEqual(rqlst.as_string(),
+                             u"Any C WHERE C is Card, B eid %(D)s, "
+                             "EXISTS(C in_state A, B in_group E, F require_state A, "
+                             "F name 'read', F require_group E, A is State, E is EGroup, F is EPermission)")
+        
+    def test_multiple_var(self):
+        card_constraint = ('X in_state S, U in_group G, P require_state S,'
+                           'P name "read", P require_group G')
+        affaire_constraints = ('X ref LIKE "PUBLIC%"', 'U in_group G, G name "public"')
+        kwargs = {'u':2}
+        rqlst = parse('Any S WHERE S documented_by C, C eid %(u)s')
+        rewrite(rqlst, {'C': (card_constraint,), 'S': affaire_constraints},
+                kwargs)
+        self.assertTextEquals(rqlst.as_string(),
+                             "Any S WHERE S documented_by C, C eid %(u)s, B eid %(D)s, "
+                             "EXISTS(C in_state A, B in_group E, F require_state A, "
+                             "F name 'read', F require_group E, A is State, E is EGroup, F is EPermission), "
+                             "(EXISTS(S ref LIKE 'PUBLIC%')) OR (EXISTS(B in_group G, G name 'public', G is EGroup)), "
+                             "S is Affaire")
+        self.failUnless('D' in kwargs)
+        
+    def test_or(self):
+        constraint = '(X identity U) OR (X in_state ST, CL identity U, CL in_state ST, ST name "subscribed")'
+        rqlst = parse('Any S WHERE S owned_by C, C eid %(u)s')
+        rewrite(rqlst, {'C': (constraint,)}, {'u':1})
+        self.failUnlessEqual(rqlst.as_string(),
+                             "Any S WHERE S owned_by C, C eid %(u)s, A eid %(B)s, "
+                             "EXISTS((C identity A) OR (C in_state D, E identity A, "
+                             "E in_state D, D name 'subscribed'), D is State, E is EUser), "
+                             "S is IN(Affaire, Basket, Bookmark, Card, Comment, Division, EConstraint, EConstraintType, EEType, EFRDef, EGroup, ENFRDef, EPermission, EProperty, ERType, EUser, Email, EmailAddress, EmailPart, EmailThread, File, Folder, Image, Note, Personne, RQLExpression, Societe, State, SubDivision, Tag, TrInfo, Transition)")
+
+    def test_simplified_rqlst(self):
+        card_constraint = ('X in_state S, U in_group G, P require_state S,'
+                           'P name "read", P require_group G')
+        rqlst = parse('Any 2') # this is the simplified rql st for Any X WHERE X eid 12
+        rewrite(rqlst, {'2': (card_constraint,)}, {})
+        self.failUnlessEqual(rqlst.as_string(),
+                             u"Any 2 WHERE B eid %(C)s, "
+                             "EXISTS(2 in_state A, B in_group D, E require_state A, "
+                             "E name 'read', E require_group D, A is State, D is EGroup, E is EPermission)")
+
+    def test_optional_var(self):
+        card_constraint = ('X in_state S, U in_group G, P require_state S,'
+                           'P name "read", P require_group G')
+        rqlst = parse('Any A,C WHERE A documented_by C?')
+        rewrite(rqlst, {'C': (card_constraint,)}, {})
+        self.failUnlessEqual(rqlst.as_string(),
+                             "Any A,C WHERE A documented_by C?, A is Affaire "
+                             "WITH C BEING "
+                             "(Any C WHERE C in_state B, D in_group F, G require_state B, G name 'read', "
+                             "G require_group F, D eid %(A)s, C is Card)")
+        rqlst = parse('Any A,C,T WHERE A documented_by C?, C title T')
+        rewrite(rqlst, {'C': (card_constraint,)}, {})
+        self.failUnlessEqual(rqlst.as_string(),
+                             "Any A,C,T WHERE A documented_by C?, A is Affaire "
+                             "WITH C,T BEING "
+                             "(Any C,T WHERE C in_state B, D in_group F, G require_state B, G name 'read', "
+                             "G require_group F, C title T, D eid %(A)s, C is Card)")
+        
+    def test_relation_optimization(self):
+        # since Card in_state State has monovalued cardinality, the in_state
+        # relation used in the rql expression can be ignored and S replaced by
+        # the variable from the incoming query
+        card_constraint = ('X in_state S, U in_group G, P require_state S,'
+                           'P name "read", P require_group G')
+        rqlst = parse('Card C WHERE C in_state STATE')
+        rewrite(rqlst, {'C': (card_constraint,)}, {})
+        self.failUnlessEqual(rqlst.as_string(),
+                             u"Any C WHERE C in_state STATE, C is Card, A eid %(B)s, "
+                             "EXISTS(A in_group D, E require_state STATE, "
+                             "E name 'read', E require_group D, D is EGroup, E is EPermission), "
+                             "STATE is State")
+
+    def test_unsupported_constraint_1(self):
+        # EUser doesn't have require_permission
+        trinfo_constraint = ('X wf_info_for Y, Y require_permission P, P name "read"')
+        rqlst = parse('Any U,T WHERE U is EUser, T wf_info_for U')
+        self.assertRaises(Unauthorized, rewrite, rqlst, {'T': (trinfo_constraint,)}, {})
+        
+    def test_unsupported_constraint_2(self):
+        trinfo_constraint = ('X wf_info_for Y, Y require_permission P, P name "read"')
+        rqlst = parse('Any U,T WHERE U is EUser, T wf_info_for U')
+        rewrite(rqlst, {'T': (trinfo_constraint, 'X wf_info_for Y, Y in_group G, G name "managers"')}, {})
+        self.failUnlessEqual(rqlst.as_string(),
+                             u"Any U,T WHERE U is EUser, T wf_info_for U, "
+                             "EXISTS(U in_group B, B name 'managers', B is EGroup), T is TrInfo")
+
+    def test_unsupported_constraint_3(self):
+        self.skip('raise unauthorized for now')
+        trinfo_constraint = ('X wf_info_for Y, Y require_permission P, P name "read"')
+        rqlst = parse('Any T WHERE T wf_info_for X')
+        rewrite(rqlst, {'T': (trinfo_constraint, 'X in_group G, G name "managers"')}, {})
+        self.failUnlessEqual(rqlst.as_string(),
+                             u'XXX dunno what should be generated')
+        
+    def test_add_ambiguity_exists(self):
+        constraint = ('X concerne Y')
+        rqlst = parse('Affaire X')
+        rewrite(rqlst, {'X': (constraint,)}, {})
+        self.failUnlessEqual(rqlst.as_string(),
+                             u"Any X WHERE X is Affaire, (((EXISTS(X concerne A, A is Division)) OR (EXISTS(X concerne D, D is SubDivision))) OR (EXISTS(X concerne C, C is Societe))) OR (EXISTS(X concerne B, B is Note))")
+        
+    def test_add_ambiguity_outerjoin(self):
+        constraint = ('X concerne Y')
+        rqlst = parse('Any X,C WHERE X? documented_by C')
+        rewrite(rqlst, {'X': (constraint,)}, {})
+        # ambiguity are kept in the sub-query, no need to be resolved using OR
+        self.failUnlessEqual(rqlst.as_string(),
+                             u"Any X,C WHERE X? documented_by C, C is Card WITH X BEING (Any X WHERE X concerne A, X is Affaire)") 
+       
+        
+        
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/unittest_schemaserial.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,177 @@
+"""unit tests for schema rql (de)serialization
+"""
+
+import sys
+from cStringIO import StringIO
+
+from logilab.common.testlib import TestCase, unittest_main
+
+from cubicweb.schema import CubicWebSchemaLoader
+from cubicweb.devtools import TestServerConfiguration
+
+loader = CubicWebSchemaLoader()
+config = TestServerConfiguration('data')
+config.bootstrap_cubes()
+loader.lib_directory = config.schemas_lib_dir()
+schema = loader.load(config)
+
+from cubicweb.server.schemaserial import *
+    
+class Schema2RQLTC(TestCase):
+        
+    def test_eschema2rql1(self):
+        self.assertListEquals(list(eschema2rql(schema.eschema('EFRDef'))),
+                              [
+            ('INSERT EEType X: X description %(description)s,X final %(final)s,X meta %(meta)s,X name %(name)s',
+             {'description': u'define a final relation: link a final relation type from a non final entity to a final entity type. used to build the application schema',
+              'meta': True, 'name': u'EFRDef', 'final': False})
+            ])
+        
+    def test_eschema2rql2(self):
+        self.assertListEquals(list(eschema2rql(schema.eschema('String'))), [
+                ('INSERT EEType X: X description %(description)s,X final %(final)s,X meta %(meta)s,X name %(name)s',
+                 {'description': u'', 'final': True, 'meta': True, 'name': u'String'})])
+    
+    def test_eschema2rql_specialization(self):
+        self.assertListEquals(list(specialize2rql(schema)),
+                              [
+                ('SET X specializes ET WHERE X name %(x)s, ET name %(et)s',
+                 {'x': 'Division', 'et': 'Societe'}),
+                ('SET X specializes ET WHERE X name %(x)s, ET name %(et)s',
+                 {'x': 'SubDivision', 'et': 'Division'})])
+        
+    def test_rschema2rql1(self):
+        self.assertListEquals(list(rschema2rql(schema.rschema('relation_type'))),
+                             [
+            ('INSERT ERType X: X description %(description)s,X final %(final)s,X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,X meta %(meta)s,X name %(name)s,X symetric %(symetric)s',
+             {'description': u'link a relation definition to its relation type', 'meta': True, 'symetric': False, 'name': u'relation_type', 'final' : False, 'fulltext_container': None, 'inlined': True}),
+            ('INSERT ENFRDef X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE name %(se)s,ER name %(rt)s,OE name %(oe)s',
+             {'rt': 'relation_type', 'description': u'', 'composite': u'object', 'oe': 'ERType',
+              'ordernum': 1, 'cardinality': u'1*', 'se': 'EFRDef'}),
+            ('INSERT EConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT name %(ctname)s, EDEF relation_type ER, EDEF from_entity SE, EDEF to_entity OE, ER name %(rt)s, SE name %(se)s, OE name %(oe)s, EDEF is ENFRDef',
+             {'rt': 'relation_type', 'oe': 'ERType', 'ctname': u'RQLConstraint', 'se': 'EFRDef', 'value': u'O final TRUE'}),
+            ('INSERT ENFRDef X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE name %(se)s,ER name %(rt)s,OE name %(oe)s',
+             {'rt': 'relation_type', 'description': u'', 'composite': u'object', 'oe': 'ERType',
+              'ordernum': 1, 'cardinality': u'1*', 'se': 'ENFRDef'}),
+            ('INSERT EConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT name %(ctname)s, EDEF relation_type ER, EDEF from_entity SE, EDEF to_entity OE, ER name %(rt)s, SE name %(se)s, OE name %(oe)s, EDEF is ENFRDef',
+             {'rt': 'relation_type', 'oe': 'ERType', 'ctname': u'RQLConstraint', 'se': 'ENFRDef', 'value': u'O final FALSE'}),
+            ])
+        
+    def test_rschema2rql2(self):
+        expected = [
+            ('INSERT ERType X: X description %(description)s,X final %(final)s,X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,X meta %(meta)s,X name %(name)s,X symetric %(symetric)s', {'description': u'core relation giving to a group the permission to add an entity or relation type', 'meta': True, 'symetric': False, 'name': u'add_permission', 'final': False, 'fulltext_container': None, 'inlined': False}),
+            ('INSERT ENFRDef X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE name %(se)s,ER name %(rt)s,OE name %(oe)s',
+             {'rt': 'add_permission', 'description': u'rql expression allowing to add entities/relations of this type', 'composite': 'subject', 'oe': 'RQLExpression', 'ordernum': 5, 'cardinality': u'*?', 'se': 'EEType'}),
+            ('INSERT ENFRDef X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE name %(se)s,ER name %(rt)s,OE name %(oe)s',
+             {'rt': 'add_permission', 'description': u'rql expression allowing to add entities/relations of this type', 'composite': 'subject', 'oe': 'RQLExpression', 'ordernum': 5, 'cardinality': u'*?', 'se': 'ERType'}),
+            
+            ('INSERT ENFRDef X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE name %(se)s,ER name %(rt)s,OE name %(oe)s',
+             {'rt': 'add_permission', 'description': u'groups allowed to add entities/relations of this type', 'composite': None, 'oe': 'EGroup', 'ordernum': 3, 'cardinality': u'**', 'se': 'EEType'}),
+            ('INSERT ENFRDef X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE name %(se)s,ER name %(rt)s,OE name %(oe)s',
+             {'rt': 'add_permission', 'description': u'groups allowed to add entities/relations of this type', 'composite': None, 'oe': 'EGroup', 'ordernum': 3, 'cardinality': u'**', 'se': 'ERType'}),
+            ]
+        for i, (rql, args) in enumerate(rschema2rql(schema.rschema('add_permission'))):
+            yield self.assertEquals, (rql, args), expected[i]
+        
+    def test_rschema2rql3(self):
+        self.assertListEquals(list(rschema2rql(schema.rschema('cardinality'))), 
+                             [
+            ('INSERT ERType X: X description %(description)s,X final %(final)s,X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,X meta %(meta)s,X name %(name)s,X symetric %(symetric)s',
+             {'description': u'', 'meta': False, 'symetric': False, 'name': u'cardinality', 'final': True, 'fulltext_container': None, 'inlined': False}),
+
+            ('INSERT EFRDef X: X cardinality %(cardinality)s,X defaultval %(defaultval)s,X description %(description)s,X fulltextindexed %(fulltextindexed)s,X indexed %(indexed)s,X internationalizable %(internationalizable)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE name %(se)s,ER name %(rt)s,OE name %(oe)s',
+             {'rt': 'cardinality', 'description': u'subject/object cardinality', 'internationalizable': True, 'fulltextindexed': False, 'ordernum': 5, 'defaultval': None, 'indexed': False, 'cardinality': u'?1', 'oe': 'String', 'se': 'EFRDef'}),
+            ('INSERT EConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT name %(ctname)s, EDEF relation_type ER, EDEF from_entity SE, EDEF to_entity OE, ER name %(rt)s, SE name %(se)s, OE name %(oe)s, EDEF is EFRDef',
+             {'rt': 'cardinality', 'oe': 'String', 'ctname': u'SizeConstraint', 'se': 'EFRDef', 'value': u'max=2'}),
+            ('INSERT EConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT name %(ctname)s, EDEF relation_type ER, EDEF from_entity SE, EDEF to_entity OE, ER name %(rt)s, SE name %(se)s, OE name %(oe)s, EDEF is EFRDef',
+             {'rt': 'cardinality', 'oe': 'String', 'ctname': u'StaticVocabularyConstraint', 'se': 'EFRDef', 'value': u"u'?1', u'11', u'??', u'1?'"}),
+
+            ('INSERT EFRDef X: X cardinality %(cardinality)s,X defaultval %(defaultval)s,X description %(description)s,X fulltextindexed %(fulltextindexed)s,X indexed %(indexed)s,X internationalizable %(internationalizable)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE name %(se)s,ER name %(rt)s,OE name %(oe)s',
+             {'rt': 'cardinality', 'description': u'subject/object cardinality', 'internationalizable': True, 'fulltextindexed': False, 'ordernum': 5, 'defaultval': None, 'indexed': False, 'cardinality': u'?1', 'oe': 'String', 'se': 'ENFRDef'}),
+            ('INSERT EConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT name %(ctname)s, EDEF relation_type ER, EDEF from_entity SE, EDEF to_entity OE, ER name %(rt)s, SE name %(se)s, OE name %(oe)s, EDEF is EFRDef',
+             {'rt': 'cardinality', 'oe': 'String', 'ctname': u'SizeConstraint', 'se': 'ENFRDef', 'value': u'max=2'}),
+            ('INSERT EConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT name %(ctname)s, EDEF relation_type ER, EDEF from_entity SE, EDEF to_entity OE, ER name %(rt)s, SE name %(se)s, OE name %(oe)s, EDEF is EFRDef',
+             {'rt': 'cardinality', 'oe': 'String', 'ctname': u'StaticVocabularyConstraint', 'se': 'ENFRDef', 'value': u"u'?*', u'1*', u'+*', u'**', u'?+', u'1+', u'++', u'*+', u'?1', u'11', u'+1', u'*1', u'??', u'1?', u'+?', u'*?'"}),
+            ])
+        
+
+    def test_updateeschema2rql1(self):
+        self.assertListEquals(list(updateeschema2rql(schema.eschema('EFRDef'))),
+                              [('SET X description %(description)s,X final %(final)s,X meta %(meta)s,X name %(name)s WHERE X is EEType, X name %(et)s',
+                                {'description': u'define a final relation: link a final relation type from a non final entity to a final entity type. used to build the application schema', 'meta': True, 'et': 'EFRDef', 'final': False, 'name': u'EFRDef'}),
+                               ])
+        
+    def test_updateeschema2rql2(self):
+        self.assertListEquals(list(updateeschema2rql(schema.eschema('String'))),
+                              [('SET X description %(description)s,X final %(final)s,X meta %(meta)s,X name %(name)s WHERE X is EEType, X name %(et)s',
+                                {'description': u'', 'meta': True, 'et': 'String', 'final': True, 'name': u'String'})
+                               ])
+        
+    def test_updaterschema2rql1(self):
+        self.assertListEquals(list(updaterschema2rql(schema.rschema('relation_type'))),
+                             [
+            ('SET X description %(description)s,X final %(final)s,X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,X meta %(meta)s,X name %(name)s,X symetric %(symetric)s WHERE X is ERType, X name %(rt)s',
+             {'rt': 'relation_type', 'symetric': False,
+              'description': u'link a relation definition to its relation type',
+              'meta': True, 'final': False, 'fulltext_container': None, 'inlined': True, 'name': u'relation_type'})
+            ])
+        
+    def test_updaterschema2rql2(self):
+        expected = [
+            ('SET X description %(description)s,X final %(final)s,X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,X meta %(meta)s,X name %(name)s,X symetric %(symetric)s WHERE X is ERType, X name %(rt)s',
+             {'rt': 'add_permission', 'symetric': False,
+              'description': u'core relation giving to a group the permission to add an entity or relation type',
+              'meta': True, 'final': False, 'fulltext_container': None, 'inlined': False, 'name': u'add_permission'})
+            ]
+        for i, (rql, args) in enumerate(updaterschema2rql(schema.rschema('add_permission'))):
+            yield self.assertEquals, (rql, args), expected[i]
+
+class Perms2RQLTC(TestCase):
+    GROUP_MAPPING = {
+        'managers': 0,
+        'users':  1,
+        'guests': 2,
+        'owners': 3,
+        }
+    
+    def test_eperms2rql1(self):
+        self.assertListEquals([rql for rql, kwargs in erperms2rql(schema.eschema('EEType'), self.GROUP_MAPPING)],
+                              ['SET X read_permission Y WHERE X is EEType, X name "EEType", Y eid 2',
+                               'SET X read_permission Y WHERE X is EEType, X name "EEType", Y eid 0',
+                               'SET X read_permission Y WHERE X is EEType, X name "EEType", Y eid 1',
+                               'SET X add_permission Y WHERE X is EEType, X name "EEType", Y eid 0',
+                               'SET X update_permission Y WHERE X is EEType, X name "EEType", Y eid 0',
+                               'SET X update_permission Y WHERE X is EEType, X name "EEType", Y eid 3',
+                               'SET X delete_permission Y WHERE X is EEType, X name "EEType", Y eid 0',
+                               ])
+        
+    def test_rperms2rql2(self):
+        self.assertListEquals([rql for rql, kwargs in erperms2rql(schema.rschema('read_permission'), self.GROUP_MAPPING)],
+                              ['SET X read_permission Y WHERE X is ERType, X name "read_permission", Y eid 2',
+                               'SET X read_permission Y WHERE X is ERType, X name "read_permission", Y eid 0',
+                               'SET X read_permission Y WHERE X is ERType, X name "read_permission", Y eid 1',
+                               'SET X add_permission Y WHERE X is ERType, X name "read_permission", Y eid 0',
+                               'SET X delete_permission Y WHERE X is ERType, X name "read_permission", Y eid 0',
+                               ])
+        
+    def test_rperms2rql3(self):
+        self.assertListEquals([rql for rql, kwargs in erperms2rql(schema.rschema('name'), self.GROUP_MAPPING)],
+                              ['SET X read_permission Y WHERE X is ERType, X name "name", Y eid 2',
+                               'SET X read_permission Y WHERE X is ERType, X name "name", Y eid 0',
+                               'SET X read_permission Y WHERE X is ERType, X name "name", Y eid 1',
+                               'SET X add_permission Y WHERE X is ERType, X name "name", Y eid 2',
+                               'SET X add_permission Y WHERE X is ERType, X name "name", Y eid 0',
+                               'SET X add_permission Y WHERE X is ERType, X name "name", Y eid 1',
+                               'SET X delete_permission Y WHERE X is ERType, X name "name", Y eid 2',
+                               'SET X delete_permission Y WHERE X is ERType, X name "name", Y eid 0',
+                               'SET X delete_permission Y WHERE X is ERType, X name "name", Y eid 1',
+                               ])
+        
+    #def test_perms2rql(self):
+    #    self.assertListEquals(perms2rql(schema, self.GROUP_MAPPING),
+    #                         ['INSERT EEType X: X name 'Societe', X final FALSE'])
+        
+
+
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/unittest_security.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,507 @@
+"""functional tests for server security
+"""
+import sys
+
+from logilab.common.testlib import unittest_main, TestCase
+from cubicweb.devtools.apptest import RepositoryBasedTC
+
+from cubicweb import Unauthorized, ValidationError
+from cubicweb.server.querier import check_read_access
+
+class BaseSecurityTC(RepositoryBasedTC):
+
+    def setUp(self):
+        RepositoryBasedTC.setUp(self)
+        self.create_user('iaminusersgrouponly')
+        self.readoriggroups = self.schema['Personne'].get_groups('read')
+        self.addoriggroups = self.schema['Personne'].get_groups('add')
+        
+    def tearDown(self):
+        RepositoryBasedTC.tearDown(self)
+        self.schema['Personne'].set_groups('read', self.readoriggroups)
+        self.schema['Personne'].set_groups('add', self.addoriggroups)
+
+        
+class LowLevelSecurityFunctionTC(BaseSecurityTC):
+    
+    def test_check_read_access(self):
+        rql = u'Personne U where U nom "managers"'
+        rqlst = self.repo.querier._rqlhelper.parse(rql).children[0]
+        origgroups = self.schema['Personne'].get_groups('read')
+        self.schema['Personne'].set_groups('read', ('users', 'managers'))
+        self.repo.querier._rqlhelper.compute_solutions(rqlst)
+        solution = rqlst.solutions[0]
+        check_read_access(self.schema, self.session.user, rqlst, solution)
+        cnx = self.login('anon')
+        cu = cnx.cursor()
+        self.assertRaises(Unauthorized,
+                          check_read_access,
+                          self.schema, cnx.user(self.current_session()), rqlst, solution)
+        self.assertRaises(Unauthorized, cu.execute, rql)
+            
+    def test_upassword_not_selectable(self):
+        self.assertRaises(Unauthorized,
+                          self.execute, 'Any X,P WHERE X is EUser, X upassword P')
+        self.rollback()
+        cnx = self.login('iaminusersgrouponly')
+        cu = cnx.cursor()
+        self.assertRaises(Unauthorized,
+                          cu.execute, 'Any X,P WHERE X is EUser, X upassword P')
+        
+    
+class SecurityTC(BaseSecurityTC):
+    
+    def setUp(self):
+        BaseSecurityTC.setUp(self)
+        # implicitly test manager can add some entities
+        self.execute("INSERT Affaire X: X sujet 'cool'")
+        self.execute("INSERT Societe X: X nom 'logilab'")
+        self.execute("INSERT Personne X: X nom 'bidule'")
+        self.execute('INSERT EGroup X: X name "staff"')
+        self.commit()
+
+    def test_insert_security(self):
+        cnx = self.login('anon')
+        cu = cnx.cursor()
+        cu.execute("INSERT Personne X: X nom 'bidule'")
+        self.assertRaises(Unauthorized, cnx.commit)
+        self.assertEquals(cu.execute('Personne X').rowcount, 1)
+        
+    def test_insert_rql_permission(self):
+        # test user can only add an affaire related to a societe he owns
+        cnx = self.login('iaminusersgrouponly')
+        cu = cnx.cursor()
+        cu.execute("INSERT Affaire X: X sujet 'cool'")
+        self.assertRaises(Unauthorized, cnx.commit)
+        # test nothing has actually been inserted
+        self.restore_connection()
+        self.assertEquals(self.execute('Affaire X').rowcount, 1)
+        cnx = self.login('iaminusersgrouponly')
+        cu = cnx.cursor()
+        cu.execute("INSERT Affaire X: X sujet 'cool'")
+        cu.execute("INSERT Societe X: X nom 'chouette'")
+        cu.execute("SET A concerne S WHERE A sujet 'cool', S nom 'chouette'")
+        cnx.commit()
+        
+    def test_update_security_1(self):
+        cnx = self.login('anon')
+        cu = cnx.cursor()
+        # local security check
+        cu.execute( "SET X nom 'bidulechouette' WHERE X is Personne")
+        self.assertRaises(Unauthorized, cnx.commit)
+        self.restore_connection()
+        self.assertEquals(self.execute('Personne X WHERE X nom "bidulechouette"').rowcount, 0)
+        
+    def test_update_security_2(self):
+        cnx = self.login('anon')
+        cu = cnx.cursor()
+        self.repo.schema['Personne'].set_groups('read', ('users', 'managers'))
+        self.repo.schema['Personne'].set_groups('add', ('guests', 'users', 'managers'))
+        self.assertRaises(Unauthorized, cu.execute, "SET X nom 'bidulechouette' WHERE X is Personne")
+        #self.assertRaises(Unauthorized, cnx.commit)
+        # test nothing has actually been inserted
+        self.restore_connection()
+        self.assertEquals(self.execute('Personne X WHERE X nom "bidulechouette"').rowcount, 0)
+
+    def test_update_security_3(self):
+        cnx = self.login('iaminusersgrouponly')
+        cu = cnx.cursor()
+        cu.execute("INSERT Personne X: X nom 'biduuule'")
+        cu.execute("INSERT Societe X: X nom 'looogilab'")
+        cu.execute("SET X travaille S WHERE X nom 'biduuule', S nom 'looogilab'")
+        
+    def test_update_rql_permission(self):
+        self.execute("SET A concerne S WHERE A is Affaire, S is Societe")
+        self.commit()
+        # test user can only update an affaire related to a societe he owns
+        cnx = self.login('iaminusersgrouponly')
+        cu = cnx.cursor()
+        cu.execute("SET X sujet 'pascool' WHERE X is Affaire")
+        # this won't actually do anything since the selection query won't return anything
+        cnx.commit()
+        # to actually get Unauthorized exception, try to update an entity we can read
+        cu.execute("SET X nom 'toto' WHERE X is Societe")
+        self.assertRaises(Unauthorized, cnx.commit)        
+        cu.execute("INSERT Affaire X: X sujet 'pascool'")
+        cu.execute("INSERT Societe X: X nom 'chouette'")
+        cu.execute("SET A concerne S WHERE A sujet 'pascool', S nom 'chouette'")
+        cu.execute("SET X sujet 'habahsicestcool' WHERE X sujet 'pascool'")
+        cnx.commit()
+    
+    def test_delete_security(self):
+        # FIXME: sample below fails because we don't detect "owner" can't delete
+        # user anyway, and since no user with login == 'bidule' exists, no
+        # exception is raised
+        #user._groups = {'guests':1}
+        #self.assertRaises(Unauthorized,
+        #                  self.o.execute, user, "DELETE EUser X WHERE X login 'bidule'")
+        # check local security
+        cnx = self.login('iaminusersgrouponly')
+        cu = cnx.cursor()
+        self.assertRaises(Unauthorized, cu.execute, "DELETE EGroup Y WHERE Y name 'staff'")
+        
+    def test_delete_rql_permission(self):
+        self.execute("SET A concerne S WHERE A is Affaire, S is Societe")
+        self.commit()
+        # test user can only delete an affaire related to a societe he owns
+        cnx = self.login('iaminusersgrouponly')
+        cu = cnx.cursor()
+        # this won't actually do anything since the selection query won't return anything        
+        cu.execute("DELETE Affaire X")
+        cnx.commit()
+        # to actually get Unauthorized exception, try to delete an entity we can read
+        self.assertRaises(Unauthorized, cu.execute, "DELETE Societe S")
+        cu.execute("INSERT Affaire X: X sujet 'pascool'")
+        cu.execute("INSERT Societe X: X nom 'chouette'")
+        cu.execute("SET A concerne S WHERE A sujet 'pascool', S nom 'chouette'")
+        cnx.commit()
+##         # this one should fail since it will try to delete two affaires, one authorized
+##         # and the other not
+##         self.assertRaises(Unauthorized, cu.execute, "DELETE Affaire X")
+        cu.execute("DELETE Affaire X WHERE X sujet 'pascool'")
+        cnx.commit()
+
+
+    def test_insert_relation_rql_permission(self):
+        cnx = self.login('iaminusersgrouponly')
+        session = self.current_session()
+        cu = cnx.cursor(session)
+        cu.execute("SET A concerne S WHERE A is Affaire, S is Societe")
+        # should raise Unauthorized since the user doesn't own S
+        # though this won't actually do anything since the selection query won't return anything
+        cnx.commit()
+        # to actually get Unauthorized exception, try to insert a relation where we can read both entities
+        rset = cu.execute('Personne P')
+        self.assertEquals(len(rset), 1)
+        ent = rset.get_entity(0, 0)
+        session.set_pool() # necessary
+        self.assertRaises(Unauthorized,
+                          ent.e_schema.check_perm, session, 'update', ent.eid)
+        self.assertRaises(Unauthorized,
+                          cu.execute, "SET P travaille S WHERE P is Personne, S is Societe")
+        # test nothing has actually been inserted:
+        self.assertEquals(cu.execute('Any P,S WHERE P travaille S,P is Personne, S is Societe').rowcount, 0)
+        cu.execute("INSERT Societe X: X nom 'chouette'")
+        cu.execute("SET A concerne S WHERE A is Affaire, S nom 'chouette'")
+        cnx.commit()
+
+    def test_delete_relation_rql_permission(self):
+        self.execute("SET A concerne S WHERE A is Affaire, S is Societe")
+        self.commit()
+        cnx = self.login('iaminusersgrouponly')
+        cu = cnx.cursor()
+        # this won't actually do anything since the selection query won't return anything
+        cu.execute("DELETE A concerne S")
+        cnx.commit()
+        # to actually get Unauthorized exception, try to delete a relation we can read
+        self.restore_connection()
+        eid = self.execute("INSERT Affaire X: X sujet 'pascool'")[0][0]
+        self.execute('SET X owned_by U WHERE X eid %(x)s, U login "iaminusersgrouponly"', {'x': eid}, 'x')
+        self.execute("SET A concerne S WHERE A sujet 'pascool', S is Societe")
+        self.commit()
+        cnx = self.login('iaminusersgrouponly')
+        cu = cnx.cursor()
+        self.assertRaises(Unauthorized, cu.execute, "DELETE A concerne S")
+        cu.execute("INSERT Societe X: X nom 'chouette'")
+        cu.execute("SET A concerne S WHERE A is Affaire, S nom 'chouette'")
+        cnx.commit()
+        cu.execute("DELETE A concerne S WHERE S nom 'chouette'")
+
+
+    def test_user_can_change_its_upassword(self):
+        ueid = self.create_user('user')
+        cnx = self.login('user')
+        cu = cnx.cursor()
+        cu.execute('SET X upassword %(passwd)s WHERE X eid %(x)s',
+                   {'x': ueid, 'passwd': 'newpwd'}, 'x')
+        cnx.commit()
+        cnx.close()
+        cnx = self.login('user', 'newpwd')
+
+    def test_user_cant_change_other_upassword(self):
+        ueid = self.create_user('otheruser')
+        cnx = self.login('iaminusersgrouponly')
+        cu = cnx.cursor()
+        cu.execute('SET X upassword %(passwd)s WHERE X eid %(x)s',
+                   {'x': ueid, 'passwd': 'newpwd'}, 'x')
+        self.assertRaises(Unauthorized, cnx.commit)
+
+    # read security test
+    
+    def test_read_base(self):
+        self.schema['Personne'].set_groups('read', ('users', 'managers'))
+        cnx = self.login('anon')
+        cu = cnx.cursor()
+        self.assertRaises(Unauthorized,
+                          cu.execute, 'Personne U where U nom "managers"')
+
+    def test_read_erqlexpr(self):
+        eid = self.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
+        self.commit()
+        cnx = self.login('iaminusersgrouponly')
+        cu = cnx.cursor()
+        rset = cu.execute('Affaire X')
+        self.assertEquals(rset.rows, [])
+        self.assertRaises(Unauthorized, cu.execute, 'Any X WHERE X eid %(x)s', {'x': eid}, 'x')
+        #  cache test
+        self.assertRaises(Unauthorized, cu.execute, 'Any X WHERE X eid %(x)s', {'x': eid}, 'x')
+        aff2 = cu.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
+        soc1 = cu.execute("INSERT Societe X: X nom 'chouette'")[0][0]
+        cu.execute("SET A concerne S WHERE A is Affaire, S is Societe")
+        cnx.commit()
+        rset = cu.execute('Any X WHERE X eid %(x)s', {'x': aff2}, 'x')
+        self.assertEquals(rset.rows, [[aff2]])
+        
+    def test_read_erqlexpr_has_text1(self):
+        aff1 = self.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
+        card1 = self.execute("INSERT Card X: X title 'cool'")[0][0]
+        self.execute('SET X owned_by U WHERE X eid %(x)s, U login "iaminusersgrouponly"', {'x': card1}, 'x')
+        self.commit()
+        cnx = self.login('iaminusersgrouponly')
+        cu = cnx.cursor()
+        aff2 = cu.execute("INSERT Affaire X: X sujet 'cool', X in_state S WHERE S name 'pitetre'")[0][0]
+        soc1 = cu.execute("INSERT Societe X: X nom 'chouette'")[0][0]
+        cu.execute("SET A concerne S WHERE A eid %(a)s, S eid %(s)s", {'a': aff2, 's': soc1},
+                   ('a', 's'))
+        cnx.commit()
+        self.assertRaises(Unauthorized, cu.execute, 'Any X WHERE X eid %(x)s', {'x':aff1}, 'x')
+        self.failUnless(cu.execute('Any X WHERE X eid %(x)s', {'x':aff2}, 'x'))
+        self.failUnless(cu.execute('Any X WHERE X eid %(x)s', {'x':card1}, 'x'))
+        rset = cu.execute("Any X WHERE X has_text 'cool'")
+        self.assertEquals(sorted(eid for eid, in rset.rows),
+                          [card1, aff2])
+
+    def test_read_erqlexpr_has_text2(self):
+        self.execute("INSERT Personne X: X nom 'bidule'")
+        self.execute("INSERT Societe X: X nom 'bidule'")
+        self.commit()
+        self.schema['Personne'].set_groups('read', ('managers',))
+        cnx = self.login('iaminusersgrouponly')
+        cu = cnx.cursor()
+        rset = cu.execute('Any N WHERE N has_text "bidule"')
+        self.assertEquals(len(rset.rows), 1, rset.rows)
+        rset = cu.execute('Any N WITH N BEING (Any N WHERE N has_text "bidule")')
+        self.assertEquals(len(rset.rows), 1, rset.rows)        
+
+    def test_read_erqlexpr_optional_rel(self):
+        self.execute("INSERT Personne X: X nom 'bidule'")
+        self.execute("INSERT Societe X: X nom 'bidule'")
+        self.commit()
+        self.schema['Personne'].set_groups('read', ('managers',))
+        cnx = self.login('anon')
+        cu = cnx.cursor()
+        rset = cu.execute('Any N,U WHERE N has_text "bidule", N owned_by U?')
+        self.assertEquals(len(rset.rows), 1, rset.rows)
+
+    def test_read_erqlexpr_aggregat(self):
+        self.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
+        self.commit()
+        cnx = self.login('iaminusersgrouponly')
+        cu = cnx.cursor()
+        rset = cu.execute('Any COUNT(X) WHERE X is Affaire')
+        self.assertEquals(rset.rows, [[0]])        
+        cu = cnx.cursor()
+        aff2 = cu.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
+        soc1 = cu.execute("INSERT Societe X: X nom 'chouette'")[0][0]
+        cu.execute("SET A concerne S WHERE A is Affaire, S is Societe")
+        cnx.commit()
+        rset = cu.execute('Any COUNT(X) WHERE X is Affaire')
+        self.assertEquals(rset.rows, [[1]])
+        rset = cu.execute('Any ETN, COUNT(X) GROUPBY ETN WHERE X is ET, ET name ETN')
+        values = dict(rset)
+        self.assertEquals(values['Affaire'], 1)
+        self.assertEquals(values['Societe'], 2)
+        rset = cu.execute('Any ETN, COUNT(X) GROUPBY ETN WHERE X is ET, ET name ETN WITH X BEING ((Affaire X) UNION (Societe X))')
+        self.assertEquals(len(rset), 2)
+        values = dict(rset)
+        self.assertEquals(values['Affaire'], 1)
+        self.assertEquals(values['Societe'], 2)
+        
+
+    def test_attribute_security(self):
+        # only managers should be able to edit the 'test' attribute of Personne entities
+        eid = self.execute("INSERT Personne X: X nom 'bidule', X web 'http://www.debian.org', X test TRUE")[0][0]
+        self.commit()
+        self.execute('SET X test FALSE WHERE X eid %(x)s', {'x': eid}, 'x')
+        self.commit()
+        cnx = self.login('iaminusersgrouponly')
+        cu = cnx.cursor()
+        cu.execute("INSERT Personne X: X nom 'bidule', X web 'http://www.debian.org', X test TRUE")
+        self.assertRaises(Unauthorized, cnx.commit)
+        cu.execute("INSERT Personne X: X nom 'bidule', X web 'http://www.debian.org', X test FALSE")
+        self.assertRaises(Unauthorized, cnx.commit)
+        eid = cu.execute("INSERT Personne X: X nom 'bidule', X web 'http://www.debian.org'")[0][0]
+        cnx.commit()
+        cu.execute('SET X test FALSE WHERE X eid %(x)s', {'x': eid}, 'x')
+        self.assertRaises(Unauthorized, cnx.commit)
+        cu.execute('SET X test TRUE WHERE X eid %(x)s', {'x': eid}, 'x')
+        self.assertRaises(Unauthorized, cnx.commit)
+        cu.execute('SET X web "http://www.logilab.org" WHERE X eid %(x)s', {'x': eid}, 'x')
+        cnx.commit()
+        cnx.close()
+        
+    def test_attribute_security_rqlexpr(self):
+        # Note.para attribute editable by managers or if the note is in "todo" state
+        eid = self.execute("INSERT Note X: X para 'bidule', X in_state S WHERE S name 'done'")[0][0]
+        self.commit()
+        self.execute('SET X para "truc" WHERE X eid %(x)s', {'x': eid}, 'x')
+        self.commit()
+        cnx = self.login('iaminusersgrouponly')
+        cu = cnx.cursor()
+        cu.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': eid}, 'x')
+        self.assertRaises(Unauthorized, cnx.commit)
+        eid2 = cu.execute("INSERT Note X: X para 'bidule'")[0][0]
+        cnx.commit()
+        cu.execute("SET X in_state S WHERE X eid %(x)s, S name 'done'", {'x': eid2}, 'x')
+        cnx.commit()
+        self.assertEquals(len(cu.execute('Any X WHERE X in_state S, S name "todo", X eid %(x)s', {'x': eid2}, 'x')),
+                          0)
+        cu.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': eid2}, 'x')
+        self.assertRaises(Unauthorized, cnx.commit)
+        cu.execute("SET X in_state S WHERE X eid %(x)s, S name 'todo'", {'x': eid2}, 'x')
+        cnx.commit()
+        cu.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': eid2}, 'x')
+        cnx.commit()
+
+    def test_attribute_read_security(self):
+        # anon is not allowed to see users' login, but they can see users
+        self.repo.schema['EUser'].set_groups('read', ('guests', 'users', 'managers'))
+        self.repo.schema['login'].set_groups('read', ('users', 'managers'))
+        cnx = self.login('anon')
+        cu = cnx.cursor()
+        rset = cu.execute('EUser X')
+        self.failUnless(rset)
+        x = rset.get_entity(0, 0)
+        self.assertEquals(x.login, None)
+        self.failUnless(x.creation_date)
+        x = rset.get_entity(1, 0)
+        x.complete()
+        self.assertEquals(x.login, None)
+        self.failUnless(x.creation_date)
+        cnx.rollback()
+
+        
+class BaseSchemaSecurityTC(BaseSecurityTC):
+    """tests related to the base schema permission configuration"""
+        
+    def test_user_can_delete_object_he_created(self):
+        # even if some other user has changed the object's state
+        cnx = self.login('iaminusersgrouponly')
+        cu = cnx.cursor()
+        # due to security test, affaire has to concerne a societe the user owns
+        cu.execute('INSERT Societe X: X nom "ARCTIA"')
+        cu.execute('INSERT Affaire X: X ref "ARCT01", X concerne S WHERE S nom "ARCTIA"')
+        cnx.commit()
+        self.restore_connection()
+        self.execute('SET X in_state S WHERE X ref "ARCT01", S name "ben non"')
+        self.commit()
+        self.assertEquals(len(self.execute('TrInfo X WHERE X wf_info_for A, A ref "ARCT01"')),
+                          2) 
+        self.assertEquals(len(self.execute('TrInfo X WHERE X wf_info_for A, A ref "ARCT01",'
+                                           'X owned_by U, U login "admin"')),
+                          1) # TrInfo at the above state change
+        self.assertEquals(len(self.execute('TrInfo X WHERE X wf_info_for A, A ref "ARCT01",'
+                                           'X owned_by U, U login "iaminusersgrouponly"')),
+                          1) # TrInfo created at creation time
+        cnx = self.login('iaminusersgrouponly')
+        cu = cnx.cursor()
+        cu.execute('DELETE Affaire X WHERE X ref "ARCT01"')
+        cnx.commit()
+        self.failIf(cu.execute('Affaire X'))
+
+    def test_users_and_groups_non_readable_by_guests(self):
+        cnx = self.login('anon')
+        anon = cnx.user(self.current_session())
+        cu = cnx.cursor()
+        # anonymous user can only read itself
+        rset = cu.execute('Any L WHERE X owned_by U, U login L')
+        self.assertEquals(rset.rows, [['anon']])
+        rset = cu.execute('EUser X')
+        self.assertEquals(rset.rows, [[anon.eid]])
+        # anonymous user can read groups (necessary to check allowed transitions for instance)
+        self.assert_(cu.execute('EGroup X'))
+        # should only be able to read the anonymous user, not another one
+        origuser = self.session.user
+        self.assertRaises(Unauthorized, 
+                          cu.execute, 'EUser X WHERE X eid %(x)s', {'x': origuser.eid}, 'x')
+        # nothing selected, nothing updated, no exception raised
+        #self.assertRaises(Unauthorized,
+        #                  cu.execute, 'SET X login "toto" WHERE X eid %(x)s',
+        #                  {'x': self.user.eid})
+        
+        rset = cu.execute('EUser X WHERE X eid %(x)s', {'x': anon.eid}, 'x')
+        self.assertEquals(rset.rows, [[anon.eid]])
+        # but can't modify it
+        cu.execute('SET X login "toto" WHERE X eid %(x)s', {'x': anon.eid})
+        self.assertRaises(Unauthorized, cnx.commit)
+    
+    def test_in_group_relation(self):
+        cnx = self.login('iaminusersgrouponly')
+        cu = cnx.cursor()
+        rql = u"DELETE U in_group G WHERE U login 'admin'"
+        self.assertRaises(Unauthorized, cu.execute, rql)
+        rql = u"SET U in_group G WHERE U login 'admin', G name 'users'"
+        self.assertRaises(Unauthorized, cu.execute, rql)
+
+    def test_owned_by(self):
+        self.execute("INSERT Personne X: X nom 'bidule'")
+        self.commit()
+        cnx = self.login('iaminusersgrouponly')
+        cu = cnx.cursor()
+        rql = u"SET X owned_by U WHERE U login 'iaminusersgrouponly', X is Personne"
+        self.assertRaises(Unauthorized, cu.execute, rql)
+        
+    def test_bookmarked_by_guests_security(self):
+        beid1 = self.execute('INSERT Bookmark B: B path "?vid=manage", B title "manage"')[0][0]
+        beid2 = self.execute('INSERT Bookmark B: B path "?vid=index", B title "index", B bookmarked_by U WHERE U login "anon"')[0][0]
+        self.commit()
+        cnx = self.login('anon')
+        cu = cnx.cursor()
+        anoneid = self.current_session().user.eid
+        self.assertEquals(cu.execute('Any T,P ORDERBY lower(T) WHERE B is Bookmark,B title T,B path P,'
+                                     'B bookmarked_by U, U eid %s' % anoneid).rows,
+                          [['index', '?vid=index']])
+        self.assertEquals(cu.execute('Any T,P ORDERBY lower(T) WHERE B is Bookmark,B title T,B path P,'
+                                     'B bookmarked_by U, U eid %(x)s', {'x': anoneid}).rows,
+                          [['index', '?vid=index']])
+        # can read others' bookmarks as well
+        self.assertEquals(cu.execute('Any B where B is Bookmark, NOT B bookmarked_by U').rows,
+                          [[beid1]])
+        self.assertRaises(Unauthorized, cu.execute,'DELETE B bookmarked_by U')
+        self.assertRaises(Unauthorized,
+                          cu.execute, 'SET B bookmarked_by U WHERE U eid %(x)s, B eid %(b)s',
+                          {'x': anoneid, 'b': beid1}, 'x')
+        
+
+    def test_ambigous_ordered(self):
+        cnx = self.login('anon')
+        cu = cnx.cursor()
+        names = [t for t, in cu.execute('Any N ORDERBY lower(N) WHERE X name N')]
+        self.assertEquals(names, sorted(names, key=lambda x: x.lower()))
+
+    def test_in_state_without_update_perm(self):
+        """check a user change in_state without having update permission on the
+        subject
+        """
+        eid = self.execute('INSERT Affaire X: X ref "ARCT01"')[0][0]
+        self.commit()
+        cnx = self.login('iaminusersgrouponly')
+        session = self.current_session()
+        # needed to avoid check_perm error
+        session.set_pool()
+        # needed to remove rql expr granting update perm to the user
+        self.schema['Affaire'].set_rqlexprs('update', ()) 
+        self.assertRaises(Unauthorized,
+                          self.schema['Affaire'].check_perm, session, 'update', eid)
+        cu = cnx.cursor()
+        cu.execute('SET X in_state S WHERE X ref "ARCT01", S name "abort"')
+        cnx.commit()
+        # though changing a user's state (even the logged-in user's own) is reserved to managers
+        rql = u"SET X in_state S WHERE X eid %(x)s, S name 'deactivated'"
+        # XXX whether it should raise Unauthorized or ValidationError is not clear;
+        # the best would probably be ValidationError if the transition doesn't exist
+        # from the current state, but Unauthorized if it exists but the user can't pass it
+        self.assertRaises(ValidationError, cu.execute, rql, {'x': cnx.user(self.current_session()).eid}, 'x')
+        
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/unittest_session.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,29 @@
+from logilab.common.testlib import TestCase, unittest_main, mock_object
+
+from cubicweb.server.session import _make_description
+
+class Variable:
+    def __init__(self, name):
+        self.name = name
+        self.children = []
+        
+    def get_type(self, solution, args=None):
+        return solution[self.name]
+    def as_string(self):
+        return self.name
+    
+class Function:
+    def __init__(self, name, varname):
+        self.name = name
+        self.children = [Variable(varname)]
+    def get_type(self, solution, args=None):
+        return 'Int'
+
+class MakeDescriptionTC(TestCase):
+    def test_known_values(self):
+        solution = {'A': 'Int', 'B': 'EUser'}
+        self.assertEquals(_make_description((Function('max', 'A'), Variable('B')), {}, solution),
+                          ['Int','EUser'])
+
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/unittest_sqlutils.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,32 @@
+"""unit tests for module cubicweb.server.sqlutils
+"""
+
+import sys
+from mx.DateTime import now
+
+from logilab.common.testlib import TestCase, unittest_main
+
+from cubicweb.server.sqlutils import *
+
+BASE_CONFIG = {
+    'db-driver' : 'Postgres',
+    'db-host'   : 'crater',
+    'db-name'   : 'cubicweb2_test',
+    'db-user'   : 'toto',
+    'db-upassword' : 'toto',
+    }
+
+class SQLAdapterMixInTC(TestCase):
+
+    def test_init(self):
+        o = SQLAdapterMixIn(BASE_CONFIG)
+        self.assertEquals(o.encoding, 'UTF-8')
+        
+    def test_init_encoding(self):
+        config = BASE_CONFIG.copy()
+        config['db-encoding'] = 'ISO-8859-1'
+        o = SQLAdapterMixIn(config)
+        self.assertEquals(o.encoding, 'ISO-8859-1')
+        
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/unittest_ssplanner.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,58 @@
+from cubicweb.devtools import init_test_database
+from cubicweb.devtools.repotest import BasePlannerTC, test_plan
+from cubicweb.server.ssplanner import SSPlanner
+
+# keep a reference to cnx so it's not garbage collected, which would close the associated session
+repo, cnx = init_test_database('sqlite')
+
+class SSPlannerTC(BasePlannerTC):
+    repo = repo
+    _test = test_plan
+    
+    def setUp(self):
+        BasePlannerTC.setUp(self)
+        self.planner = SSPlanner(self.o.schema, self.o._rqlhelper)
+        self.system = self.o._repo.system_source
+
+    def tearDown(self):
+        BasePlannerTC.tearDown(self)
+
+    def test_ordered_ambigous_sol(self):
+        self._test('Any XN ORDERBY XN WHERE X name XN',
+                   [('OneFetchStep', [('Any XN ORDERBY XN WHERE X name XN',
+                                       [{'X': 'Basket', 'XN': 'String'},
+                                        {'X': 'EConstraintType', 'XN': 'String'},
+                                        {'X': 'EEType', 'XN': 'String'},
+                                        {'X': 'EGroup', 'XN': 'String'},
+                                        {'X': 'EPermission', 'XN': 'String'},
+                                        {'X': 'ERType', 'XN': 'String'},
+                                        {'X': 'File', 'XN': 'String'},
+                                        {'X': 'Folder', 'XN': 'String'},
+                                        {'X': 'Image', 'XN': 'String'},
+                                        {'X': 'State', 'XN': 'String'},
+                                        {'X': 'Tag', u'XN': 'String'},
+                                        {'X': 'Transition', 'XN': 'String'}])],
+                     None, None, 
+                     [self.system], None, [])])
+    
+    def test_groupeded_ambigous_sol(self):
+        self._test('Any XN,COUNT(X) GROUPBY XN WHERE X name XN',
+                   [('OneFetchStep', [('Any XN,COUNT(X) GROUPBY XN WHERE X name XN',
+                                       [{'X': 'Basket', 'XN': 'String'},
+                                        {'X': 'EConstraintType', 'XN': 'String'},
+                                        {'X': 'EEType', 'XN': 'String'},
+                                        {'X': 'EGroup', 'XN': 'String'},
+                                        {'X': 'EPermission', 'XN': 'String'},
+                                        {'X': 'ERType', 'XN': 'String'},
+                                        {'X': 'File', 'XN': 'String'},
+                                        {'X': 'Folder', 'XN': 'String'},
+                                        {'X': 'Image', 'XN': 'String'},
+                                        {'X': 'State', 'XN': 'String'},
+                                        {'X': 'Tag', u'XN': 'String'},
+                                        {'X': 'Transition', 'XN': 'String'}])],
+                     None, None, 
+                     [self.system], None, [])])
+        
+if __name__ == '__main__':
+    from logilab.common.testlib import unittest_main
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/unittest_tools.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,11 @@
+from logilab.common.testlib import TestCase, unittest_main
+
+class ImportTC(TestCase):
+    def test(self):
+        # the minimal test: module is importable...
+        import cubicweb.server.server
+        import cubicweb.server.checkintegrity
+        import cubicweb.server.serverctl
+        
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/utils.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,138 @@
+"""Some utilities for the CubicWeb server.
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+import sys
+import string
+from threading import Timer, Thread
+from getpass import getpass
+from random import choice
+
+try:
+    from crypt import crypt
+except ImportError:
+    # crypt is not available (eg windows)
+    from cubicweb.md5crypt import crypt
+
+
+def getsalt(chars=string.letters + string.digits):
+    """generate a random 2-character 'salt'"""
+    return choice(chars) + choice(chars)
+
+
+def crypt_password(passwd, salt=None):
+    """return the encrypted password using the given salt or a generated one
+    """
+    if passwd is None:
+        return None
+    if salt is None:
+        salt = getsalt()
+    return crypt(passwd, salt)
+
+
+def cartesian_product(seqin):
+    """returns a generator which returns the cartesian product of `seqin`
+
+    for more details, see :
+    http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/302478
+    """
+    def rloop(seqin, comb):
+        """recursive looping function"""
+        if seqin:                   # any more sequences to process?
+            for item in seqin[0]:
+                newcomb = comb + [item]     # add next item to current combination
+                # call rloop w/ remaining seqs, newcomb
+                for item in rloop(seqin[1:], newcomb):   
+                    yield item          # seqs and newcomb
+        else:                           # processing last sequence
+            yield comb                  # comb finished, add to list
+    return rloop(seqin, [])
+
+
+def cleanup_solutions(rqlst, solutions):
+    for sol in solutions:
+        for vname in sol.keys():
+            if not (vname in rqlst.defined_vars or vname in rqlst.aliases):
+                del sol[vname]
+
+
+DEFAULT_MSG = 'we need a manager connection on the repository \
+(the server doesn\'t have to run, even should better not)'
+
+def manager_userpasswd(user=None, passwd=None, msg=DEFAULT_MSG, confirm=False):
+    if not user:
+        print msg
+        while not user:
+            user = raw_input('login: ')
+        passwd = getpass('password: ')
+        if confirm:
+            while True:
+                passwd2 = getpass('confirm password: ')
+                if passwd == passwd2:
+                    break
+                print 'password doesn\'t match'
+                passwd = getpass('password: ')
+        user = unicode(user, sys.stdin.encoding)
+    elif not passwd:
+        assert not confirm
+        passwd = getpass('password for %s: ' % user)
+    # XXX decode password using stdin encoding then encode it using appl'encoding
+    return user, passwd
+
+
+class LoopTask(object):
+    """threaded task restarting itself once executed"""
+    def __init__(self, interval, func):
+        self.interval = interval
+        def auto_restart_func(self=self, func=func):
+            try:
+                func()
+            finally:
+                self.start()
+        self.func = auto_restart_func
+        self.name = func.__name__
+        
+    def start(self):
+        self._t = Timer(self.interval, self.func)
+        self._t.start()
+
+    def cancel(self):
+        self._t.cancel()
+
+    def join(self):
+        self._t.join()
+
+
+class RepoThread(Thread):
+    """subclass of thread so it auto remove itself from a given list once
+    executed
+    """
+    def __init__(self, target, running_threads):
+        def auto_remove_func(self=self, func=target):
+            try:
+                func()
+            finally:
+                self.running_threads.remove(self)
+        Thread.__init__(self, target=target)
+        self.running_threads = running_threads
+        self._name = target.__name__
+        
+    def start(self):
+        self.running_threads.append(self)
+        Thread.start(self)
+
+    @property
+    def name(self):
+        return '%s(%s)' % (self._name, Thread.getName(self))
+
+
+from logilab.common.deprecation import class_moved
+from cubicweb.server import pool
+Operation = class_moved(pool.Operation)
+PreCommitOperation = class_moved(pool.PreCommitOperation)
+LateOperation = class_moved(pool.LateOperation)
+SingleLastOperation = class_moved(pool.SingleLastOperation)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/setup.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,182 @@
+#!/usr/bin/env python
+# pylint: disable-msg=W0142,W0403,W0404,W0613,W0622,W0622,W0704,R0904,C0103,E0611
+#
+# Copyright (c) 2003 LOGILAB S.A. (Paris, FRANCE).
+# http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc.,
+# 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
+""" Generic Setup script, takes package info from __pkginfo__.py file """
+
+import os
+import sys
+import shutil
+from distutils.core import setup
+from distutils.command import install_lib
+from os.path import isdir, exists, join, walk
+
+# import required features
+from __pkginfo__ import modname, version, license, short_desc, long_desc, \
+     web, author, author_email
+# import optional features
+try:
+    from __pkginfo__ import distname
+except ImportError:
+    distname = modname
+try:
+    from __pkginfo__ import scripts
+except ImportError:
+    scripts = []
+try:
+    from __pkginfo__ import data_files
+except ImportError:
+    data_files = None
+try:
+    from __pkginfo__ import subpackage_of
+except ImportError:
+    subpackage_of = None
+try:
+    from __pkginfo__ import include_dirs
+except ImportError:
+    include_dirs = []
+try:
+    from __pkginfo__ import ext_modules
+except ImportError:
+    ext_modules = None
+
+BASE_BLACKLIST = ('CVS', 'debian', 'dist', 'build', '__buildlog')
+IGNORED_EXTENSIONS = ('.pyc', '.pyo', '.elc')
+    
+
+def ensure_scripts(linux_scripts):
+    """
+    Creates the proper script names required for each platform
+    (taken from 4Suite)
+    """
+    from distutils import util
+    if util.get_platform()[:3] == 'win':
+        scripts_ = [script + '.bat' for script in linux_scripts]
+    else:
+        scripts_ = linux_scripts
+    return scripts_
+
+
+def get_packages(directory, prefix):
+    """return a list of subpackages for the given directory
+    """
+    result = []
+    for package in os.listdir(directory):
+        absfile = join(directory, package)
+        if isdir(absfile):
+            if exists(join(absfile, '__init__.py')) or \
+                   package in ('test', 'tests'):
+                if prefix:
+                    result.append('%s.%s' % (prefix, package))
+                else:
+                    result.append(package)
+                result += get_packages(absfile, result[-1])
+    return result
+
+def export(from_dir, to_dir,
+           blacklist=BASE_BLACKLIST,
+           ignore_ext=IGNORED_EXTENSIONS):
+    """make a mirror of from_dir in to_dir, omitting directories and files
+    listed in the black list
+    """
+    def make_mirror(arg, directory, fnames):
+        """walk handler"""
+        for norecurs in blacklist:
+            try:
+                fnames.remove(norecurs)
+            except ValueError:
+                pass
+        for filename in fnames:
+            # don't include binary files
+            if filename[-4:] in ignore_ext:
+                continue
+            if filename[-1] == '~':
+                continue
+            src = '%s/%s' % (directory, filename)
+            dest = to_dir + src[len(from_dir):]
+            print >> sys.stderr, src, '->', dest
+            if os.path.isdir(src):
+                if not exists(dest):
+                    os.mkdir(dest)
+            else:
+                if exists(dest):
+                    os.remove(dest)
+                shutil.copy2(src, dest)
+    try:
+        os.mkdir(to_dir)
+    except OSError, ex:
+        # file exists ?
+        import errno
+        if ex.errno != errno.EEXIST:
+            raise
+    walk(from_dir, make_mirror, None)
+
+
+EMPTY_FILE = '"""generated file, don\'t modify or your data will be lost"""\n'
+
+class MyInstallLib(install_lib.install_lib):
+    """extend install_lib command to handle  package __init__.py and
+    include_dirs variable if necessary
+    """
+    def run(self):
+        """overridden from install_lib class"""
+        install_lib.install_lib.run(self)
+        # create the subpackage's __init__.py if needed
+        if subpackage_of:
+            product_init = join(self.install_dir, subpackage_of, '__init__.py')
+            if not exists(product_init):
+                self.announce('creating %s' % product_init)
+                stream = open(product_init, 'w')
+                stream.write(EMPTY_FILE)
+                stream.close()
+        # manually install included directories if any
+        if include_dirs:
+            if subpackage_of:
+                base = join(subpackage_of, modname)
+            else:
+                base = modname
+            for directory in include_dirs:
+                dest = join(self.install_dir, base, directory)
+                export(directory, dest)
+        
+def install(**kwargs):
+    """setup entry point"""
+    if subpackage_of:
+        package = subpackage_of + '.' + modname
+        kwargs['package_dir'] = {package : '.'}
+        packages = [package] + get_packages(os.getcwd(), package)
+    else:
+        kwargs['package_dir'] = {modname : '.'}
+        packages = [modname] + get_packages(os.getcwd(), modname)
+    kwargs['packages'] = packages
+    return setup(name = distname,
+                 version = version,
+                 license =license,
+                 description = short_desc,
+                 long_description = long_desc,
+                 author = author,
+                 author_email = author_email,
+                 url = web,
+                 scripts = ensure_scripts(scripts),
+                 data_files=data_files,
+                 ext_modules=ext_modules,
+                 cmdclass={'install_lib': MyInstallLib},
+                 **kwargs
+                 )
+            
+if __name__ == '__main__' :
+    install()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/MANIFEST.in	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,10 @@
+include *.py
+include ChangeLog
+
+recursive-include views *.py
+recursive-include entities *.py
+recursive-include sobjects *.py
+recursive-include schema Include *.py *.sql.postgres 
+recursive-include data external_resources *.gif *.png *.css *.ico *.js
+recursive-include i18n *.pot *.po
+recursive-include migration *.sql *.py depends.map
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/__init__.py.tmpl	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,4 @@
+"""cubicweb-%(cubename)s application package
+
+%(longdesc)s
+"""
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/__pkginfo__.py.tmpl	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,65 @@
+# pylint: disable-msg=W0622
+"""%(distname)s application packaging information"""
+
+distname = '%(distname)s'
+
+numversion = (0, 1, 0)
+version = '.'.join(str(num) for num in numversion)
+
+license = 'LCL'
+copyright = '''Copyright (c) 2008 LOGILAB S.A. (Paris, FRANCE).
+http://www.logilab.fr/ -- mailto:contact@logilab.fr'''
+
+author = 'Logilab'
+author_email = 'contact@logilab.fr'
+
+short_desc = '%(shortdesc)s'
+long_desc = '''%(longdesc)s'''
+
+from os import listdir as _listdir
+from os.path import join, isdir
+
+from glob import glob
+scripts = glob(join('bin', '%(cubename)s-*'))
+
+web, ftp = '', ''
+
+pyversions = ['2.4']
+
+#from cubicweb.devtools.pkginfo import get_distutils_datafiles
+CUBES_DIR = join('share', 'cubicweb', 'cubes')
+THIS_CUBE_DIR = join(CUBES_DIR, '%(cubename)s')
+
+def listdir(dirpath):
+    return [join(dirpath, fname) for fname in _listdir(dirpath)
+            if fname[0] != '.' and not fname.endswith('.pyc')
+            and not fname.endswith('~')]
+
+
+try:
+    data_files = [
+        # common files
+        [THIS_CUBE_DIR, [fname for fname in glob('*.py') if fname != 'setup.py']],
+        
+        # client (web) files
+        [join(THIS_CUBE_DIR, 'data'),  listdir('data')],
+        [join(THIS_CUBE_DIR, 'i18n'),  listdir('i18n')],
+        
+        # server files
+        [join(THIS_CUBE_DIR, 'migration'), listdir('migration')],
+        ]
+    
+    # check for possible extended cube layout
+    for dirname in ('entities', 'views', 'sobjects', 'schema'):
+        if isdir(dirname):
+            data_files.append([join(THIS_CUBE_DIR, dirname), listdir(dirname)])
+    # Note: here, you'll need to add subdirectories if you want
+    # them to be included in the debian package
+except OSError:
+    # we are in an installed directory
+    pass
+
+
+cube_eid = None # <=== FIXME if you need direct bug-subscription
+__use__ = (%(dependancies)s)
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/data/cubes.CUBENAME.css	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,3 @@
+@import url("cubicweb.css");
+
+/* template specific CSS */
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/data/cubes.CUBENAME.js	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+// This contains template-specific javascript
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/data/external_resources.tmpl	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,11 @@
+# -*- shell-script -*-
+###############################################################################
+#
+# put here information about external resources used by your components,
+# or to override the existing external resources configuration
+#
+###############################################################################
+
+# CSS stylesheets to include in HTML headers
+# uncomment the line below to use template specific stylesheet
+# STYLESHEETS = DATADIR/cubes.%(cubename)s.css
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/debian/DISTNAME.prerm.tmpl	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,27 @@
+#!/bin/sh -e
+
+delete_pyo_pyc () {
+  find /usr/share/cubicweb/cubes/%(cubename)s -name "*.pyc" | xargs rm -f
+  find /usr/share/cubicweb/cubes/%(cubename)s -name "*.pyo" | xargs rm -f
+}
+
+
+case "$1" in
+    failed-upgrade|abort-install|abort-upgrade|disappear)
+    ;;
+    upgrade)
+    delete_pyo_pyc
+    ;;
+    remove)
+    delete_pyo_pyc
+    ;;
+    purge)
+    ;;
+
+    *)
+        echo "prerm called with unknown argument \`$1'" >&2
+        exit 1
+
+esac
+
+#DEBHELPER#
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/debian/changelog.tmpl	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,6 @@
+%(distname)s (0.1.0-1) unstable; urgency=low
+
+  * initial release
+
+ -- 
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/debian/compat	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+5
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/debian/control.tmpl	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,19 @@
+Source: %(distname)s
+Section: web
+Priority: optional
+Maintainer: Logilab Packaging Team <contact@logilab.fr>
+Uploaders: Sylvain Thenault <sylvain.thenault@logilab.fr> 
+Build-Depends: debhelper (>= 5.0.37.1), python (>=2.4), python-dev (>=2.4)
+Standards-Version: 3.8.0
+
+
+Package: %(distname)s
+Architecture: all
+Depends: cubicweb-common (>= %(version)s)
+Description: %(shortdesc)s
+ CubicWeb is a semantic web application framework.
+ .
+ %(longdesc)s
+ .
+ This package will install all the components you need to run the
+ %(distname)s application (cube :)..
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/debian/copyright.tmpl	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,14 @@
+This package was debianized by Logilab <contact@logilab.fr>
+
+Upstream Author: 
+
+  Logilab <contact@logilab.fr>
+
+Copyright:
+
+Copyright (c) %(year)s LOGILAB S.A. (Paris, FRANCE).
+http://www.logilab.fr/ -- mailto:contact@logilab.fr
+
+Logilab Closed source License. This code is *NOT* open-source. Usage of this
+code is subject to a licence agreement. If you want to use it, you should
+contact logilab's sales service at commercial@logilab.fr .
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/debian/rules.tmpl	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,51 @@
+#!/usr/bin/make -f
+# Sample debian/rules that uses debhelper.
+# GNU copyright 1997 to 1999 by Joey Hess.
+
+# Uncomment this to turn on verbose mode.
+#export DH_VERBOSE=1
+build: build-stamp
+build-stamp: 
+	dh_testdir
+	python setup.py -q build
+	touch build-stamp
+
+clean: 
+	dh_testdir
+	dh_testroot
+	rm -f build-stamp configure-stamp
+	rm -rf build
+	find . -name "*.pyc" | xargs rm -f
+	dh_clean
+
+install: build
+	dh_testdir
+	dh_testroot
+	dh_clean -k
+	dh_installdirs -i
+	python setup.py -q install --no-compile --prefix=debian/%(distname)s/usr/
+
+
+# Build architecture-independent files here.
+binary-indep: build install
+	dh_testdir
+	dh_testroot
+	dh_install -i
+	dh_installchangelogs -i
+	dh_installexamples -i
+	dh_installdocs -i
+	dh_installman -i
+	dh_link -i
+	dh_compress -i -X.py -X.ini -X.xml -Xtest
+	dh_fixperms -i
+	dh_installdeb -i
+	dh_gencontrol -i 
+	dh_md5sums -i
+	dh_builddeb -i
+
+
+# Build architecture-dependent files here.
+binary-arch: 
+
+binary: binary-indep 
+.PHONY: build clean binary-arch binary-indep binary
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/entities.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+"""this contains the cube-specific entities' classes"""
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/i18n/en.po	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,7 @@
+msgid ""
+msgstr ""
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: pygettext.py 1.5\n"
+"Plural-Forms: nplurals=2; plural=(n > 1);\n"
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/i18n/fr.po	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,13 @@
+msgid ""
+msgstr ""
+"Project-Id-Version: 2.0\n"
+"POT-Creation-Date: 2006-01-12 17:35+CET\n"
+"PO-Revision-Date: 2008-02-15 12:55+0100\n"
+"Last-Translator: Logilab\n"
+"Language-Team: French <devel@logilab.fr.org>\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: pygettext.py 1.5\n"
+"Plural-Forms: nplurals=2; plural=(n > 1);\n"
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/migration/postcreate.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,2 @@
+# postcreate script. You could setup a workflow here for example
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/migration/precreate.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,3 @@
+# Instructions here will be read before reading the schema
+# You could create your own groups here, like in :
+#   add_entity('EGroup', name=u'mygroup')
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/schema.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+# cube's specific schema
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/setup.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,66 @@
+#!/usr/bin/env python
+# pylint: disable-msg=W0404,W0622,W0704,W0613,W0152
+# Copyright (c) 2003-2004 LOGILAB S.A. (Paris, FRANCE).
+# http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc.,
+# 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
+""" Generic Setup script, takes package info from __pkginfo__.py file """
+
+import os
+import sys
+import shutil
+from distutils.core import setup
+from distutils import command
+from distutils.command import install_lib
+from os.path import isdir, exists, join, walk
+
+# import required features
+from __pkginfo__ import distname, version, license, short_desc, long_desc, \
+     web, author, author_email
+try:
+    from __pkginfo__ import scripts
+except ImportError:
+    scripts = []
+try:
+    from __pkginfo__ import data_files
+except ImportError:
+    data_files = None
+    
+def ensure_scripts(linux_scripts):
+    """creates the proper script names required for each platform
+    (taken from 4Suite)
+    """
+    from distutils import util
+    if util.get_platform()[:3] == 'win':
+        scripts_ = [script + '.bat' for script in linux_scripts]
+    else:
+        scripts_ = linux_scripts
+    return scripts_
+
+def install(**kwargs):
+    """setup entry point"""
+    return setup(name=distname,
+                 version=version,
+                 license =license,
+                 description=short_desc,
+                 long_description=long_desc,
+                 author=author,
+                 author_email=author_email,
+                 url=web,
+                 scripts=ensure_scripts(scripts),
+                 data_files=data_files,
+                 **kwargs)
+            
+if __name__ == '__main__' :
+    install()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/site_cubicweb.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,2 @@
+"""this is where you could register procedures for instance"""
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/sobjects.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,2 @@
+"""this contains the server-side objects"""
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/test/data/bootstrap_cubes.tmpl	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+%(cubename)s
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/test/pytestconf.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,37 @@
+import os
+import pwd
+
+from logilab.common.pytest import PyTester
+
+def getlogin():
+    """avoid using os.getlogin() because of strange tty / stdin problems
+    (man 3 getlogin)
+    Another solution would be to use $LOGNAME, $USER or $USERNAME
+    """
+    return pwd.getpwuid(os.getuid())[0]
+
+
+def update_parser(parser):
+    login = getlogin()
+    parser.add_option('-r', '--rebuild-database', dest='rebuild_db',
+                      default=False, action="store_true",
+                      help="remove tmpdb and rebuilds the test database")
+    parser.add_option('-u', '--dbuser', dest='dbuser', action='store',
+                      default=login, help="database user")
+    parser.add_option('-w', '--dbpassword', dest='dbpassword', action='store',
+                      default=login, help="database password")
+    parser.add_option('-n', '--dbname', dest='dbname', action='store',
+                      default=None, help="database name")
+    parser.add_option('--euser', dest='euser', action='store',
+                      default=login, help="euser name")
+    parser.add_option('--epassword', dest='epassword', action='store',
+                      default=login, help="euser's password")
+    return parser
+
+
+class CustomPyTester(PyTester):
+    def __init__(self, cvg, options):
+        super(CustomPyTester, self).__init__(cvg, options)
+        if options.rebuild_db:
+            os.unlink('tmpdb')
+            os.unlink('tmpdb-template')
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/test/realdb_test_CUBENAME.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,31 @@
+from cubicweb.devtools import buildconfig, loadconfig
+from cubicweb.devtools.testlib import RealDBTest
+
+def setup_module(options):
+    if options.source:
+        configcls = loadconfig(options.source)
+    elif options.dbname is None:
+        raise Exception('either <sourcefile> or <dbname> options are required')
+    else:
+        configcls = buildconfig(options.dbuser, options.dbpassword,
+                                               options.dbname, options.euser,
+                                               options.epassword)
+    RealDatabaseTC.configcls = configcls
+
+class RealDatabaseTC(RealDBTest):
+    configcls = None # set by setup_module()
+
+    def test_all_primaries(self):
+        for rset in self.iter_individual_rsets(limit=50):
+            yield self.view, 'primary', rset, rset.req.reset_headers()
+    
+    ## startup views
+    def test_startup_views(self):
+        for vid in self.list_startup_views():
+            req = self.request()
+            yield self.view, vid, None, req
+
+
+if __name__ == '__main__':
+    from logilab.common.testlib import unittest_main
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/test/test_CUBENAME.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,16 @@
+"""template automatic tests"""
+
+from logilab.common.testlib import TestCase, unittest_main
+
+class DefaultTC(TestCase):
+    def test_something(self):
+        self.skip('this cube has no test')
+
+## uncomment the import if you want to activate automatic test for your
+## template
+
+# from cubicweb.devtools.testlib import AutomaticWebTest
+
+
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/views.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+"""cube-specific forms/views/actions/components"""
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sobjects/__init__.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+"""server side objects"""
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sobjects/email.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,63 @@
+"""hooks to ensure use_email / primary_email relations consistency
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from cubicweb.server.hooksmanager import Hook
+from cubicweb.server.pool import PreCommitOperation
+
+class SetUseEmailRelationOp(PreCommitOperation):
+    """delay this operation to commit to avoid conflict with a late rql query
+    already setting the relation
+    """
+    rtype = 'use_email'
+
+    def condition(self):
+        """check entity has use_email set for the email address"""
+        return not self.session.unsafe_execute(
+            'Any X WHERE X eid %(x)s, X use_email Y, Y eid %(y)s',
+            {'x': self.fromeid, 'y': self.toeid}, 'x')
+    
+    def precommit_event(self):
+        session = self.session
+        if self.condition():
+            session.unsafe_execute(
+                'SET X %s Y WHERE X eid %%(x)s, Y eid %%(y)s' % self.rtype,
+                {'x': self.fromeid, 'y': self.toeid}, 'x')
+    
+class SetPrimaryEmailRelationOp(SetUseEmailRelationOp):
+    rtype = 'primary_email'
+    
+    def condition(self):
+        """check entity has no primary_email set"""
+        return not self.session.unsafe_execute(
+            'Any X WHERE X eid %(x)s, X primary_email Y',
+            {'x': self.fromeid}, 'x')
+
+    
+class SetPrimaryEmailHook(Hook):
+    """set primary_email when a use_email relation is added, if the entity supports it"""
+    events = ('after_add_relation',)
+    accepts = ('use_email',)
+    
+    def call(self, session, fromeid, rtype, toeid):
+        subjtype = session.describe(fromeid)[0]
+        eschema = self.vreg.schema[subjtype]
+        if 'primary_email' in eschema.subject_relations():
+            SetPrimaryEmailRelationOp(session, vreg=self.vreg, 
+                                      fromeid=fromeid, toeid=toeid)
+
+class SetUseEmailHook(Hook):
+    """set use_email when a primary_email relation is added, if the entity supports it"""
+    events = ('after_add_relation',)
+    accepts = ('primary_email',)
+    
+    def call(self, session, fromeid, rtype, toeid):
+        subjtype = session.describe(fromeid)[0]
+        eschema = self.vreg.schema[subjtype]
+        if 'use_email' in eschema.subject_relations():
+            SetUseEmailRelationOp(session, vreg=self.vreg, 
+                                  fromeid=fromeid, toeid=toeid)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sobjects/hooks.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,37 @@
+"""various library content hooks
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from cubicweb.server.hooksmanager import Hook
+from cubicweb.server.pool import PreCommitOperation
+
+class AddUpdateEUserHook(Hook):
+    """ensure user logins are stripped"""
+    events = ('before_add_entity', 'before_update_entity',)
+    accepts = ('EUser',)
+    
+    def call(self, session, entity):
+        if 'login' in entity and entity['login']:
+            entity['login'] = entity['login'].strip()
+
+
+class AutoDeleteBookmark(PreCommitOperation):
+    def precommit_event(self):
+        session = self.session
+        if not self.beid in session.query_data('pendingeids', ()):
+            if not session.unsafe_execute('Any X WHERE X bookmarked_by U, X eid %(x)s',
+                                          {'x': self.beid}, 'x'):
+                session.unsafe_execute('DELETE Bookmark X WHERE X eid %(x)s',
+                                       {'x': self.beid}, 'x')
+        
+class DelBookmarkedByHook(Hook):
+    """auto-delete a bookmark when its last bookmarked_by relation is removed"""
+    events = ('after_delete_relation',)
+    accepts = ('bookmarked_by',)
+    
+    def call(self, session, subj, rtype, obj):
+        AutoDeleteBookmark(session, beid=subj)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sobjects/notification.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,305 @@
+"""some hooks and views to handle notification on entity's changes
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from base64 import b64encode, b64decode
+from itertools import repeat
+from time import time
+try:
+    from socket import gethostname
+except ImportError:
+    def gethostname():
+        return 'XXX'
+
+from logilab.common.textutils import normalize_text
+
+from cubicweb import RegistryException
+from cubicweb.common.view import EntityView
+from cubicweb.common.appobject import Component
+from cubicweb.common.registerers import accepts_registerer
+from cubicweb.common.selectors import accept_selector
+from cubicweb.common.mail import format_mail
+
+from cubicweb.server.pool import PreCommitOperation
+from cubicweb.server.hookhelper import SendMailOp
+from cubicweb.server.hooksmanager import Hook
+
+_ = unicode
+
+class RecipientsFinder(Component):
+    """this component is responsible to find recipients of a notification
+
+    by default users with their email set are notified if any, else the default
+    email addresses specified in the configuration are used
+    """
+    id = 'recipients_finder'
+    __registerer__ = accepts_registerer
+    __selectors__ = (accept_selector,)
+    accepts = ('Any',)
+    user_rql = ('Any X,E,A WHERE X is EUser, X in_state S, S name "activated",'
+                'X primary_email E, E address A')
+    
+    def recipients(self):
+        mode = self.config['default-recipients-mode']
+        if mode == 'users':
+            # use unsafe execute else we may not have the right to see users
+            # to notify...
+            execute = self.req.unsafe_execute
+            dests = [(u.get_email(), u.property_value('ui.language'))
+                     for u in execute(self.user_rql, build_descr=True, propagate=True).entities()]
+        elif mode == 'default-dest-addrs':
+            lang = self.vreg.property_value('ui.language')
+            dests = zip(self.config['default-dest-addrs'], repeat(lang))
+        else: # mode == 'none'
+            dests = []
+        return dests
+
+    
+# hooks #######################################################################
+
+class RenderAndSendNotificationView(PreCommitOperation):
+    """delay rendering of notification view until precommit"""
+    def precommit_event(self):
+        if self.view.rset[0][0] in self.session.query_data('pendingeids', ()):
+            return # entity added and deleted in the same transaction
+        self.view.render_and_send(**getattr(self, 'viewargs', {}))
+        
+class StatusChangeHook(Hook):
+    """notify when a workflowable entity has its state modified"""
+    events = ('after_add_entity',)
+    accepts = ('TrInfo',)
+    
+    def call(self, session, entity):
+        if not entity.from_state: # not a transition
+            return
+        rset = entity.related('wf_info_for')
+        try:
+            view = session.vreg.select_view('notif_status_change',
+                                            session, rset, row=0)
+        except RegistryException:
+            return
+        comment = entity.printable_value('comment', format='text/plain')
+        if comment:
+            comment = normalize_text(comment, 80,
+                                     rest=entity.comment_format=='text/rest')
+        RenderAndSendNotificationView(session, view=view, viewargs={
+            'comment': comment, 'previous_state': entity.previous_state.name,
+            'current_state': entity.new_state.name})
+
+
+class RelationChangeHook(Hook):
+    events = ('before_add_relation', 'after_add_relation',
+              'before_delete_relation', 'after_delete_relation')
+    accepts = ('Any',)
+    def call(self, session, fromeid, rtype, toeid):
+        """if a notification view is defined for the event, send notification
+        email defined by the view
+        """
+        rset = session.eid_rset(fromeid)
+        vid = 'notif_%s_%s' % (self.event,  rtype)
+        try:
+            view = session.vreg.select_view(vid, session, rset, row=0)
+        except RegistryException:
+            return
+        RenderAndSendNotificationView(session, view=view)
+
+
+class EntityChangeHook(Hook):
+    events = ('after_add_entity',
+              'after_update_entity')
+    accepts = ('Any',)
+    def call(self, session, entity):
+        """if a notification view is defined for the event, send notification
+        email defined by the view
+        """
+        rset = entity.as_rset()
+        vid = 'notif_%s' % self.event
+        try:
+            view = session.vreg.select_view(vid, session, rset, row=0)
+        except RegistryException:
+            return
+        RenderAndSendNotificationView(session, view=view)
+
+
+# abstract or deactivated notification views and mixin ########################
+
+class NotificationView(EntityView):
+    """abstract view implementing the email API
+
+    all you have to do by default is :
+    * set id and accepts attributes to match desired events and entity types
+    * set a content attribute to define the content of the email (unless you
+      override call)
+    """
+    accepts = ()
+    id = None
+    msgid_timestamp = True
+    
+    def recipients(self):
+        finder = self.vreg.select_component('recipients_finder',
+                                            req=self.req, rset=self.rset)
+        return finder.recipients()
+        
+    def subject(self):
+        entity = self.entity(0, 0)
+        subject = self.req._(self.message)
+        etype = entity.dc_type()
+        eid = entity.eid
+        login = self.user_login()
+        return self.req._('%(subject)s %(etype)s #%(eid)s (%(login)s)') % locals()
+
+    def user_login(self):
+        # req is actually a session (we are on the server side), and we have to
+        # prevent nested internal session
+        return self.req.actual_session().user.login
+    
+    def context(self, **kwargs):
+        entity = self.entity(0, 0)
+        for key, val in kwargs.iteritems():
+            if val and val.strip():
+                kwargs[key] = self.req._(val)
+        kwargs.update({'user': self.user_login(),
+                       'eid': entity.eid,
+                       'etype': entity.dc_type(),
+                       'url': entity.absolute_url(),
+                       'title': entity.dc_long_title(),})
+        return kwargs
+    
+    def cell_call(self, row, col=0, **kwargs):
+        self.w(self.req._(self.content) % self.context(**kwargs))
+
+    def construct_message_id(self, eid):
+        return construct_message_id(self.config.appid, eid, self.msgid_timestamp)
+
+    def render_and_send(self, **kwargs):
+        """generate and send an email message for this view"""
+        self._kwargs = kwargs
+        recipients = self.recipients()
+        if not recipients:
+            self.info('skipping %s%s notification which has no recipients',
+                      self.id, self.accepts)
+            return
+        if not isinstance(recipients[0], tuple):
+            from warnings import warn
+            warn('recipients should now return a list of 2-uple (email, language)',
+                 DeprecationWarning, stacklevel=1)
+            lang = self.vreg.property_value('ui.language')
+            recipients = zip(recipients, repeat(lang))
+        entity = self.entity(0, 0)
+        # if the view is using timestamp in message ids, no way to reference
+        # previous email
+        if not self.msgid_timestamp:
+            refs = [self.construct_message_id(eid)
+                    for eid in entity.notification_references(self)]
+        else:
+            refs = ()
+        msgid = self.construct_message_id(entity.eid)
+        userdata = self.req.user_data()
+        origlang = self.req.lang
+        for emailaddr, lang in recipients:
+            self.req.set_language(lang)
+            # since the same view (eg self) may be called multiple time and we
+            # need a fresh stream at each iteration, reset it explicitly
+            self.w = None
+            # call dispatch before subject to set .row/.col attributes on the view :/
+            content = self.dispatch(row=0, col=0, **kwargs)
+            subject = self.subject()
+            msg = format_mail(userdata, [emailaddr], content, subject,
+                              config=self.config, msgid=msgid, references=refs)
+            self.send([emailaddr], msg)
+        # restore language
+        self.req.set_language(origlang)
+
+    def send(self, recipients, msg):
+        SendMailOp(self.req, recipients=recipients, msg=msg)
+
+
+def construct_message_id(appid, eid, withtimestamp=True):
+    if withtimestamp:
+        addrpart = 'eid=%s&timestamp=%.10f' % (eid, time())
+    else:
+        addrpart = 'eid=%s' % eid
+    # we don't want any equal sign nor trailing newlines
+    leftpart = b64encode(addrpart, '.-').rstrip().rstrip('=')
+    return '<%s@%s.%s>' % (leftpart, appid, gethostname())
+
+
+def parse_message_id(msgid, appid):
+    if msgid[0] == '<':
+        msgid = msgid[1:]
+    if msgid[-1] == '>':
+        msgid = msgid[:-1]
+    try:
+        values, qualif = msgid.split('@')
+        padding = len(values) % 4
+        values = b64decode(str(values + '='*padding), '.-')
+        values = dict(v.split('=') for v in values.split('&'))
+        fromappid, host = qualif.split('.', 1)
+    except:
+        return None
+    if appid != fromappid or host != gethostname():
+        return None
+    return values
+    
+
+class StatusChangeMixIn(object):
+    id = 'notif_status_change'
+    msgid_timestamp = True
+    message = _('status changed')
+    content = _("""
+%(user)s changed status from <%(previous_state)s> to <%(current_state)s> for entity
+'%(title)s'
+
+%(comment)s
+
+url: %(url)s
+""")
+
+
+class ContentAddedMixIn(object):
+    """define emailcontent view for entity types for which you want to be notified
+    """
+    id = 'notif_after_add_entity' 
+    msgid_timestamp = False
+    message = _('new')
+    content = """
+%(title)s
+
+%(content)s
+
+url: %(url)s
+"""
+
+###############################################################################
+# Actual notification views.                                                  #
+#                                                                             #
+# disable them at the recipients_finder level if you don't want them          #
+###############################################################################
+
+# XXX should be based on dc_title/dc_description, no?
+
+class NormalizedTextView(ContentAddedMixIn, NotificationView):
+    def context(self, **kwargs):
+        entity = self.entity(0, 0)
+        content = entity.printable_value(self.content_attr, format='text/plain')
+        if content:
+            contentformat = getattr(entity, self.content_attr + '_format', 'text/rest')
+            content = normalize_text(content, 80, rest=contentformat=='text/rest')
+        return super(NormalizedTextView, self).context(content=content, **kwargs)
+    
+    def subject(self):
+        entity = self.entity(0, 0)
+        return  u'%s #%s (%s)' % (self.req.__('New %s' % entity.e_schema),
+                                  entity.eid, self.user_login())
+
+
+class CardAddedView(NormalizedTextView):
+    """get notified from new cards"""
+    accepts = ('Card',)
+    content_attr = 'synopsis'
+    
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sobjects/supervising.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,234 @@
+"""some hooks and views to handle supervising of any data changes
+
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from cubicweb import UnknownEid
+from cubicweb.common.view import ComponentMixIn, StartupView
+from cubicweb.common.mail import format_mail
+from cubicweb.server.hooksmanager import Hook
+from cubicweb.server.hookhelper import SendMailOp
+
+
+class SomethingChangedHook(Hook):
+    events = ('before_add_relation', 'before_delete_relation',
+              'after_add_entity', 'before_update_entity')
+    accepts = ('Any',)
+    
+    def call(self, session, *args):
+        dest = self.config['supervising-addrs']
+        if not dest: # no supervisors, don't do this for nothing...
+            return
+        self.session = session
+        if self._call(*args):
+            SupervisionMailOp(session)
+        
+    def _call(self, *args):
+        if self._event() == 'update_entity' and args[0].e_schema == 'EUser':
+            updated = set(args[0].iterkeys())
+            if not (updated - frozenset(('eid', 'modification_date', 'last_login_time'))):
+                # don't record last_login_time update which are done 
+                # automatically at login time
+                return False
+        self.session.add_query_data('pendingchanges', (self._event(), args))
+        return True
+        
+    def _event(self):
+        return self.event.split('_', 1)[1]
+
+
+class EntityDeleteHook(SomethingChangedHook):
+    events = ('before_delete_entity',)
+    
+    def _call(self, eid):
+        entity = self.session.entity(eid)
+        try:
+            title = entity.dc_title()
+        except:
+            # may raise an error during deletion process, for instance due to
+            # missing required relation
+            title = '#%s' % eid
+        self.session.add_query_data('pendingchanges',
+                                    ('delete_entity',
+                                     (eid, str(entity.e_schema),
+                                      title)))
+        return True
+
+
+def filter_changes(changes):
+    """
+    * when an entity has been deleted:
+      * don't show deletion of its relations
+      * don't show related TrInfo deletion if any
+    * when an entity has been added don't show owned_by relation addition
+    * don't show new TrInfo entities if any
+    """
+    # first build an index of changes
+    index = {}
+    added, deleted = set(), set()
+    for change in changes[:]:
+        event, changedescr = change
+        if event == 'add_entity':
+            entity = changedescr[0]
+            added.add(entity.eid)
+            if entity.e_schema == 'TrInfo':
+                changes.remove(change)
+                if entity.from_state:
+                    try:
+                        changes.remove( ('delete_relation', 
+                                         (entity.wf_info_for[0].eid, 'in_state', 
+                                          entity.from_state[0].eid)) )
+                    except ValueError:
+                        pass
+                    try:
+                        changes.remove( ('add_relation', 
+                                         (entity.wf_info_for[0].eid, 'in_state', 
+                                          entity.to_state[0].eid)) )
+                    except ValueError:
+                        pass
+                    event = 'change_state'
+                    change = (event, 
+                              (entity.wf_info_for[0],
+                               entity.from_state[0], entity.to_state[0]))
+                    changes.append(change)
+        elif event == 'delete_entity':
+            deleted.add(changedescr[0])
+        index.setdefault(event, set()).add(change)
+    # filter changes
+    for eid in added:
+        try:
+            for change in index['add_relation'].copy():
+                changedescr = change[1]
+                # skip meta-relations which are set automatically
+                # XXX generate list below using rtags (category = 'generated')
+                if changedescr[1] in ('created_by', 'owned_by', 'is', 'is_instance_of',
+                                      'from_state', 'to_state', 'wf_info_for',) \
+                       and changedescr[0] == eid:
+                    index['add_relation'].remove(change)
+                # skip in_state relation if the entity is being created
+                # XXX this may be automatized by skipping all mandatory relation
+                #     at entity creation time
+                elif changedescr[1] == 'in_state' and changedescr[0] in added:
+                    index['add_relation'].remove(change)
+                    
+        except KeyError:
+            break
+    for eid in deleted:
+        try:
+            for change in index['delete_relation'].copy():
+                fromeid, rtype, toeid = change[1]
+                if fromeid == eid:
+                    index['delete_relation'].remove(change)
+                elif toeid == eid:
+                    index['delete_relation'].remove(change)
+                    if rtype == 'wf_info_for':
+                        for change in index['delete_entity'].copy():
+                            if change[1][0] == fromeid:
+                                index['delete_entity'].remove(change)
+        except KeyError:
+            break
+    for change in changes:
+        event, changedescr = change
+        if change in index[event]:
+            yield change
+
+
+class SupervisionEmailView(ComponentMixIn, StartupView):
+    """view implementing the email API for data changes supervision notification
+    """
+    id = 'supervision_notif'
+
+    def recipients(self):
+        return self.config['supervising-addrs']
+        
+    def subject(self):
+        return self.req._('[%s supervision] changes summary') % self.config.appid
+    
+    def call(self, changes):
+        user = self.req.actual_session().user
+        self.w(self.req._('user %s has made the following change(s):\n\n')
+               % user.login)
+        for event, changedescr in filter_changes(changes):
+            self.w(u'* ')
+            getattr(self, event)(*changedescr)
+            self.w(u'\n\n')
+
+    def _entity_context(self, entity):
+        return {'eid': entity.eid,
+                'etype': entity.dc_type().lower(),
+                'title': entity.dc_title()}
+    
+    def add_entity(self, entity):
+        msg = self.req._('added %(etype)s #%(eid)s (%(title)s)')
+        self.w(u'%s\n' % (msg % self._entity_context(entity)))
+        self.w(u'  %s' % entity.absolute_url())
+            
+    def update_entity(self, entity):
+        msg = self.req._('updated %(etype)s #%(eid)s (%(title)s)')
+        self.w(u'%s\n' % (msg % self._entity_context(entity)))
+        # XXX print changes
+        self.w(u'  %s' % entity.absolute_url())
+            
+    def delete_entity(self, eid, etype, title):
+        msg = self.req._('deleted %(etype)s #%(eid)s (%(title)s)')
+        etype = display_name(self.req, etype).lower()
+        self.w(msg % locals())
+        
+    def change_state(self, entity, fromstate, tostate):
+        msg = self.req._('changed state of %(etype)s #%(eid)s (%(title)s)')
+        self.w(u'%s\n' % (msg % self._entity_context(entity)))
+        self.w(_('  from state %(fromstate)s to state %(tostate)s\n' % 
+                 {'fromstate': _(fromstate.name), 'tostate': _(tostate.name)}))
+        self.w(u'  %s' % entity.absolute_url())
+        
+    def _relation_context(self, fromeid, rtype, toeid):
+        _ = self.req._
+        session = self.req.actual_session()
+        def describe(eid):
+            try:
+                return _(session.describe(eid)[0]).lower()
+            except UnknownEid:
+                # may occur when an entity has been deleted from an external
+                # source and we're cleaning its relation
+                return _('unknown external entity')
+        return {'rtype': _(rtype),
+                'fromeid': fromeid,
+                'frometype': describe(fromeid),
+                'toeid': toeid,
+                'toetype': describe(toeid)}
+        
+    def add_relation(self, fromeid, rtype, toeid):
+        msg = self.req._('added relation %(rtype)s from %(frometype)s #%(fromeid)s to %(toetype)s #%(toeid)s')
+        self.w(msg % self._relation_context(fromeid, rtype, toeid))
+
+    def delete_relation(self, fromeid, rtype, toeid):
+        msg = self.req._('deleted relation %(rtype)s from %(frometype)s #%(fromeid)s to %(toetype)s #%(toeid)s')
+        self.w(msg % self._relation_context(fromeid, rtype, toeid))
+        
+                
+class SupervisionMailOp(SendMailOp):
+    """special send email operation which should be done only once for a bunch
+    of changes
+    """
+    def _get_view(self):
+        return self.session.vreg.select_component('supervision_notif',
+                                                  self.session, None)
+        
+    def _prepare_email(self):
+        session = self.session
+        config = session.vreg.config
+        uinfo = {'email': config['sender-addr'],
+                 'name': config['sender-name']}
+        view = self._get_view()
+        content = view.dispatch(changes=session.query_data('pendingchanges'))
+        recipients = view.recipients()
+        msg = format_mail(uinfo, recipients, content, view.subject(), config=config)
+        self.to_send = [(msg, recipients)]
+
+    def commit_event(self):
+        self._prepare_email()
+        SendMailOp.commit_event(self)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sobjects/test/data/bootstrap_packages	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+ecomment
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sobjects/test/data/schema.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,6 @@
+class comments(RelationDefinition):
+    subject = 'Comment'
+    object = 'Card'
+    cardinality='1*'
+    composite='object'
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sobjects/test/data/sobjects/__init__.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,6 @@
+from cubicweb.sobjects.notification import StatusChangeMixIn, NotificationView
+
+class UserStatusChangeView(StatusChangeMixIn, NotificationView):
+    accepts = ('EUser',)
+    
+    
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sobjects/test/unittest_email.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,29 @@
+from cubicweb.devtools.apptest import EnvBasedTC
+
+class EmailAddressHooksTC(EnvBasedTC):
+
+    def test_use_email_set_primary_email(self):
+        self.execute('INSERT EmailAddress X: X address "admin@logilab.fr", U use_email X WHERE U login "admin"')
+        self.assertEquals(self.execute('Any A WHERE U primary_email X, U login "admin", X address A').rows,
+                          [])
+        self.commit()
+        self.assertEquals(self.execute('Any A WHERE U primary_email X, U login "admin", X address A')[0][0],
+                          'admin@logilab.fr')
+        # having another email shouldn't change anything
+        self.execute('INSERT EmailAddress X: X address "a@logilab.fr", U use_email X WHERE U login "admin"')
+        self.commit()
+        self.assertEquals(self.execute('Any A WHERE U primary_email X, U login "admin", X address A')[0][0],
+                          'admin@logilab.fr')
+
+    def test_primary_email_set_use_email(self):
+        self.execute('INSERT EmailAddress X: X address "admin@logilab.fr", U primary_email X WHERE U login "admin"')
+        self.assertEquals(self.execute('Any A WHERE U use_email X, U login "admin", X address A').rows,
+                          [])
+        self.commit()
+        self.assertEquals(self.execute('Any A WHERE U use_email X, U login "admin", X address A')[0][0],
+                          'admin@logilab.fr')
+        
+
+if __name__ == '__main__':
+    from logilab.common.testlib import unittest_main
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sobjects/test/unittest_hooks.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,30 @@
+from logilab.common.testlib import unittest_main
+from cubicweb.devtools.apptest import EnvBasedTC
+
+class HooksTC(EnvBasedTC):
+
+    def test_euser_login_stripped(self):
+        u = self.create_user('  joe  ')
+        tname = self.execute('Any L WHERE E login L, E eid %(e)s',
+                             {'e': u.eid})[0][0]
+        self.assertEquals(tname, 'joe')
+        self.execute('SET X login " jijoe " WHERE X eid %(x)s', {'x': u.eid})
+        tname = self.execute('Any L WHERE E login L, E eid %(e)s',
+                             {'e': u.eid})[0][0]
+        self.assertEquals(tname, 'jijoe')
+
+    
+    def test_auto_delete_bookmarks(self):
+        beid = self.execute('INSERT Bookmark X: X title "hop", X path "view", X bookmarked_by U '
+                            'WHERE U login "admin"')[0][0]
+        self.execute('SET X bookmarked_by U WHERE U login "anon"')
+        self.commit()
+        self.execute('DELETE X bookmarked_by U WHERE U login "admin"')
+        self.commit()
+        self.failUnless(self.execute('Any X WHERE X eid %(x)s', {'x': beid}, 'x'))
+        self.execute('DELETE X bookmarked_by U WHERE U login "anon"')
+        self.commit()
+        self.failIf(self.execute('Any X WHERE X eid %(x)s', {'x': beid}, 'x'))
+        
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sobjects/test/unittest_notification.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,88 @@
+# -*- coding: iso-8859-1 -*-
+from socket import gethostname
+
+from logilab.common.testlib import unittest_main, TestCase
+from cubicweb.devtools.apptest import EnvBasedTC
+
+from mx.DateTime import now
+
+from cubicweb.sobjects.notification import construct_message_id, parse_message_id
+
+class MessageIdTC(TestCase):
+    def test_base(self):
+        msgid1 = construct_message_id('testapp', 21)
+        msgid2 = construct_message_id('testapp', 21)
+        self.failIfEqual(msgid1, msgid2)
+        self.failIf('&' in msgid1)
+        self.failIf('=' in msgid1)
+        self.failIf('/' in msgid1)
+        self.failIf('+' in msgid1)
+        values = parse_message_id(msgid1, 'testapp')
+        self.failUnless(values)
+        # parse_message_id should work with or without surrounding <>
+        self.failUnlessEqual(values, parse_message_id(msgid1[1:-1], 'testapp'))
+        self.failUnlessEqual(values['eid'], '21')
+        self.failUnless('timestamp' in values)
+        self.failUnlessEqual(parse_message_id(msgid1[1:-1], 'anotherapp'), None)
+        
+    def test_notimestamp(self):
+        msgid1 = construct_message_id('testapp', 21, False)
+        msgid2 = construct_message_id('testapp', 21, False)
+        values = parse_message_id(msgid1, 'testapp')
+        self.failUnlessEqual(values, {'eid': '21'})
+
+    def test_parse_message_doesnt_raise(self):
+        self.failUnlessEqual(parse_message_id('oijioj@bla.bla', 'tesapp'), None)
+        self.failUnlessEqual(parse_message_id('oijioj@bla', 'tesapp'), None)
+        self.failUnlessEqual(parse_message_id('oijioj', 'tesapp'), None)
+
+
+    def test_nonregr_empty_message_id(self):
+        for eid in (1, 12, 123, 1234):
+            msgid1 = construct_message_id('testapp', eid, 12)
+            self.assertNotEquals(msgid1, '<@testapp.%s>' % gethostname())
+        
+
+class RecipientsFinderTC(EnvBasedTC):
+    def test(self):
+        urset = self.execute('EUser X WHERE X login "admin"')
+        self.execute('INSERT EmailAddress X: X address "admin@logilab.fr", U primary_email X '
+                     'WHERE U eid %(x)s', {'x': urset[0][0]})
+        self.execute('INSERT EProperty X: X pkey "ui.language", X value "fr", X for_user U '
+                     'WHERE U eid %(x)s', {'x': urset[0][0]})
+        self.commit() # commit so that admin get its properties updated
+        finder = self.vreg.select_component('recipients_finder', self.request(), urset)
+        self.set_option('default-recipients-mode', 'none')
+        self.assertEquals(finder.recipients(), [])
+        self.set_option('default-recipients-mode', 'users')
+        self.assertEquals(finder.recipients(), [(u'admin@logilab.fr', 'fr')])
+        self.set_option('default-recipients-mode', 'default-dest-addrs')
+        self.set_option('default-dest-addrs', 'abcd@logilab.fr, efgh@logilab.fr')
+        self.assertEquals(finder.recipients(), [('abcd@logilab.fr', 'en'), ('efgh@logilab.fr', 'en')])
+        
+
+class StatusChangeViewsTC(EnvBasedTC):
+        
+    def test_status_change_view(self):
+        req = self.session()
+        u = self.create_user('toto', req=req)
+        assert u.req
+        self.execute('SET X in_state S WHERE X eid %s, S name "deactivated"' % u.eid)
+        v = self.vreg.select_view('notif_status_change', req, u.rset, row=0)
+        content = v.dispatch(row=0, comment='yeah',
+                             previous_state='activated',
+                             current_state='deactivated')
+        # remove date
+        self.assertEquals(content,
+                          '''
+admin changed status from <activated> to <deactivated> for entity
+'toto'
+
+yeah
+
+url: http://testing.fr/cubicweb/euser/toto
+''')
+        self.assertEquals(v.subject(), 'status changed euser #%s (admin)' % u.eid)
+
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sobjects/test/unittest_supervising.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,80 @@
+# -*- coding: iso-8859-1 -*-
+import re
+
+from logilab.common.testlib import unittest_main
+from cubicweb.devtools.apptest import EnvBasedTC
+
+from mx.DateTime import now
+
+from cubicweb.sobjects.supervising import SendMailOp, SupervisionMailOp
+
+
+class SupervisingTC(EnvBasedTC):
+
+    def setup_database(self):
+        self.add_entity('Card', title=u"une news !", content=u"cubicweb c'est beau")
+        self.add_entity('Card', title=u"une autre news !", content=u"cubicweb c'est beau")
+        self.add_entity('Bookmark', title=u"un signet !", path=u"view?vid=index")
+        self.add_entity('Comment', content=u"Yo !")
+        self.execute('SET C comments B WHERE B title "une autre news !", C content "Yo !"')
+        self.vreg.config.global_set_option('supervising-addrs', 'test@logilab.fr')
+
+        
+    def test_supervision(self):
+        session = self.session()
+        # do some modification
+        ueid = self.execute('INSERT EUser X: X login "toto", X upassword "sosafe", X in_group G, X in_state S '
+                            'WHERE G name "users", S name "activated"')[0][0]        
+        self.execute('SET X last_login_time NOW WHERE X eid %(x)s', {'x': ueid}, 'x')
+        self.execute('SET X in_state S WHERE X login "anon", S name "deactivated"')
+        self.execute('DELETE Card B WHERE B title "une news !"')
+        self.execute('SET X bookmarked_by U WHERE X is Bookmark, U eid %(x)s', {'x': ueid}, 'x')
+        self.execute('SET X content "duh?" WHERE X is Comment')
+        self.execute('DELETE X comments Y WHERE Y is Card, Y title "une autre news !"')
+        # check only one supervision email operation
+        sentops = [op for op in session.pending_operations
+                   if isinstance(op, SupervisionMailOp)]
+        self.assertEquals(len(sentops), 1)
+        # check view content
+        op = sentops[0]
+        view = sentops[0]._get_view()
+        self.assertEquals(view.recipients(), ['test@logilab.fr'])
+        self.assertEquals(view.subject(), '[data supervision] changes summary')
+        data = view.dispatch(changes=session.query_data('pendingchanges')).strip()
+        data = re.sub('#\d+', '#EID', data)
+        data = re.sub('/\d+', '/EID', data)
+        self.assertTextEquals('''user admin has made the following change(s):
+
+* added euser #EID (toto)
+  http://testing.fr/cubicweb/euser/toto
+
+* added relation in_group from euser #EID to egroup #EID
+
+* deleted card #EID (une news !)
+
+* added relation bookmarked_by from bookmark #EID to euser #EID
+
+* updated comment #EID (#EID)
+  http://testing.fr/cubicweb/comment/EID
+
+* deleted relation comments from comment #EID to card #EID
+
+* changed state of euser #EID (anon)
+  from state activated to state deactivated
+  http://testing.fr/cubicweb/euser/anon''',
+                              data)
+        # check prepared email
+        op._prepare_email()
+        self.assertEquals(len(op.to_send), 1) 
+        self.assert_(op.to_send[0][0])
+        self.assertEquals(op.to_send[0][1], ['test@logilab.fr']) 
+
+    def test_nonregr1(self):
+        session = self.session()
+        # do some unlogged modification
+        self.execute('SET X last_login_time NOW WHERE X eid %(x)s', {'x': session.user.eid}, 'x')
+        self.commit() # no crash
+
+        
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/data/bootstrap_packages	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/data/erqlexpr_on_ertype.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,23 @@
+class ToTo(EntityType):
+    permissions = {
+        'read': ('managers',),
+        'add': ('managers',),
+        'update': ('managers',),
+        'delete': ('managers',),
+        }
+    toto = SubjectRelation('TuTu')
+    
+class TuTu(EntityType):
+    permissions = {
+        'read': ('managers',),
+        'add': ('managers',),
+        'update': ('managers',),
+        'delete': ('managers',),
+        }
+
+class toto(RelationType):
+    permissions = {
+        'read': ('managers', ),
+        'add': ('managers', ERQLExpression('S bla Y'),),
+        'delete': ('managers',),
+        }
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/data/rqlexpr_on_ertype_read.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,23 @@
+class ToTo(EntityType):
+    permissions = {
+        'read': ('managers',),
+        'add': ('managers',),
+        'update': ('managers',),
+        'delete': ('managers',),
+        }
+    toto = SubjectRelation('TuTu')
+    
+class TuTu(EntityType):
+    permissions = {
+        'read': ('managers',),
+        'add': ('managers',),
+        'update': ('managers',),
+        'delete': ('managers',),
+        }
+
+class toto(RelationType):
+    permissions = {
+        'read': ('managers', RRQLExpression('S bla Y'), ),
+        'add': ('managers',),
+        'delete': ('managers',),
+        }
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/data/rrqlexpr_on_attr.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,15 @@
+class ToTo(EntityType):
+    permissions = {
+        'read': ('managers',),
+        'add': ('managers',),
+        'update': ('managers',),
+        'delete': ('managers',),
+        }
+    attr = String()
+    
+class attr(RelationType):
+    permissions = {
+        'read': ('managers', ),
+        'add': ('managers', RRQLExpression('S bla Y'),),
+        'delete': ('managers',),
+        }
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/data/rrqlexpr_on_eetype.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,8 @@
+class ToTo(EntityType):
+    permissions = {
+        'read': ('managers', RRQLExpression('S bla Y'),),
+        'add': ('managers',),
+        'update': ('managers',),
+        'delete': ('managers',),
+        }
+    attr = String()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/unittest_cwconfig.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,74 @@
+import os
+from tempfile import mktemp
+
+from logilab.common.testlib import TestCase, unittest_main
+from logilab.common.changelog import Version
+
+from cubicweb.devtools import ApptestConfiguration
+
+def unabsolutize(path):
+    parts = path.split(os.sep)
+    for i, part in enumerate(parts):
+        if part in ('cubicweb', 'cubes', 'cubes'):
+            return '/'.join(parts[i+1:])
+    raise Exception('duh? %s' % path)
+    
+class CubicWebConfigurationTC(TestCase):
+    def setUp(self):
+        self.config = ApptestConfiguration('data')
+        self.config._cubes = ('eemail', 'efile')
+
+    def test_reorder_cubes(self):
+        # jpl depends on eemail and efile and ecomment
+        # eemail depends on efile
+        self.assertEquals(self.config.reorder_cubes(['efile', 'eemail', 'jpl']),
+                          ('jpl', 'eemail', 'efile'))
+        self.assertEquals(self.config.reorder_cubes(['eemail', 'efile', 'jpl']),
+                          ('jpl', 'eemail', 'efile'))
+        self.assertEquals(self.config.reorder_cubes(['eemail', 'jpl', 'efile']),
+                          ('jpl', 'eemail', 'efile'))
+        self.assertEquals(self.config.reorder_cubes(['efile', 'jpl', 'eemail']),
+                          ('jpl', 'eemail', 'efile'))
+        self.assertEquals(self.config.reorder_cubes(['jpl', 'efile', 'eemail']),
+                          ('jpl', 'eemail', 'efile'))
+        self.assertEquals(self.config.reorder_cubes(('jpl', 'eemail', 'efile')),
+                          ('jpl', 'eemail', 'efile'))
+        
+    def test_reorder_cubes_recommends(self):
+        from ecomment import __pkginfo__ as ecomment_pkginfo
+        ecomment_pkginfo.__recommend__ = ('efile',)
+        try:
+            # eemail recommends ecomment
+            # ecomment recommends efile
+            self.assertEquals(self.config.reorder_cubes(('jpl', 'eemail', 'efile', 'ecomment')),
+                              ('jpl', 'eemail', 'ecomment', 'efile'))
+            self.assertEquals(self.config.reorder_cubes(('jpl', 'eemail', 'ecomment', 'efile')),
+                              ('jpl', 'eemail', 'ecomment', 'efile'))
+            self.assertEquals(self.config.reorder_cubes(('jpl', 'ecomment', 'eemail', 'efile')),
+                              ('jpl', 'eemail', 'ecomment', 'efile'))
+            self.assertEquals(self.config.reorder_cubes(('ecomment', 'jpl', 'eemail', 'efile')),
+                              ('jpl', 'eemail', 'ecomment', 'efile'))
+        finally:
+            ecomment_pkginfo.__use__ = ()
+            
+        
+#     def test_vc_config(self):
+#         vcconf = self.config.vc_config()
+#         self.assertIsInstance(vcconf['EEMAIL'], Version)
+#         self.assertEquals(vcconf['EEMAIL'], (0, 3, 1))
+#         self.assertEquals(vcconf['CW'], (2, 31, 2))
+#         self.assertRaises(KeyError, vcconf.__getitem__, 'CW_VERSION')
+#         self.assertRaises(KeyError, vcconf.__getitem__, 'CRM')
+        
+    def test_expand_cubes(self):
+        self.assertEquals(self.config.expand_cubes(('eemail', 'eblog')),
+                          ['eemail', 'eblog', 'efile'])
+
+    def test_vregistry_path(self):
+        self.assertEquals([unabsolutize(p) for p in self.config.vregistry_path()],
+                          ['entities', 'web/views', 'sobjects',
+                           'efile/entities.py', 'efile/views', 'efile/hooks.py',
+                           'eemail/entities.py', 'eemail/views', 'eemail/hooks.py'])
+            
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/unittest_cwctl.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,28 @@
+import sys
+import os
+from cStringIO import StringIO
+from logilab.common.testlib import TestCase, unittest_main
+
+if os.environ.get('APYCOT_ROOT'):
+    root = os.environ['APYCOT_ROOT']
+    CUBES_DIR = '%s/local/share/cubicweb/cubes/' % root
+    os.environ['CW_CUBES'] = CUBES_DIR
+    REGISTRY_DIR = '%s/etc/cubicweb.d/' % root
+    os.environ['CW_REGISTRY_DIR'] = REGISTRY_DIR
+
+from cubicweb.cwconfig import CubicWebConfiguration
+CubicWebConfiguration.load_cwctl_plugins()
+
+class CubicWebCtlTC(TestCase):
+    def setUp(self):
+        self.stream = StringIO()
+        sys.stdout = self.stream
+    def tearDown(self):
+        sys.stdout = sys.__stdout__
+        
+    def test_list(self):
+        from cubicweb.cwctl import ListCommand
+        ListCommand().run([])
+        
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/unittest_dbapi.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,82 @@
+from cubicweb import ConnectionError
+from cubicweb.dbapi import ProgrammingError
+from cubicweb.devtools.apptest import EnvBasedTC
+
+
+class DBAPITC(EnvBasedTC):
+    @property
+    def cnx(self):
+        return self.login('anon')
+
+    def test_public_repo_api(self):
+        cnx = self.cnx
+        self.assertEquals(cnx.get_schema(), self.env.repo.schema)
+        self.assertEquals(cnx.source_defs(), {'system': {'adapter': 'native', 'uri': 'system'}})
+        self.restore_connection() # proper way to close cnx
+        self.assertRaises(ProgrammingError, cnx.get_schema)
+        self.assertRaises(ProgrammingError, cnx.source_defs)
+
+    def test_db_api(self):
+        cnx = self.cnx
+        self.assertEquals(cnx.rollback(), None)
+        self.assertEquals(cnx.commit(), None)
+        self.restore_connection() # proper way to close cnx
+        #self.assertEquals(cnx.close(), None)
+        self.assertRaises(ProgrammingError, cnx.rollback)
+        self.assertRaises(ProgrammingError, cnx.commit)
+        self.assertRaises(ProgrammingError, cnx.close)
+
+    def test_api(self):
+        cnx = self.cnx
+        self.assertEquals(cnx.user(None).login, 'anon')
+        self.assertEquals(cnx.describe(1), (u'EGroup', u'system', None))
+        self.restore_connection() # proper way to close cnx
+        self.assertRaises(ConnectionError, cnx.user, None)
+        self.assertRaises(ConnectionError, cnx.describe, 1)
+
+    def test_session_data_api(self):
+        cnx = self.cnx
+        self.assertEquals(cnx.get_session_data('data'), None)
+        self.assertEquals(cnx.session_data(), {})
+        cnx.set_session_data('data', 4)
+        self.assertEquals(cnx.get_session_data('data'), 4)
+        self.assertEquals(cnx.session_data(), {'data': 4})
+        cnx.del_session_data('data')
+        cnx.del_session_data('whatever')
+        self.assertEquals(cnx.get_session_data('data'), None)
+        self.assertEquals(cnx.session_data(), {})
+        cnx.session_data()['data'] = 4
+        self.assertEquals(cnx.get_session_data('data'), 4)
+        self.assertEquals(cnx.session_data(), {'data': 4})
+
+    def test_shared_data_api(self):
+        cnx = self.cnx
+        self.assertEquals(cnx.get_shared_data('data'), None)
+        cnx.set_shared_data('data', 4)
+        self.assertEquals(cnx.get_shared_data('data'), 4)
+        cnx.get_shared_data('data', pop=True)
+        cnx.get_shared_data('whatever', pop=True)
+        self.assertEquals(cnx.get_shared_data('data'), None)
+        cnx.set_shared_data('data', 4)
+        self.assertEquals(cnx.get_shared_data('data'), 4)
+        self.restore_connection() # proper way to close cnx
+        self.assertRaises(ConnectionError, cnx.check)
+        self.assertRaises(ConnectionError, cnx.set_shared_data, 'data', 0)
+        self.assertRaises(ConnectionError, cnx.get_shared_data, 'data')
+
+
+# class DBAPICursorTC(EnvBasedTC):
+
+#     @property
+#     def cursor(self):
+#         return self.env.cnx.cursor()
+
+#     def test_api(self):
+#         cu = self.cursor
+#         self.assertEquals(cu.describe(1), (u'EGroup', u'system', None))
+#         #cu.close()
+#         #self.assertRaises(ConnectionError, cu.describe, 1)
+
+if __name__ == '__main__':
+    from logilab.common.testlib import unittest_main
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/unittest_rset.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,347 @@
+# coding: utf-8
+"""unit tests for module cubicweb.common.utils"""
+
+from logilab.common.testlib import TestCase, unittest_main
+from cubicweb.devtools.apptest import EnvBasedTC
+
+from urlparse import urlsplit
+from rql import parse
+
+from cubicweb.rset import NotAnEntity, ResultSet, attr_desc_iterator
+
+        
+def pprelcachedict(d):
+    res = {}
+    for k, (rset, related) in d.items():
+        res[k] = sorted(v.eid for v in related)
+    return sorted(res.items())
+        
+
+class AttrDescIteratorTC(TestCase):
+    """TestCase for cubicweb.rset.attr_desc_iterator"""
+    
+    def test_relations_description(self):
+        """tests relations_description() function"""
+        queries = {
+            'Any U,L,M where U is EUser, U login L, U mail M' : [(1, 'login', 'subject'), (2, 'mail', 'subject')],
+            'Any U,L,M where U is EUser, L is Foo, U mail M' : [(2, 'mail', 'subject')],
+            'Any C,P where C is Company, C employs P' : [(1, 'employs', 'subject')],
+            'Any C,P where C is Company, P employed_by P' : [],
+            'Any C where C is Company, C employs P' : [],
+            }
+        for rql, relations in queries.items():
+            result = list(attr_desc_iterator(parse(rql).children[0]))
+            self.assertEquals((rql, result), (rql, relations))
+            
+    def test_relations_description_indexed(self):
+        """tests relations_description() function"""
+        queries = {
+            'Any C,U,P,L,M where C is Company, C employs P, U is EUser, U login L, U mail M' :
+            {0: [(2,'employs', 'subject')], 1: [(3,'login', 'subject'), (4,'mail', 'subject')]},
+            }
+        for rql, results in queries.items():
+            for var_index, relations in results.items():
+                result = list(attr_desc_iterator(parse(rql).children[0], var_index))
+                self.assertEquals(result, relations)
+
+
+
+class ResultSetTC(EnvBasedTC):    
+
+    def setUp(self):
+        super(ResultSetTC, self).setUp()
+        self.rset = ResultSet([[12, 'adim'], [13, 'syt']],
+                              'Any U,L where U is EUser, U login L',
+                              description=[['EUser', 'String'], ['Bar', 'String']])
+        self.rset.vreg = self.vreg
+
+    def compare_urls(self, url1, url2):
+        info1 = urlsplit(url1)
+        info2 = urlsplit(url2)
+        self.assertEquals(info1[:3], info2[:3])
+        if info1[3] != info2[3]:
+            params1 = dict(pair.split('=') for pair in info1[3].split('&'))
+            params2 = dict(pair.split('=') for pair in info1[3].split('&'))
+            self.assertDictEquals(params1, params2)
+
+        
+    def test_build_url(self):
+        req = self.request()
+        baseurl = req.base_url()
+        self.compare_urls(req.build_url('view', vid='foo', rql='yo'),
+                          '%sview?vid=foo&rql=yo' % baseurl)
+        self.compare_urls(req.build_url('view', _restpath='task/title/go'),
+                          '%stask/title/go' % baseurl)
+        #self.compare_urls(req.build_url('view', _restpath='/task/title/go'),
+        #                  '%stask/title/go' % baseurl)
+        # empty _restpath should not crash
+        self.compare_urls(req.build_url('view', _restpath=''), baseurl)
+                
+        
+    def test_resultset_build(self):
+        """test basic build of a ResultSet"""
+        rs = ResultSet([1,2,3], 'EGroup X', description=['EGroup', 'EGroup', 'EGroup'])
+        self.assertEquals(rs.rowcount, 3)
+        self.assertEquals(rs.rows, [1,2,3])
+        self.assertEquals(rs.description, ['EGroup', 'EGroup', 'EGroup'])
+
+
+    def test_resultset_limit(self):
+        rs = ResultSet([[12000, 'adim'], [13000, 'syt'], [14000, 'nico']],
+                       'Any U,L where U is EUser, U login L',
+                       description=[['EUser', 'String']] * 3)
+        rs.req = self.request()
+        rs.vreg = self.env.vreg
+
+        self.assertEquals(rs.limit(2).rows, [[12000, 'adim'], [13000, 'syt']])
+        rs2 = rs.limit(2, offset=1)
+        self.assertEquals(rs2.rows, [[13000, 'syt'], [14000, 'nico']])
+        self.assertEquals(rs2.get_entity(0, 0).row, 0)
+        self.assertEquals(rs.limit(2, offset=2).rows, [[14000, 'nico']])
+        self.assertEquals(rs.limit(2, offset=3).rows, [])
+        
+
+    def test_resultset_filter(self):
+        rs = ResultSet([[12000, 'adim'], [13000, 'syt'], [14000, 'nico']],
+                       'Any U,L where U is EUser, U login L',
+                       description=[['EUser', 'String']] * 3)
+        rs.req = self.request()
+        rs.vreg = self.env.vreg
+        def test_filter(entity):
+            return entity.login != 'nico'
+        
+        rs2 = rs.filtered_rset(test_filter)
+        self.assertEquals(len(rs2), 2)
+        self.assertEquals([login for _, login in rs2], ['adim', 'syt'])
+        
+    def test_resultset_transform(self):
+        rs = ResultSet([[12, 'adim'], [13, 'syt'], [14, 'nico']],
+                       'Any U,L where U is EUser, U login L',
+                       description=[['EUser', 'String']] * 3)
+        rs.req = self.request()
+        def test_transform(row, desc):
+            return row[1:], desc[1:]
+        rs2 = rs.transformed_rset(test_transform)
+
+        self.assertEquals(len(rs2), 3)
+        self.assertEquals(list(rs2), [['adim'],['syt'],['nico']])
+        
+    def test_resultset_sort(self):
+        rs = ResultSet([[12000, 'adim'], [13000, 'syt'], [14000, 'nico']],
+                       'Any U,L where U is EUser, U login L',
+                       description=[['EUser', 'String']] * 3)
+        rs.req = self.request()
+        rs.vreg = self.env.vreg
+        
+        rs2 = rs.sorted_rset(lambda e:e['login'])
+        self.assertEquals(len(rs2), 3)
+        self.assertEquals([login for _, login in rs2], ['adim', 'nico', 'syt'])
+        # make sure rs is unchanged
+        self.assertEquals([login for _, login in rs], ['adim', 'syt', 'nico'])
+        
+        rs2 = rs.sorted_rset(lambda e:e['login'], reverse=True)
+        self.assertEquals(len(rs2), 3)
+        self.assertEquals([login for _, login in rs2], ['syt', 'nico', 'adim'])
+        # make sure rs is unchanged
+        self.assertEquals([login for _, login in rs], ['adim', 'syt', 'nico'])
+
+        rs3 = rs.sorted_rset(lambda row: row[1], col=-1)
+        self.assertEquals(len(rs3), 3)
+        self.assertEquals([login for _, login in rs3], ['adim', 'nico', 'syt'])
+        # make sure rs is unchanged
+        self.assertEquals([login for _, login in rs], ['adim', 'syt', 'nico'])
+
+    def test_resultset_split(self):
+        rs = ResultSet([[12000, 'adim', u'Adim chez les pinguins'],
+                        [12000, 'adim', u'Jardiner facile'],
+                        [13000, 'syt',  u'Le carrelage en 42 leçons'],
+                        [14000, 'nico', u'La tarte tatin en 15 minutes'],
+                        [14000, 'nico', u"L'épluchage du castor commun"]],
+                       'Any U, L, T WHERE U is EUser, U login L,'\
+                       'D created_by U, D title T',
+                       description=[['EUser', 'String', 'String']] * 5)
+        rs.req = self.request()
+        rs.vreg = self.env.vreg
+        
+        rsets = rs.split_rset(lambda e:e['login'])
+        self.assertEquals(len(rsets), 3)
+        self.assertEquals([login for _, login,_ in rsets[0]], ['adim', 'adim'])
+        self.assertEquals([login for _, login,_ in rsets[1]], ['syt'])
+        self.assertEquals([login for _, login,_ in rsets[2]], ['nico', 'nico'])
+        # make sure rs is unchanged
+        self.assertEquals([login for _, login,_ in rs], ['adim', 'adim', 'syt', 'nico', 'nico'])
+        
+        rsets = rs.split_rset(lambda e:e['login'], return_dict=True)
+        self.assertEquals(len(rsets), 3)
+        self.assertEquals([login for _, login,_ in rsets['nico']], ['nico', 'nico'])
+        self.assertEquals([login for _, login,_ in rsets['adim']], ['adim', 'adim'])
+        self.assertEquals([login for _, login,_ in rsets['syt']], ['syt'])
+        # make sure rs is unchanged
+        self.assertEquals([login for _, login,_ in rs], ['adim', 'adim', 'syt', 'nico', 'nico'])
+
+        rsets = rs.split_rset(lambda s: s.count('d'), col=2)
+        self.assertEquals(len(rsets), 2)
+        self.assertEquals([title for _, _, title in rsets[0]],
+                          [u"Adim chez les pinguins",
+                           u"Jardiner facile",
+                           u"L'épluchage du castor commun",])
+        self.assertEquals([title for _, _, title in rsets[1]],
+                          [u"Le carrelage en 42 leçons",
+                           u"La tarte tatin en 15 minutes",])
+        # make sure rs is unchanged
+        self.assertEquals([title for _, _, title in rs],
+                          [u'Adim chez les pinguins',
+                           u'Jardiner facile',
+                           u'Le carrelage en 42 leçons',
+                           u'La tarte tatin en 15 minutes',
+                           u"L'épluchage du castor commun"])
+        
+    def test_cached_syntax_tree(self):
+        """make sure syntax tree is cached"""
+        rqlst1 = self.rset.syntax_tree()
+        rqlst2 = self.rset.syntax_tree()
+        self.assert_(rqlst1 is rqlst2)
+
+    def test_get_entity_simple(self):
+        self.add_entity('EUser', login=u'adim', upassword='adim',
+                        surname=u'di mascio', firstname=u'adrien')
+        e = self.entity('Any X,T WHERE X login "adim", X surname T')
+        self.assertEquals(e['surname'], 'di mascio')
+        self.assertRaises(KeyError, e.__getitem__, 'firstname')
+        self.assertRaises(KeyError, e.__getitem__, 'creation_date')
+        self.assertEquals(pprelcachedict(e._related_cache), [])
+        e.complete()
+        self.assertEquals(e['firstname'], 'adrien')
+        self.assertEquals(pprelcachedict(e._related_cache), [])
+        
+    def test_get_entity_advanced(self):
+        self.add_entity('Bookmark', title=u'zou', path=u'/view')
+        self.execute('SET X bookmarked_by Y WHERE X is Bookmark, Y login "anon"')
+        rset = self.execute('Any X,Y,XT,YN WHERE X bookmarked_by Y, X title XT, Y login YN')
+        
+        e = rset.get_entity(0, 0)
+        self.assertEquals(e.row, 0)
+        self.assertEquals(e.col, 0)
+        self.assertEquals(e['title'], 'zou')
+        self.assertRaises(KeyError, e.__getitem__, 'path')
+        self.assertEquals(e.view('text'), 'zou')
+        self.assertEquals(pprelcachedict(e._related_cache), [])
+        
+        e = rset.get_entity(0, 1)
+        self.assertEquals(e.row, 0)
+        self.assertEquals(e.col, 1)
+        self.assertEquals(e['login'], 'anon')
+        self.assertRaises(KeyError, e.__getitem__, 'firstname')
+        self.assertEquals(pprelcachedict(e._related_cache),
+                          [])
+        e.complete()
+        self.assertEquals(e['firstname'], None)
+        self.assertEquals(e.view('text'), 'anon')
+        self.assertEquals(pprelcachedict(e._related_cache),
+                          [])
+        
+        self.assertRaises(NotAnEntity, rset.get_entity, 0, 2)
+        self.assertRaises(NotAnEntity, rset.get_entity, 0, 3)
+
+    def test_get_entity_relation_cache_compt(self):
+        rset = self.execute('Any X,S WHERE X in_state S, X login "anon"')
+        e = rset.get_entity(0, 0)
+        seid = self.execute('State X WHERE X name "activated"')[0][0]
+        # for_user / in_group are prefetched in EUser __init__, in_state should
+        # be filed from our query rset
+        self.assertEquals(pprelcachedict(e._related_cache),
+                          [('in_state_subject', [seid])])
+
+    def test_get_entity_advanced_prefilled_cache(self):
+        e = self.add_entity('Bookmark', title=u'zou', path=u'path')
+        self.commit()
+        rset = self.execute('Any X,U,S,XT,UL,SN WHERE X created_by U, U in_state S, '
+                            'X title XT, S name SN, U login UL, X eid %s' % e.eid)
+        e = rset.get_entity(0, 0)
+        self.assertEquals(e['title'], 'zou')
+        self.assertEquals(pprelcachedict(e._related_cache),
+                          [('created_by_subject', [5])])
+        # first level of recursion
+        u = e.created_by[0]
+        self.assertEquals(u['login'], 'admin')
+        self.assertRaises(KeyError, u.__getitem__, 'firstname')
+        # second level of recursion
+        s = u.in_state[0]
+        self.assertEquals(s['name'], 'activated')
+        self.assertRaises(KeyError, s.__getitem__, 'description')
+
+        
+    def test_get_entity_cache_with_left_outer_join(self):
+        eid = self.execute('INSERT EUser E: E login "joe", E upassword "joe", E in_group G '
+                           'WHERE G name "users"')[0][0]
+        rset = self.execute('Any X,E WHERE X eid %(x)s, X primary_email E?', {'x': eid})
+        e = rset.get_entity(0, 0)
+        # if any of the assertion below fails with a KeyError, the relation is not cached
+        # related entities should be an empty list
+        self.assertEquals(e.related_cache('primary_email', 'subject', True), [])
+        # related rset should be an empty rset
+        cached = e.related_cache('primary_email', 'subject', False)
+        self.assertIsInstance(cached, ResultSet)
+        self.assertEquals(cached.rowcount, 0)
+        
+
+    def test_get_entity_union(self):
+        e = self.add_entity('Bookmark', title=u'manger', path=u'path')
+        rset = self.execute('Any X,N ORDERBY N WITH X,N BEING '
+                            '((Any X,N WHERE X is Bookmark, X title N)'
+                            ' UNION '
+                            ' (Any X,N WHERE X is EGroup, X name N))')
+        expected = (('EGroup', 'guests'), ('EGroup', 'managers'),
+                    ('Bookmark', 'manger'), ('EGroup', 'owners'),
+                    ('EGroup', 'users'))
+        for entity in rset.entities(): # test get_entity for each row actually
+            etype, n = expected[entity.row]
+            self.assertEquals(entity.id, etype)
+            attr = etype == 'Bookmark' and 'title' or 'name'
+            self.assertEquals(entity[attr], n)
+        
+    
+    def test_related_entity_union_subquery(self):
+        e = self.add_entity('Bookmark', title=u'aaaa', path=u'path')
+        rset = self.execute('Any X,N ORDERBY N WITH X,N BEING '
+                            '((Any X,N WHERE X is EGroup, X name N)'
+                            ' UNION '
+                            ' (Any X,N WHERE X is Bookmark, X title N))')
+        entity, rtype = rset.related_entity(0, 1)
+        self.assertEquals(entity.eid, e.eid)
+        self.assertEquals(rtype, 'title')
+        entity, rtype = rset.related_entity(1, 1)
+        self.assertEquals(entity.id, 'EGroup')
+        self.assertEquals(rtype, 'name')
+        rset = self.execute('Any X,N ORDERBY N WHERE X is Bookmark WITH X,N BEING '
+                            '((Any X,N WHERE X is EGroup, X name N)'
+                            ' UNION '
+                            ' (Any X,N WHERE X is Bookmark, X title N))')
+        entity, rtype = rset.related_entity(0, 1)
+        self.assertEquals(entity.eid, e.eid)
+        self.assertEquals(rtype, 'title')
+        
+    def test_entities(self):
+        rset = self.execute('Any U,G WHERE U in_group G')
+        # make sure we have at least one element
+        self.failUnless(rset)
+        self.assertEquals(set(e.e_schema.type for e in rset.entities(0)),
+                          set(['EUser',]))
+        self.assertEquals(set(e.e_schema.type for e in rset.entities(1)),
+                          set(['EGroup',]))
+
+    def test_printable_rql(self):        
+        rset = self.execute(u'EEType X WHERE X final FALSE, X meta FALSE')
+        self.assertEquals(rset.printable_rql(),
+                          'Any X WHERE X final FALSE, X meta FALSE, X is EEType')
+
+
+    def test_searched_text(self):
+        rset = self.execute(u'Any X WHERE X has_text "foobar"')
+        self.assertEquals(rset.searched_text(), 'foobar')
+        rset = self.execute(u'Any X WHERE X has_text %(text)s', {'text' : 'foo'})
+        self.assertEquals(rset.searched_text(), 'foo')
+        
+   
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/unittest_schema.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,258 @@
+"""unit tests for module cubicweb.schema"""
+
+import sys
+from os.path import join, isabs, basename, dirname
+
+from logilab.common.testlib import TestCase, unittest_main
+
+from rql import RQLSyntaxError
+
+from yams import BadSchemaDefinition
+from yams.constraints import SizeConstraint, StaticVocabularyConstraint
+from yams.buildobjs import RelationDefinition, EntityType, RelationType
+
+from cubicweb.schema import CubicWebSchema, CubicWebEntitySchema, \
+     RQLConstraint, CubicWebSchemaLoader, ERQLExpression, RRQLExpression, \
+     normalize_expression
+from cubicweb.devtools import TestServerConfiguration as TestConfiguration
+
+DATADIR = join(dirname(__file__), 'data')
+
+# build a dummy schema ########################################################
+
+
+PERSONNE_PERMISSIONS =  {
+    'read':   ('managers', 'users', 'guests'),
+    'update': ('managers', 'owners'),
+    'add':    ('managers', ERQLExpression('X travaille S, S owned_by U')),
+    'delete': ('managers', 'owners',),
+    }
+
+CONCERNE_PERMISSIONS = {
+    'read':   ('managers', 'users', 'guests'),
+    'add':    ('managers', RRQLExpression('U has_update_permission S')),
+    'delete': ('managers', RRQLExpression('O owned_by U')),
+    }
+
+schema = CubicWebSchema('Test Schema')
+enote = schema.add_entity_type(EntityType('Note'))
+eaffaire = schema.add_entity_type(EntityType('Affaire'))
+eperson = schema.add_entity_type(EntityType('Personne', permissions=PERSONNE_PERMISSIONS))
+esociete = schema.add_entity_type(EntityType('Societe'))
+
+RELS = (
+    # attribute relations
+    ('Note date String'),
+    ('Note type String'),
+    ('Affaire sujet String'),
+    ('Affaire ref String'),
+    ('Personne nom String'),
+    ('Personne prenom String'),
+    ('Personne sexe String'),
+    ('Personne tel Int'),
+    ('Personne fax Int'),
+    ('Personne datenaiss Date'),
+    ('Personne TEST Boolean'),
+    ('Personne promo String'),
+    # real relations
+    ('Personne  travaille Societe'),
+    ('Personne  evaluee   Note'),
+    ('Societe evaluee   Note'),
+    ('Personne  concerne  Affaire'),
+    ('Personne  concerne  Societe'),
+    ('Affaire Concerne  Societe'),
+    )
+done = {}
+for rel in RELS:
+    _from, _type, _to = rel.split()
+    if not _type.lower() in done:
+        if _type == 'concerne':
+            schema.add_relation_type(RelationType(_type, permissions=CONCERNE_PERMISSIONS))
+        else:
+            schema.add_relation_type(RelationType(_type))
+        done[_type.lower()] = True
+    schema.add_relation_def(RelationDefinition(_from, _type, _to))
+
+class CubicWebSchemaTC(TestCase):
+
+    def test_normalize(self):
+        """test that entities, relations and attributes name are normalized
+        """
+        self.assertEqual(esociete.type, 'Societe')
+        self.assertEqual(schema.has_relation('TEST'), 0)
+        self.assertEqual(schema.has_relation('test'), 1)
+        self.assertEqual(eperson.subject_relation('test').type, 'test')
+        self.assertEqual(schema.has_relation('Concerne'), 0)
+        self.assertEqual(schema.has_relation('concerne'), 1)
+        self.assertEqual(schema.rschema('concerne').type, 'concerne')
+
+    def test_entity_perms(self):
+        eperson.set_default_groups()
+        self.assertEqual(eperson.get_groups('read'), set(('managers', 'users', 'guests')))
+        self.assertEqual(eperson.get_groups('update'), set(('managers', 'owners',)))
+        self.assertEqual(eperson.get_groups('delete'), set(('managers', 'owners')))
+        self.assertEqual(eperson.get_groups('add'), set(('managers',)))
+        self.assertEqual([str(e) for e in eperson.get_rqlexprs('add')],
+                         ['Any X WHERE X travaille S, S owned_by U, X eid %(x)s, U eid %(u)s'])
+        eperson.set_groups('read', ('managers',))
+        self.assertEqual(eperson.get_groups('read'), set(('managers',)))
+        
+    def test_relation_perms(self):
+        rconcerne = schema.rschema('concerne')
+        rconcerne.set_default_groups()
+        self.assertEqual(rconcerne.get_groups('read'), set(('managers', 'users', 'guests')))
+        self.assertEqual(rconcerne.get_groups('delete'), set(('managers',)))
+        self.assertEqual(rconcerne.get_groups('add'), set(('managers', )))
+        rconcerne.set_groups('read', ('managers',))
+        self.assertEqual(rconcerne.get_groups('read'), set(('managers',)))
+        self.assertEqual([str(e) for e in rconcerne.get_rqlexprs('add')],
+                         ['Any S WHERE U has_update_permission S, S eid %(s)s, U eid %(u)s'])
+
+    def test_erqlexpression(self):
+        self.assertRaises(RQLSyntaxError, ERQLExpression, '1')
+        expr = ERQLExpression('X travaille S, S owned_by U')
+        self.assertEquals(str(expr), 'Any X WHERE X travaille S, S owned_by U, X eid %(x)s, U eid %(u)s')
+        
+    def test_rrqlexpression(self):
+        self.assertRaises(Exception, RRQLExpression, '1')
+        self.assertRaises(RQLSyntaxError, RRQLExpression, 'O X Y')
+        expr = RRQLExpression('U has_update_permission O')
+        self.assertEquals(str(expr), 'Any O WHERE U has_update_permission O, O eid %(o)s, U eid %(u)s')
+        
+
+loader = CubicWebSchemaLoader()
+config = TestConfiguration('data')
+config.bootstrap_cubes()
+loader.lib_directory = config.schemas_lib_dir()
+    
+class SQLSchemaReaderClassTest(TestCase):
+
+    def test_knownValues_include_schema_files(self):
+        schema_files = loader.include_schema_files('Bookmark')
+        for file in schema_files:
+            self.assert_(isabs(file))
+        self.assertListEquals([basename(f) for f in schema_files], ['Bookmark.py'])
+
+    def test_knownValues_load_schema(self):
+        """read an url and return a Schema instance"""
+        schema = loader.load(config)
+        self.assert_(isinstance(schema, CubicWebSchema))
+        self.assertEquals(schema.name, 'data')
+        entities = [str(e) for e in schema.entities()]
+        entities.sort()
+        expected_entities = ['Bookmark', 'Boolean', 'Bytes', 'Card', 
+                             'Date', 'Datetime', 'Decimal',
+                             'EConstraint', 'EConstraintType', 'EEType',
+                             'EFRDef', 'EGroup', 'EmailAddress', 'ENFRDef',
+                             'EPermission', 'EProperty', 'ERType', 'EUser',
+                             'Float', 'Int', 'Interval', 
+                             'Password', 
+                             'RQLExpression', 
+                             'State', 'String', 'Time', 
+                             'Transition', 'TrInfo']
+        self.assertListEquals(entities, sorted(expected_entities))
+        relations = [str(r) for r in schema.relations()]
+        relations.sort()
+        expected_relations = ['add_permission', 'address', 'alias',
+                              'allowed_transition', 'bookmarked_by', 'canonical',
+
+                              'cardinality', 'comment', 'comment_format', 
+                              'composite', 'condition', 'constrained_by', 'content',
+                              'content_format', 'created_by', 'creation_date', 'cstrtype',
+
+                              'defaultval', 'delete_permission', 'description',
+                              'description_format', 'destination_state',
+
+                              'eid', 'expression', 'exprtype',
+
+                              'final', 'firstname', 'for_user',
+                              'from_entity', 'from_state', 'fulltext_container', 'fulltextindexed',
+
+                              'has_text', 
+                              'identical_to', 'identity', 'in_group', 'in_state', 'indexed',
+                              'initial_state', 'inlined', 'internationalizable', 'is', 'is_instance_of',
+
+                              'label', 'last_login_time', 'login',
+
+                              'mainvars', 'meta', 'modification_date',
+
+                              'name', 
+
+                              'ordernum', 'owned_by',
+
+                              'path', 'pkey', 'primary_email', 
+
+                              'read_permission', 'relation_type', 'require_group',
+                              
+                              'specializes', 'state_of', 'surname', 'symetric', 'synopsis',
+
+                              'title', 'to_entity', 'to_state', 'transition_of',
+
+                              'upassword', 'update_permission', 'use_email',
+
+                              'value', 
+
+                              'wf_info_for', 'wikiid']
+    
+        self.assertListEquals(relations, expected_relations)
+
+        eschema = schema.eschema('EUser')
+        rels = sorted(str(r) for r in eschema.subject_relations())
+        self.assertListEquals(rels, ['created_by', 'creation_date', 'eid',
+                                     'firstname', 'has_text', 'identity',
+                                     'in_group', 'in_state', 'is',
+                                     'is_instance_of', 'last_login_time',
+                                     'login', 'modification_date', 'owned_by',
+                                     'primary_email', 'surname', 'upassword',
+                                     'use_email'])
+        rels = sorted(r.type for r in eschema.object_relations())
+        self.assertListEquals(rels, ['bookmarked_by', 'created_by', 'for_user',
+                                     'identity', 'owned_by', 'wf_info_for'])
+        rschema = schema.rschema('relation_type')
+        properties = rschema.rproperties('EFRDef', 'ERType')
+        self.assertEquals(properties['cardinality'], '1*')
+        constraints = properties['constraints']
+        self.failUnlessEqual(len(constraints), 1, constraints)
+        constraint = constraints[0]
+        self.failUnless(isinstance(constraint, RQLConstraint))
+        self.failUnlessEqual(constraint.restriction, 'O final TRUE')
+
+    def test_fulltext_container(self):
+        schema = loader.load(config)
+        self.failUnless('has_text' in schema['EUser'].subject_relations())
+        self.failIf('has_text' in schema['EmailAddress'].subject_relations())
+
+
+class BadSchemaRQLExprTC(TestCase):
+    def setUp(self):
+        self.loader = CubicWebSchemaLoader()
+        self.loader.defined = {}
+        self.loader._instantiate_handlers()
+
+    def _test(self, schemafile, msg):
+        self.loader.handle_file(join(DATADIR, schemafile))
+        ex = self.assertRaises(BadSchemaDefinition,
+                               self.loader._build_schema, 'toto', False)
+        self.assertEquals(str(ex), msg)
+        
+    def test_rrqlexpr_on_etype(self):
+        self._test('rrqlexpr_on_eetype.py', "can't use RRQLExpression on an entity type, use an ERQLExpression (ToTo)")
+        
+    def test_erqlexpr_on_rtype(self):
+        self._test('erqlexpr_on_ertype.py', "can't use ERQLExpression on a relation type, use a RRQLExpression (toto)")
+        
+    def test_rqlexpr_on_rtype_read(self):
+        self._test('rqlexpr_on_ertype_read.py', "can't use rql expression for read permission of a relation type (toto)")
+        
+    def test_rrqlexpr_on_attr(self):
+        self._test('rrqlexpr_on_attr.py', "can't use RRQLExpression on a final relation type (eg attribute relation), use an ERQLExpression (attr)")
+
+
+class NormalizeExpressionTC(TestCase):
+
+    def test(self):
+        self.assertEquals(normalize_expression('X  bla Y,Y blur Z  ,  Z zigoulou   X '),
+                                               'X bla Y, Y blur Z, Z zigoulou X')
+
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/unittest_vregistry.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,49 @@
+from logilab.common.testlib import unittest_main, TestCase
+
+from os.path import join
+
+from cubicweb import CW_SOFTWARE_ROOT as BASE
+from cubicweb.vregistry import VObject
+from cubicweb.cwvreg import CubicWebRegistry, UnknownProperty
+from cubicweb.cwconfig import CubicWebConfiguration
+
+class YesSchema:
+    def __contains__(self, something):
+        return True
+    
+class VRegistryTC(TestCase):
+
+    def setUp(self):
+        config = CubicWebConfiguration('data')
+        self.vreg = CubicWebRegistry(config)
+        self.vreg.schema = YesSchema()
+
+    def test_load(self):
+        self.vreg.load_file(join(BASE, 'web', 'views'), 'euser.py')
+        self.vreg.load_file(join(BASE, 'web', 'views'), 'baseviews.py')
+        fpvc = [v for v in self.vreg.registry_objects('views', 'primary') if v.accepts[0] == 'EUser'][0]
+        fpv = fpvc(None, None)
+        # don't want a TypeError due to super call
+        self.assertRaises(AttributeError, fpv.render_entity_attributes, None, None)
+
+    def test_load_interface_based_vojects(self):
+        self.vreg.load_file(join(BASE, 'web', 'views'), 'idownloadable.py')
+        self.vreg.load_file(join(BASE, 'web', 'views'), 'baseviews.py')
+        # check loading baseviews after idownloadable isn't kicking interface based views
+        self.assertEquals(len(self.vreg['views']['primary']), 2)
+                              
+    def test_autoselectors(self):
+        myselector1 = lambda *args: 1
+        myselector2 = lambda *args: 1
+        class AnAppObject(VObject):
+            __selectors__ = (myselector1, myselector2)
+        self.assertEquals(AnAppObject.__select__(), 2)
+
+    def test_properties(self):
+        self.failIf('system.version.cubicweb' in self.vreg['propertydefs'])
+        self.failUnless(self.vreg.property_info('system.version.cubicweb'))
+        self.assertRaises(UnknownProperty, self.vreg.property_info, 'a.non.existent.key')
+        
+
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/toolsutils.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,292 @@
+"""some utilities for cubicweb tools
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+import os, sys
+from os import listdir, makedirs, symlink, environ, chmod, walk, remove
+from os.path import exists, join, abspath, normpath
+
+from logilab.common.clcommands import Command as BaseCommand, \
+     main_run as base_main_run, register_commands, pop_arg, cmd_run
+from logilab.common.compat import any
+
+from cubicweb import warning
+from cubicweb import ConfigurationError, ExecutionError
+
+def iter_dir(directory, condition_file=None, ignore=()):
+    """iterate on a directory"""
+    for sub in listdir(directory):
+        if sub in ('CVS', '.svn', '.hg'):
+            continue
+        if condition_file is not None and \
+               not exists(join(directory, sub, condition_file)):
+            continue
+        if sub in ignore:
+            continue
+        yield sub
+
+def create_dir(directory):
+    """create a directory if it doesn't exist yet"""
+    try:
+        makedirs(directory)
+        print 'created directory', directory
+    except OSError, ex:
+        import errno
+        if ex.errno != errno.EEXIST:
+            raise
+        print 'directory %s already exists' % directory
+                
+def create_symlink(source, target):
+    """create a symbolic link"""
+    if exists(target):
+        remove(target)
+    symlink(source, target)
+    print '[symlink] %s <-- %s' % (target, source)
+
+def create_copy(source, target):
+    import shutil
+    print '[copy] %s <-- %s' % (target, source)
+    shutil.copy2(source, target)
+    
+def rm(whatever):
+    import shutil
+    shutil.rmtree(whatever)
+    print 'removed %s' % whatever
+
+def show_diffs(appl_file, ref_file, askconfirm=True):
+    """interactivly replace the old file with the new file according to
+    user decision
+    """
+    import shutil
+    p_output = os.popen('diff -u %s %s' % (appl_file, ref_file), 'r')
+    diffs = p_output.read()
+    if diffs:
+        if askconfirm:
+            print 
+            print diffs
+            action = raw_input('replace (N/y/q) ? ').lower()
+        else:
+            action = 'y'
+        if action == 'y':
+            try:
+                shutil.copyfile(ref_file, appl_file)
+            except IOError:
+                os.system('chmod a+w %s' % appl_file)
+                shutil.copyfile(ref_file, appl_file)
+            print 'replaced'
+        elif action == 'q':
+            sys.exit(0)
+        else:
+            copy_file = appl_file + '.default'
+            copy = file(copy_file, 'w')
+            copy.write(open(ref_file).read())
+            copy.close()
+            print 'keep current version, the new file has been written to', copy_file
+    else:
+        print 'no diff between %s and %s' % (appl_file, ref_file)
+
+
+def copy_skeleton(skeldir, targetdir, context,
+                  exclude=('*.py[co]', '*.orig', '*~', '*_flymake.py'),
+                  askconfirm=False):
+    import shutil
+    from fnmatch import fnmatch
+    skeldir = normpath(skeldir)
+    targetdir = normpath(targetdir)
+    for dirpath, dirnames, filenames in walk(skeldir):
+        tdirpath = dirpath.replace(skeldir, targetdir)
+        create_dir(tdirpath)
+        for fname in filenames:
+            if any(fnmatch(fname, pat) for pat in exclude):
+                continue
+            fpath = join(dirpath, fname)
+            if 'CUBENAME' in fname:
+                tfpath = join(tdirpath, fname.replace('CUBENAME', context['cubename']))
+            elif 'DISTNAME' in fname:
+                tfpath = join(tdirpath, fname.replace('DISTNAME', context['distname']))
+            else:
+                tfpath = join(tdirpath, fname)
+            if fname.endswith('.tmpl'):
+                tfpath = tfpath[:-5]
+                if not askconfirm or not exists(tfpath) or \
+                       confirm('%s exists, overwrite?' % tfpath):
+                    fname = fill_templated_file(fpath, tfpath, context)
+                    print '[generate] %s <-- %s' % (tfpath, fpath)
+            elif exists(tfpath):
+                show_diffs(tfpath, fpath, askconfirm)
+            else:
+                shutil.copyfile(fpath, tfpath)
+                
+def fill_templated_file(fpath, tfpath, context):
+    fobj = file(tfpath, 'w')
+    templated = file(fpath).read()
+    fobj.write(templated % context)
+    fobj.close()
+
+def restrict_perms_to_user(filepath, log=None):
+    """set -rw------- permission on the given file"""
+    if log:
+        log('set %s permissions to 0600', filepath)
+    else:
+        print 'set %s permissions to 0600' % filepath
+    chmod(filepath, 0600)
+
+def confirm(question, default_is_yes=True):
+    """ask for confirmation and return true on positive answer"""
+    if default_is_yes:
+        input_str = '%s [Y/n]: '
+    else:
+        input_str = '%s [y/N]: '
+    answer = raw_input(input_str % (question)).strip().lower()
+    if default_is_yes:
+        if answer in ('n', 'no'):
+            return False
+        return True
+    if answer in ('y', 'yes'):
+        return True
+    return False
+
+def read_config(config_file):
+    """read the application configuration from a file and return it as a
+    dictionnary
+
+    :type config_file: str
+    :param config_file: path to the configuration file
+
+    :rtype: dict
+    :return: a dictionary with specified values associated to option names 
+    """
+    from logilab.common.fileutils import lines
+    config = current = {}
+    try:
+        for line in lines(config_file, comments='#'):
+            try:
+                option, value = line.split('=', 1)
+            except ValueError:
+                option = line.strip().lower()
+                if option[0] == '[':
+                    # start a section
+                    section = option[1:-1]
+                    assert not config.has_key(section), \
+                           'Section %s is defined more than once' % section
+                    config[section] = current = {}
+                    continue
+                print >> sys.stderr, 'ignoring malformed line\n%r' % line
+                continue
+            option = option.strip().replace(' ', '_')
+            value = value.strip()
+            current[option] = value or None
+    except IOError, ex:
+        warning('missing or non readable configuration file %s (%s)',
+                config_file, ex)
+    return config
+
+def env_path(env_var, default, name):
+    """get a path specified in a variable or using the default value and return
+    it.
+
+    :type env_var: str
+    :param env_var: name of an environment variable
+
+    :type default: str
+    :param default: default value if the environment variable is not defined
+    
+    :type name: str
+    :param name: the informal name of the path, used for error message
+    
+    :rtype: str
+    :return: the value of the environment variable or the default value
+
+    :raise `ConfigurationError`: if the returned path does not exist
+    """
+    path = environ.get(env_var, default)
+    if not exists(path):
+        raise ConfigurationError('%s path %s doesn\'t exist' % (name, path))
+    return abspath(path)
+
+
+
+_HDLRS = {}
+
+class metacmdhandler(type):
+    def __new__(mcs, name, bases, classdict):
+        cls = super(metacmdhandler, mcs).__new__(mcs, name, bases, classdict)
+        if getattr(cls, 'cfgname', None) and getattr(cls, 'cmdname', None):
+            _HDLRS.setdefault(cls.cmdname, []).append(cls)
+        return cls
+
+
+class CommandHandler(object):
+    """configuration specific helper for cubicweb-ctl commands"""
+    __metaclass__ = metacmdhandler
+    def __init__(self, config):
+        self.config = config
+
+class Command(BaseCommand):
+    """base class for cubicweb-ctl commands"""
+
+    def config_helper(self, config, required=True, cmdname=None):
+        if cmdname is None:
+            cmdname = self.name
+        for helpercls in _HDLRS.get(cmdname, ()):
+            if helpercls.cfgname == config.name:
+                return helpercls(config)
+        if config.name == 'all-in-one':
+            for helpercls in _HDLRS.get(cmdname, ()):
+                if helpercls.cfgname == 'repository':
+                    return helpercls(config)
+        if required:
+            msg = 'No helper for command %s using %s configuration' % (
+                cmdname, config.name)
+            raise ConfigurationError(msg)
+        
+    def fail(self, reason):
+        print "command failed:", reason
+        sys.exit(1)
+    
+                    
+def main_run(args, doc):
+    """command line tool"""
+    try:
+        base_main_run(args, doc)
+    except ConfigurationError, err:
+        print 'ERROR: ', err
+        sys.exit(1)
+    except ExecutionError, err:
+        print err
+        sys.exit(2)
+
+CONNECT_OPTIONS = (
+    ("user",
+     {'short': 'u', 'type' : 'string', 'metavar': '<user>',
+      'help': 'connect as <user> instead of being prompted to give it.',
+      }
+     ),
+    ("password",
+     {'short': 'p', 'type' : 'password', 'metavar': '<password>',
+      'help': 'automatically give <password> for authentication instead of \
+being prompted to give it.',
+      }),
+    ("host",
+     {'short': 'H', 'type' : 'string', 'metavar': '<hostname>',
+      'default': 'all-in-one',
+      'help': 'specify the name server\'s host name. Will be detected by \
+broadcast if not provided.',
+      }),
+    )
+
+def config_connect(appid, optconfig):
+    from cubicweb.dbapi import connect
+    from getpass import getpass
+    user = optconfig.user
+    if not user:
+        user = raw_input('login: ')
+    password = optconfig.password
+    if not password:
+        password = getpass('password: ')
+    return connect(user=user, password=password, host=optconfig.host, database=appid)
+    
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/vregistry.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,532 @@
+"""
+* the vregistry handle various type of objects interacting
+  together. The vregistry handle registration of dynamically loaded
+  objects and provide a convenient api access to those objects
+  according to a context
+
+* to interact with the vregistry, object should inherit from the
+  VObject abstract class
+  
+* the registration procedure is delegated to a registerer. Each
+  registerable vobject must define its registerer class using the
+  __registerer__ attribute.  A registerer is instantiated at
+  registration time, after which the instance is lost
+  
+* the selection procedure has been generalized by delegating to a
+  selector, which is responsible to score the vobject according to the
+  current state (req, rset, row, col). At the end of the selection, if
+  a vobject class has been found, an instance of this class is
+  returned. The selector is instantiated at vobject registration
+
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+import sys
+from os import listdir, stat
+from os.path import dirname, join, realpath, split, isdir
+from logging import getLogger
+
+from cubicweb import CW_SOFTWARE_ROOT, set_log_methods
+from cubicweb import RegistryNotFound, ObjectNotFound, NoSelectableObject
+
+
+class vobject_helper(object):
+    """object instantiated at registration time to help a wrapped
+    VObject subclass
+
+    :param registry: the VRegistry doing the registration
+    :param vobject: the VObject subclass being registered
+    """
+
+    def __init__(self, registry, vobject):
+        self.registry = registry
+        self.vobject = vobject
+        # convenience shortcuts to the registry's configuration and schema
+        self.config = registry.config
+        self.schema = registry.schema
+
+
+class registerer(vobject_helper):
+    """do whatever is needed at registration time for the wrapped
+    class, according to current application schema and already
+    registered objects of the same kind (i.e. same registry name and
+    same id).
+
+    The wrapped class may be skipped, some previously selected object
+    may be kicked out... After whatever works needed, if the object or
+    a transformed object is returned, it will be added to previously
+    registered objects.
+    """
+
+    def __init__(self, registry, vobject):
+        super(registerer, self).__init__(registry, vobject)
+        # classids of vobjects removed by this registerer (see kick())
+        self.kicked = set()
+    
+    def do_it_yourself(self, registered):
+        """subclass responsibility: given the list of already `registered`
+        vobjects for this id, return the class to register (or None to skip)
+        """
+        raise NotImplementedError(str(self.vobject))
+        
+    def kick(self, registered, kicked):
+        """remove `kicked` from the `registered` list and remember its classid"""
+        self.debug('kicking vobject %s', kicked)
+        registered.remove(kicked)
+        self.kicked.add(kicked.classid())
+        
+    def skip(self):
+        """log that the wrapped vobject is not registered (schema mismatch)"""
+        self.debug('no schema compat, skipping %s', self.vobject)
+
+
+def selector(cls, *args, **kwargs):
+    """selector is called to help choosing the correct object for a
+    particular request and result set by returning a score.
+
+    This function only documents the expected signature and is never
+    callable itself: concrete selectors must provide the scoring logic.
+
+    it must implement a .score_method taking a request, a result set and
+    optionaly row and col arguments which return an int telling how well
+    the wrapped class apply to the given request and result set. 0 score
+    means that it doesn't apply.
+    
+    rset may be None. If not, row and col arguments may be optionally
+    given if the registry is scoring a given row or a given cell of
+    the result set (both row and col are int if provided).
+    """    
+    raise NotImplementedError(cls)
+
+
+class autoselectors(type):
+    """metaclass implementing the __selectors__ / __select__ compatibility
+    layer so that:
+
+    __select__ = classmethod(chainall(A, B, C))
+
+    can be written more declaratively as:
+    
+    __selectors__ = (A, B, C)
+    """
+    def __new__(mcs, name, bases, classdict):
+        # the two spellings are mutually exclusive
+        if '__select__' in classdict and '__selectors__' in classdict:
+            raise TypeError("__select__ and __selectors__ "
+                            "can't be used together")
+        if '__select__' not in classdict and '__selectors__' in classdict:
+            selectors = classdict['__selectors__']
+            classdict['__select__'] = classmethod(chainall(*selectors))
+        return super(autoselectors, mcs).__new__(mcs, name, bases, classdict)
+
+    def __setattr__(self, attr, value):
+        # keep __select__ in sync when __selectors__ is reassigned on a class
+        if attr == '__selectors__':
+            self.__select__ = classmethod(chainall(*value))
+        super(autoselectors, self).__setattr__(attr, value)
+            
+
+class VObject(object):
+    """visual object, meant to be handled somehow by the visual components
+    registry.
+
+    The following attributes should be set on concrete vobject subclasses:
+    
+    :__registry__:
+      name of the registry for this object (string like 'views',
+      'templates'...)
+    :id:
+      object's identifier in the registry (string like 'main',
+      'primary', 'folder_box')
+    :__registerer__:
+      registration helper class
+    :__select__:
+      selection helper function
+    :__selectors__:
+      tuple of selectors to be chained
+      (__select__ and __selectors__ are mutually exclusive)
+      
+    Moreover, the `__abstract__` attribute may be set to True to indicate
+    that a vobject is abstract and should not be registered
+    """
+    __metaclass__ = autoselectors
+    # necessary attributes to interact with the registry; subclasses must
+    # override them (None values keep the class unregistered, see
+    # VRegistry.register_vobject_class)
+    id = None
+    __registry__ = None
+    __registerer__ = None
+    __select__ = None
+
+    @classmethod
+    def registered(cls, registry):
+        """called by the registry when the vobject has been registered.
+
+        It must return the  object that will be actually registered (this
+        may be the right hook to create an instance for example). By
+        default the vobject is returned without any transformation.
+        """
+        return cls
+
+    @classmethod
+    def selected(cls, *args, **kwargs):
+        """called by the registry when the vobject has been selected.
+        
+        It must return the  object that will be actually returned by the
+        .select method (this may be the right hook to create an
+        instance for example). By default the selected object is
+        returned without any transformation.
+        """
+        return cls
+
+    @classmethod
+    def classid(cls):
+        """returns a unique identifier for the vobject (module.ClassName)"""
+        return '%s.%s' % (cls.__module__, cls.__name__)
+
+
+class VRegistry(object):
+    """class responsible to register, propose and select the various
+    elements used to build the web interface. Currently, we have templates,
+    views, actions and components.
+    """
+    
+    def __init__(self, config):#, cache_size=1000):
+        self.config = config
+        # dictionary of registries (themselves dictionaries) by name
+        self._registries = {}
+        # last known modification time per loaded file / directory, used
+        # to avoid reloading unchanged modules (see read_directory/load_file)
+        self._lastmodifs = {}
+
+    def reset(self):
+        """forget every registered object and all load timestamps"""
+        self._registries = {}
+        self._lastmodifs = {}
+
+    # dict-like access to the underlying registries ##########################
+
+    def __getitem__(self, key):
+        return self._registries[key]
+
+    def get(self, key, default=None):
+        return self._registries.get(key, default)
+
+    def items(self):
+        return self._registries.items()
+
+    def values(self):
+        return self._registries.values()
+
+    def __contains__(self, key):
+        return key in self._registries
+        
+    def register_vobject_class(self, cls, _kicked=set()):
+        """handle vobject class registration
+        
+        vobject class with __abstract__ == True in their local dictionary or
+        with a name starting with an underscore are not registered.
+        Also a vobject class needs to have __registry__ and id attributes set
+        to a non empty string to be registered.
+
+        Registration is actually handled by vobject's registerer.
+        """
+        # NOTE(review): the mutable default `_kicked` is shared across calls,
+        # apparently as deliberate cross-reload memory; however nothing ever
+        # adds to it since the update below is commented out -- confirm intent
+        if (cls.__dict__.get('__abstract__') or cls.__name__[0] == '_'
+            or not cls.__registry__ or not cls.id):
+            return
+        # while reloading a module :
+        # if cls was previously kicked, it means that there is a more specific
+        # vobject defined elsewhere re-registering cls would kick it out
+        if cls.classid() in _kicked:
+            self.debug('not re-registering %s because it was previously kicked',
+                      cls.classid())
+        else:
+            regname = cls.__registry__
+            # honor the 'disable-<registry>' configuration option
+            if cls.id in self.config['disable-%s' % regname]:
+                return
+            registry = self._registries.setdefault(regname, {})
+            vobjects = registry.setdefault(cls.id, [])
+            # the registerer may return a transformed class, or None to skip
+            registerer = cls.__registerer__(self, cls)
+            cls = registerer.do_it_yourself(vobjects)
+            #_kicked |= registerer.kicked
+            if cls:
+                vobject = cls.registered(self)
+                try:
+                    vname = vobject.__name__
+                except AttributeError:
+                    vname = vobject.__class__.__name__
+                self.debug('registered vobject %s in registry %s with id %s',
+                          vname, cls.__registry__, cls.id)
+                vobjects.append(vobject)
+            
+    def unregister_module_vobjects(self, modname):
+        """removes registered objects coming from a given module
+
+        returns a dictionary classid/class of all classes that will need
+        to be updated after reload (i.e. vobjects referencing classes defined
+        in the <modname> module)
+        """
+        unregistered = {}
+        # browse each registered object
+        for registry, objdict in self.items():
+            for oid, objects in objdict.items():
+                # iterate over a copy since we remove while iterating
+                for obj in objects[:]:
+                    objname = obj.classid()
+                    # if the vobject is defined in this module, remove it
+                    if objname.startswith(modname):
+                        unregistered[objname] = obj
+                        objects.remove(obj)
+                        self.debug('unregistering %s in %s registry',
+                                  objname, registry)
+                    # if not, check if the vobject can be found in baseclasses
+                    # (because we also want subclasses to be updated)
+                    else:
+                        if not isinstance(obj, type):
+                            obj = obj.__class__
+                        for baseclass in obj.__bases__:
+                            if hasattr(baseclass, 'classid'):
+                                baseclassid = baseclass.classid()
+                                if baseclassid.startswith(modname):
+                                    unregistered[baseclassid] = baseclass
+                # update oid entry
+                if objects:
+                    objdict[oid] = objects
+                else:
+                    del objdict[oid]
+        return unregistered
+
+
+    def update_registered_subclasses(self, oldnew_mapping):
+        """updates subclasses of re-registered vobjects
+
+        if baseviews.PrimaryView is changed, baseviews.py will be reloaded
+        automatically and the new version of PrimaryView will be registered.
+        But all existing subclasses must also be notified of this change, and
+        that's what this method does
+
+        :param oldnew_mapping: a dict mapping old version of a class to
+                               the new version
+        """
+        # browse each registered object
+        for objdict in self.values():
+            for objects in objdict.values():
+                for obj in objects:
+                    if not isinstance(obj, type):
+                        obj = obj.__class__
+                    # build new baseclasses tuple
+                    newbases = tuple(oldnew_mapping.get(baseclass, baseclass)
+                                     for baseclass in obj.__bases__)
+                    # update obj's baseclasses tuple (__bases__) if needed
+                    if newbases != obj.__bases__:
+                        self.debug('updating %s.%s base classes',
+                                  obj.__module__, obj.__name__)
+                        obj.__bases__ = newbases
+
+    def registry(self, name):
+        """return the registry (dictionary of class objects) associated to
+        this name
+
+        :raise RegistryNotFound: if no registry exists under that name
+        """
+        try:
+            return self._registries[name]
+        except KeyError:
+            # py2 three-expression raise: keep the original traceback
+            raise RegistryNotFound(name), None, sys.exc_info()[-1]
+
+    def registry_objects(self, name, oid=None):
+        """returns objects registered with the given oid in the given registry.
+        If no oid is given, return all objects in this registry
+
+        :raise ObjectNotFound: if oid is given but not registered
+        """
+        registry = self.registry(name)
+        if oid:
+            try:
+                return registry[oid]
+            except KeyError:
+                raise ObjectNotFound(oid), None, sys.exc_info()[-1]
+        else:
+            result = []
+            for objs in registry.values():
+                result += objs
+            return result
+        
+    def select(self, vobjects, *args, **kwargs):
+        """return an instance of the most specific object according
+        to parameters
+
+        raise NoSelectableObject if no object applies
+        """
+        # pick the candidate with the highest strictly-positive score
+        score, winner = 0, None
+        for vobject in vobjects:
+            vobjectscore = vobject.__select__(*args, **kwargs)
+            if vobjectscore > score:
+                score, winner = vobjectscore, vobject
+        if winner is None:
+            raise NoSelectableObject('args: %s\nkwargs: %s %s'
+                                     % (args, kwargs.keys(), [repr(v) for v in vobjects]))
+        # return the result of the .selected method of the vobject
+        return winner.selected(*args, **kwargs)
+    
+    def possible_objects(self, registry, *args, **kwargs):
+        """return an iterator on possible objects in a registry for this result set
+
+        actions returned are classes, not instances
+        """
+        for vobjects in self.registry(registry).values():
+            try:
+                yield self.select(vobjects, *args, **kwargs)
+            except NoSelectableObject:
+                continue
+
+    def select_object(self, registry, cid, *args, **kwargs):
+        """return the most specific component according to the resultset"""
+        return self.select(self.registry_objects(registry, cid), *args, **kwargs)
+
+    def object_by_id(self, registry, cid, *args, **kwargs):
+        """return the most specific component according to the resultset
+
+        unlike select_object, this bypasses scoring and expects exactly one
+        object registered under `cid`
+        """
+        objects = self[registry][cid]
+        assert len(objects) == 1, objects
+        return objects[0].selected(*args, **kwargs)
+    
+    # initialization methods ##################################################
+
+    
+    def register_objects(self, path, force_reload=None):
+        """load and register vobjects from each file or directory in `path`
+
+        returns True if something changed in the registries
+        """
+        if force_reload is None:
+            force_reload = self.config.mode == 'dev'
+        elif not force_reload:
+            # force_reload == False usually mean modules have been reloaded
+            # by another connection, so we want to update the registry
+            # content even if there has been no module content modification
+            self.reset()
+        # need to clean sys.path this to avoid import confusion pb (i.e.
+        # having the same module loaded as 'cubicweb.web.views' subpackage and
+        # as views'  or 'web.views' subpackage
+        # this is mainly for testing purpose, we should'nt need this in
+        # production environment
+        for webdir in (join(dirname(realpath(__file__)), 'web'),
+                       join(dirname(__file__), 'web')):
+            if webdir in sys.path:
+                sys.path.remove(webdir)
+        if CW_SOFTWARE_ROOT in sys.path:
+            sys.path.remove(CW_SOFTWARE_ROOT)        
+        # load views from each directory in the application's path
+        change = False
+        for fileordirectory in path:
+            if isdir(fileordirectory):
+                if self.read_directory(fileordirectory, force_reload):
+                    change = True
+            else:
+                directory, filename = split(fileordirectory)
+                if self.load_file(directory, filename, force_reload):
+                    change = True
+        if change:
+            for registry, objects in self.items():
+                self.debug('available in registry %s: %s', registry,
+                           sorted(objects))
+        return change
+    
+    def read_directory(self, directory, force_reload=False):
+        """read a directory and register available views"""
+        # stat()[-2] is st_mtime
+        modified_on = stat(realpath(directory))[-2]
+        # only read directory if it was modified
+        _lastmodifs = self._lastmodifs
+        if directory in _lastmodifs and modified_on <= _lastmodifs[directory]:
+            return False
+        self.info('loading directory %s', directory)
+        for filename in listdir(directory):
+            if filename[-3:] == '.py':
+                try:
+                    self.load_file(directory, filename, force_reload)
+                except OSError:
+                    # this typically happens on emacs backup files (.#foo.py)
+                    self.warning('Unable to load file %s. It is likely to be a backup file',
+                                 filename)
+                except Exception, ex:
+                    if self.config.mode in ('dev', 'test'):
+                        raise
+                    self.exception('%r while loading file %s', ex, filename)
+        _lastmodifs[directory] = modified_on
+        return True
+
+    def load_file(self, directory, filename, force_reload=False):
+        """load visual objects from a python file
+
+        returns True if the file was (re)loaded, None if it was up to date
+        """
+        from logilab.common.modutils import load_module_from_modpath, modpath_from_file
+        filepath = join(directory, filename)
+        modified_on = stat(filepath)[-2]
+        modpath = modpath_from_file(join(directory, filename))
+        modname = '.'.join(modpath)
+        unregistered = {}
+        _lastmodifs = self._lastmodifs
+        if filepath in _lastmodifs:
+            # only load file if it was modified
+            if modified_on <= _lastmodifs[filepath]:
+                return
+            else:
+                # if it was modified, unregister all existing objects
+                # from this module, and keep track of what was unregistered
+                unregistered = self.unregister_module_vobjects(modname)
+        # load the module
+        module = load_module_from_modpath(modpath, use_sys=not force_reload)
+        registered = self.load_module(module)
+        # if something was unregistered, we need to update places where it was
+        # referenced 
+        if unregistered:
+            # oldnew_mapping = {}
+            oldnew_mapping = dict((unregistered[name], registered[name])
+                                  for name in unregistered if name in registered)
+            self.update_registered_subclasses(oldnew_mapping)
+        _lastmodifs[filepath] = modified_on
+        return True
+
+    def load_module(self, module):
+        """register every public vobject defined in `module`; return a dict
+        mapping classid to registered class
+        """
+        registered = {}
+        self.info('loading %s', module)
+        for objname, obj in vars(module).items():
+            if objname.startswith('_'):
+                continue
+            self.load_ancestors_then_object(module.__name__, registered, obj)
+        return registered
+    
+    def load_ancestors_then_object(self, modname, registered, obj):
+        """recursively register `obj` after its base classes, so that parents
+        are always registered before their subclasses
+        """
+        # skip imported classes
+        if getattr(obj, '__module__', None) != modname:
+            return
+        # skip non registerable object
+        try:
+            if not issubclass(obj, VObject):
+                return
+        except TypeError:
+            return
+        objname = '%s.%s' % (modname, obj.__name__)
+        if objname in registered:
+            return
+        registered[objname] = obj
+        for parent in obj.__bases__:
+            self.load_ancestors_then_object(modname, registered, parent)
+        self.load_object(obj)
+            
+    def load_object(self, obj):
+        """register a single vobject class, swallowing (but logging) errors
+        outside of dev/test modes
+        """
+        try:
+            self.register_vobject_class(obj)
+        except Exception, ex:
+            if self.config.mode in ('test', 'dev'):
+                raise
+            self.exception('vobject %s registration failed: %s', obj, ex)
+        
+# init logging: attach debug/info/warning/error/exception methods to the
+# core classes, each using its own logger namespace
+set_log_methods(VObject, getLogger('cubicweb'))
+set_log_methods(VRegistry, getLogger('cubicweb.registry'))
+set_log_methods(registerer, getLogger('cubicweb.registration'))
+
+
+# advanced selector building functions ########################################
+
+def chainall(*selectors):
+    """return a selector chaining given selectors. If one of
+    the selectors fail, selection will fail, else the returned score
+    will be the sum of each selector'score
+    """
+    assert selectors
+    # NB: the loop variable deliberately shadows the inner function name;
+    # the inner `selector` never refers to itself
+    def selector(cls, *args, **kwargs):
+        score = 0
+        for selector in selectors:
+            partscore = selector(cls, *args, **kwargs)
+            if not partscore:
+                return 0
+            score += partscore
+        return score
+    return selector
+
+def chainfirst(*selectors):
+    """return a selector chaining given selectors. If all
+    the selectors fail, selection will fail, else the returned score
+    will be the first non-zero selector score
+    """
+    assert selectors
+    def selector(cls, *args, **kwargs):
+        # return the first non-zero score, in declaration order
+        for selector in selectors:
+            partscore = selector(cls, *args, **kwargs)
+            if partscore:
+                return partscore
+        return 0
+    return selector
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/__init__.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,46 @@
+"""CubicWeb web client core. You'll need a apache-modpython or twisted
+publisher to get a full CubicWeb web application
+
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from cubicweb.web._exceptions import *    
+
+# mark translatable message ids without translating at import time
+_ = unicode
+
+# sentinel value used in forms for fields that must not be edited
+INTERNAL_FIELD_VALUE = '__cubicweb_internal_field__'
+
+
+class stdmsgs(object):
+    """standard ui message ids (kept in a class for backward compatibility);
+    values are i18n message identifiers, translated at display time
+    """
+    BUTTON_OK     = _('button_ok')
+    BUTTON_APPLY  = _('button_apply')
+    BUTTON_CANCEL = _('button_cancel')
+    BUTTON_DELETE = _('button_delete')
+    YES = _('yes')
+    NO  = _('no')
+
+
+def eid_param(name, eid):
+    assert eid is not None
+    if eid is None:
+        eid = ''
+    return '%s:%s' % (name, eid)
+
+
+from logging import getLogger
+# module-level logger for the web package
+LOGGER = getLogger('cubicweb.web')
+
+# set of facet definitions, filled elsewhere (see commented example below)
+FACETTES = set()
+
+
+## FACETTES = set( (
+##     # (relation, role, target's attribute)
+##     ('created_by', 'subject', 'login'),
+##     ('in_group', 'subject', 'name'),
+##     ('in_state', 'subject', 'name'),
+##     ))
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/_exceptions.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,57 @@
+# pylint: disable-msg=W0401,W0614
+"""exceptions used in the core of the CubicWeb web application
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from cubicweb._exceptions import *
+
+class PublishException(CubicWebException):
+    """base class for publishing related exceptions"""
+    
+class RequestError(PublishException):
+    """raised when a request can't be served because of a bad input"""
+
+class NothingToEdit(RequestError):
+    """raised when an edit request doesn't specify any eid to edit"""
+    
+class NotFound(RequestError):
+    """raised when a 404 error should be returned"""
+
+class Redirect(PublishException):
+    """raised to redirect the http request
+
+    :param location: the URL to redirect to
+    """
+    def __init__(self, location):
+        self.location = location
+
+class DirectResponse(Exception):
+    """carries a pre-built response object to be sent as-is, bypassing the
+    normal publishing process (presumably -- confirm against the publisher)
+    """
+    def __init__(self, response):
+        self.response = response
+
+class StatusResponse(Exception):
+    """raised to answer with an explicit status code (and optional body)
+
+    :param status: numeric status code, coerced to int
+    :param content: optional response body, empty by default
+    """
+    def __init__(self, status, content=''):
+        self.status = int(status)
+        self.content = content
+    
+class ExplicitLogin(AuthenticationError):
+    """raised when a bad connection id is given or when an attempt to establish
+    a connection failed"""
+
+class InvalidSession(CubicWebException):
+    """raised when a session id is found but associated session is not found or
+    invalid
+    """
+
+class RemoteCallFailed(RequestError):
+    """raised when a json remote call fails
+    """
+    def __init__(self, reason=''):
+        super(RequestError, self).__init__()
+        self.reason = reason
+
+    def dumps(self):
+        import simplejson
+        return simplejson.dumps({'reason': self.reason})
+        
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/action.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,221 @@
+"""abstract action classes for CubicWeb web client
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from cubicweb.common.appobject import AppRsetObject
+from cubicweb.common.registerers import action_registerer
+from cubicweb.common.selectors import add_etype_selector, \
+     searchstate_selector, searchstate_accept_one_selector, \
+     searchstate_accept_one_but_etype_selector
+    
+_ = unicode
+
+
+class Action(AppRsetObject):
+    """abstract action. Handle the .search_states attribute to match
+    request search state. 
+    """
+    __registry__ = 'actions'
+    __registerer__ = action_registerer
+    __selectors__ = (searchstate_selector,)
+    # by default actions don't appear in link search mode
+    search_states = ('normal',) 
+    property_defs = {
+        'visible':  dict(type='Boolean', default=True,
+                         help=_('display the action or not')),
+        'order':    dict(type='Int', default=99,
+                         help=_('display order of the action')),
+        'category': dict(type='String', default='moreactions',
+                         vocabulary=('mainactions', 'moreactions', 'addrelated',
+                                     'useractions', 'siteactions', 'hidden'),
+                         help=_('context where this component should be displayed')),
+    }
+    site_wide = True # don't want users to configure actions eproperties
+    category = 'moreactions'
+    
+    @classmethod
+    def accept_rset(cls, req, rset, row, col):
+        """score the action against a whole result set (row is None) or a
+        single cell; 0 means the action does not apply
+        """
+        user = req.user
+        action = cls.schema_action
+        if row is None:
+            # score the whole result set: every entity type must be accepted
+            score = 0
+            need_local_check = [] 
+            geteschema = cls.schema.eschema
+            for etype in rset.column_types(0):
+                accepted = cls.accept(user, etype)
+                if not accepted:
+                    return 0
+                if action:
+                    eschema = geteschema(etype)
+                    if not user.matching_groups(eschema.get_groups(action)):
+                        if eschema.has_local_role(action):
+                            # have to check local roles
+                            need_local_check.append(eschema)
+                            continue
+                        else:
+                            # even a local role won't be enough
+                            return 0
+                score += accepted
+            if need_local_check:
+                # check local role for entities of necessary types
+                for i, row in enumerate(rset):
+                    if not rset.description[i][0] in need_local_check:
+                        continue
+                    if not cls.has_permission(rset.get_entity(i, 0), action):
+                        return 0
+                    score += 1
+            return score
+        # single cell: check permission on that entity only
+        col = col or 0
+        etype = rset.description[row][col]
+        score = cls.accept(user, etype)
+        if score and action:
+            if not cls.has_permission(rset.get_entity(row, col), action):
+                return 0
+        return score
+    
+    @classmethod
+    def has_permission(cls, entity, action):
+        """defined in a separated method to ease overriding (see ModifyAction
+        for instance)
+        """
+        return entity.has_perm(action)
+    
+    def url(self):
+        """return the url associated with this action"""
+        raise NotImplementedError
+    
+    def html_class(self):
+        """return the css class to use when rendering this action"""
+        if self.req.selected(self.url()):
+            return 'selected'
+        if self.category:
+            return 'box' + self.category.capitalize()
+
+class UnregisteredAction(Action):
+    """non registered action used to build boxes. Unless you set them
+    explicitly, .vreg and .schema attributes at least are None.
+
+    :param title: message id of the action's title, translated immediately
+    :param path: the url returned by url()
+    :param kwargs: extra attributes set directly on the instance
+    """
+    category = None
+    id = None
+    
+    def __init__(self, req, rset, title, path, **kwargs):
+        Action.__init__(self, req, rset)
+        self.title = req._(title)
+        self._path = path
+        self.__dict__.update(kwargs)
+        
+    def url(self):
+        return self._path
+
+
+class AddEntityAction(Action):
+    """link to the entity creation form. Concrete class must set .etype and
+    may override .vid
+    """
+    __selectors__ = (add_etype_selector, searchstate_selector)
+    # view id of the target form
+    vid = 'creation'
+    # entity type to create; must be set by concrete subclasses
+    etype = None
+    
+    def url(self):
+        return self.build_url(vid=self.vid, etype=self.etype)
+
+
+class EntityAction(Action):
+    """an action for an entity. By default entity actions are only
+    displayable on single entity result if accept match.
+    """
+    __selectors__ = (searchstate_accept_one_selector,)
+    schema_action = None
+    condition = None
+    
+    @classmethod
+    def accept(cls, user, etype):
+        score = super(EntityAction, cls).accept(user, etype)
+        if not score:
+            return 0
+        # check if this type of entity has the necessary relation
+        if hasattr(cls, 'rtype') and not cls.relation_possible(etype):
+            return 0
+        return score
+
+    
+class LinkToEntityAction(EntityAction):
+    """base class for actions consisting to create a new object
+    with an initial relation set to an entity.
+    Additionally to EntityAction behaviour, this class is parametrized
+    using .etype, .rtype and .target attributes to check if the
+    action apply and if the logged user has access to it
+    """
+    # entity type of the object to create
+    etype = None
+    # relation linking the new entity to the current one
+    rtype = None
+    # 'subject' or 'object': role of the new entity in the relation
+    target = None
+    category = 'addrelated'
+
+    @classmethod
+    def accept_rset(cls, req, rset, row, col):
+        """score the action for the entity at (row, col) (defaulting to the
+        first cell): relation must be possible and permissions granted
+        """
+        entity = rset.get_entity(row or 0, col or 0)
+        # check if this type of entity has the necessary relation
+        if hasattr(cls, 'rtype') and not cls.relation_possible(entity.e_schema):
+            return 0
+        score = cls.accept(req.user, entity.e_schema)
+        if not score:
+            return 0
+        if not cls.check_perms(req, entity):
+            return 0
+        return score
+
+    @classmethod
+    def check_perms(cls, req, entity):
+        if not cls.check_rtype_perm(req, entity):
+            return False
+        # XXX document this:
+        # if user can create the relation, suppose it can create the entity
+        # this is because we usually can't check "add" permission before the
+        # entity has actually been created, and schema security should be
+        # defined considering this
+        #if not cls.check_etype_perm(req, entity):
+        #    return False
+        return True
+        
+    @classmethod
+    def check_etype_perm(cls, req, entity):
+        """return True if the user may add an entity of type .etype
+        (currently unused, see check_perms above)
+        """
+        eschema = cls.schema.eschema(cls.etype)
+        if not eschema.has_perm(req, 'add'):
+            #print req.user.login, 'has no add perm on etype', cls.etype
+            return False
+        #print 'etype perm ok', cls
+        return True
+
+    @classmethod
+    def check_rtype_perm(cls, req, entity):
+        """return True if the user may add a .rtype relation involving `entity`"""
+        rschema = cls.schema.rschema(cls.rtype)
+        # cls.target is telling us if we want to add the subject or object of
+        # the relation
+        if cls.target == 'subject':
+            if not rschema.has_perm(req, 'add', toeid=entity.eid):
+                #print req.user.login, 'has no add perm on subject rel', cls.rtype, 'with', entity
+                return False
+        elif not rschema.has_perm(req, 'add', fromeid=entity.eid):
+            #print req.user.login, 'has no add perm on object rel', cls.rtype, 'with', entity
+            return False
+        #print 'rtype perm ok', cls
+        return True
+            
+    def url(self):
+        """return the creation-form url, carrying the relation to pre-set
+        (__linkto) and where to redirect afterwards
+        """
+        current_entity = self.rset.get_entity(self.row or 0, self.col or 0)
+        linkto = '%s:%s:%s' % (self.rtype, current_entity.eid, self.target)
+        return self.build_url(vid='creation', etype=self.etype,
+                              __linkto=linkto,
+                              __redirectpath=current_entity.rest_path(), # should not be url quoted!
+                              __redirectvid=self.req.form.get('__redirectvid', ''))
+
+
+class LinkToEntityAction2(LinkToEntityAction):
+    """LinkToEntity action where the action is not usable on the same
+    entity's type as the one referred by the .etype attribute
+    """
+    __selectors__ = (searchstate_accept_one_but_etype_selector,)
+    
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/application.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,404 @@
+"""CubicWeb web client application object
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+import sys
+from time import clock, time
+
+from rql import BadRQLQuery
+
+from cubicweb import set_log_methods
+from cubicweb import (ValidationError, Unauthorized, AuthenticationError,
+                   NoSelectableObject, RepositoryError)
+from cubicweb.cwconfig import CubicWebConfiguration
+from cubicweb.cwvreg import CubicWebRegistry
+from cubicweb.web import (LOGGER, StatusResponse, DirectResponse, Redirect, NotFound,
+                       RemoteCallFailed, ExplicitLogin, InvalidSession)
+from cubicweb.web.component import SingletonComponent
+
+# make session manager available through a global variable so the debug view can
+# print information about web session
+SESSION_MANAGER = None
+
+class AbstractSessionManager(SingletonComponent):
+    """manage session data associated to a session identifier"""
+    id = 'sessionmanager'
+    
+    def __init__(self):
+        # a null/absent 'http-session-time' option means sessions never expire
+        # NOTE(review): units of these durations are not visible here
+        # (presumably minutes given the 120 / 720 defaults) -- verify
+        self.session_time = self.vreg.config['http-session-time'] or None
+        assert self.session_time is None or self.session_time > 0
+        self.cleanup_session_time = self.vreg.config['cleanup-session-time'] or 120
+        assert self.cleanup_session_time > 0
+        self.cleanup_anon_session_time = self.vreg.config['cleanup-anonymous-session-time'] or 720
+        assert self.cleanup_anon_session_time > 0
+        if self.session_time:
+            # cleanup thresholds must trigger before the session itself expires
+            assert self.cleanup_session_time < self.session_time
+            assert self.cleanup_anon_session_time < self.session_time
+        self.authmanager = self.vreg.select_component('authmanager')
+        assert self.authmanager, 'no authentication manager found'
+        
+    def clean_sessions(self):
+        """close sessions which have not been used for a given amount of
+        time. Return a (closed, remaining) tuple.
+        """
+        self.debug('cleaning http sessions')
+        closed, total = 0, 0
+        for session in self.current_sessions():
+            # elapsed time since the session was last touched
+            no_use_time = (time() - session.last_usage_time)
+            total += 1
+            if session.anonymous_connection:
+                # anonymous sessions use their own (usually longer) threshold
+                if no_use_time >= self.cleanup_anon_session_time:
+                    self.close_session(session)
+                    closed += 1
+            elif no_use_time >= self.cleanup_session_time:
+                self.close_session(session)
+                closed += 1
+        return closed, total - closed
+    
+    def has_expired(self, session):
+        """return True if the web session associated to the session is expired
+        """
+        # never expires when no session-time is configured
+        return not (self.session_time is None or
+                    time() < session.last_usage_time + self.session_time)
+                
+    def current_sessions(self):
+        """return currently open sessions"""
+        raise NotImplementedError()
+            
+    def get_session(self, req, sessionid):
+        """return existing session for the given session identifier"""
+        raise NotImplementedError()
+
+    def open_session(self, req):
+        """open and return a new session for the given request
+        
+        :raise ExplicitLogin: if authentication is required
+        """
+        raise NotImplementedError()
+    
+    def close_session(self, session):
+        """close session on logout or on invalid session detected (expired out,
+        corrupted...)
+        """
+        raise NotImplementedError()
+
+
+class AbstractAuthenticationManager(SingletonComponent):
+    """authenticate user associated to a request and check session validity"""
+    id = 'authmanager'
+
+    def authenticate(self, req):
+        """authenticate user and return corresponding user object
+        
+        :raise ExplicitLogin: if authentication is required (no authentication
+        info found or wrong user/password)
+        """
+        # abstract: concrete managers are registered under the 'authmanager' id
+        raise NotImplementedError()
+
+    
+class CookieSessionHandler(object):
+    """a session handler using a cookie to store the session identifier
+
+    :cvar SESSION_VAR:
+      string giving the name of the variable used to store the session
+      identifier
+    """
+    SESSION_VAR = '__session'
+    
+    def __init__(self, appli):
+        self.session_manager = appli.vreg.select_component('sessionmanager')
+        assert self.session_manager, 'no session manager found'
+        # expose the manager through a module global so the debug view can
+        # display information about web sessions
+        global SESSION_MANAGER
+        SESSION_MANAGER = self.session_manager
+        if not 'last_login_time' in appli.vreg.schema:
+            # schema has no last_login_time attribute: shadow the class-level
+            # method with a no-op instance attribute
+            self._update_last_login_time = lambda x: None
+
+    def clean_sessions(self):
+        """cleanup sessions which have not been used for a given amount of
+        time
+        """
+        self.session_manager.clean_sessions()
+        
+    def set_session(self, req):
+        """associate a session to the request
+
+        Session id is searched from the session cookie.
+
+        if no session id is found, open a new session for the connected user
+        or request authentication as needed
+
+        :raise Redirect: if authentication has occurred and succeeded
+        """
+        assert req.cnx is None # at this point no cnx should be set on the request
+        cookie = req.get_cookie()
+        try:
+            sessionid = str(cookie[self.SESSION_VAR].value)
+        except KeyError: # no session cookie
+            session = self.open_session(req)
+        else:
+            try:
+                session = self.get_session(req, sessionid)
+            except InvalidSession:
+                # stale/corrupted session id: try to open a fresh session
+                try:
+                    session = self.open_session(req)
+                except ExplicitLogin:
+                    # drop the stale cookie before asking for credentials
+                    req.remove_cookie(cookie, self.SESSION_VAR)
+                    raise
+        # remember last usage time for web session tracking
+        session.last_usage_time = time()
+
+    def get_session(self, req, sessionid):
+        return self.session_manager.get_session(req, sessionid)
+    
+    def open_session(self, req):
+        session = self.session_manager.open_session(req)
+        cookie = req.get_cookie()
+        cookie[self.SESSION_VAR] = session.sessionid
+        req.set_cookie(cookie, self.SESSION_VAR, maxage=None)
+        # remember last usage time for web session tracking
+        session.last_usage_time = time()
+        if not session.anonymous_connection:
+            # actual user login: update last_login_time and redirect
+            self._postlogin(req)
+        return session
+
+    def _update_last_login_time(self, req):
+        try:
+            req.execute('SET X last_login_time NOW WHERE X eid %(x)s',
+                        {'x' : req.user.eid}, 'x')
+            req.cnx.commit()
+        except (RepositoryError, Unauthorized):
+            # ldap user are not writeable for instance
+            req.cnx.rollback()
+        except:
+            req.cnx.rollback()
+            raise
+        
+    def _postlogin(self, req):
+        """postlogin: the user has been authenticated, redirect to the original
+        page (index by default) with a welcome message
+        """
+        # Update last connection date
+        # XXX: this should be in a post login hook in the repository, but there
+        #      we can't differentiate actual login of automatic session
+        #      reopening. Is it actually a problem?
+        self._update_last_login_time(req)
+        args = req.form
+        args['__message'] = req._('welcome %s !') % req.user.login
+        if 'vid' in req.form:
+            args['vid'] = req.form['vid']
+        if 'rql' in req.form:
+            args['rql'] = req.form['rql']
+        path = req.relative_path(False)
+        if path == 'login':
+            path = 'view'
+        raise Redirect(req.build_url(path, **args))
+    
+    def logout(self, req):
+        """logout from the application by cleaning the session and raising
+        `AuthenticationError`
+        """
+        # NOTE(review): close_session is given req.cnx here while
+        # clean_sessions passes session objects -- verify the session manager
+        # accepts both
+        self.session_manager.close_session(req.cnx)
+        req.remove_cookie(req.get_cookie(), self.SESSION_VAR)
+        raise AuthenticationError()
+
+
+class CubicWebPublisher(object):
+    """Central registry for the web application. This is one of the central
+    object in the web application, coupling dynamically loaded objects with
+    the application's schema and the application's configuration objects.
+    
+    It specializes the VRegistry by adding some convenience methods to
+    access to stored objects. Currently we have the following registries
+    of objects known by the web application (library may use some others
+    additional registries):
+    * controllers, which are directly plugged into the application
+      object to handle request publishing
+    * views
+    * templates
+    * components
+    * actions
+    """
+    
+    def __init__(self, config, debug=None,
+                 session_handler_fact=CookieSessionHandler,
+                 vreg=None):
+        super(CubicWebPublisher, self).__init__()
+        # connect to the repository and get application's schema
+        if vreg is None:
+            vreg = CubicWebRegistry(config, debug=debug)
+        self.vreg = vreg
+        self.info('starting web application from %s', config.apphome)
+        self.repo = config.repository(vreg)
+        if not vreg.initialized:
+            # NOTE(review): self.config is never assigned in this class;
+            # this probably should read config.init_cubes(...) -- verify
+            self.config.init_cubes(self.repo.get_cubes())
+            vreg.init_properties(self.repo.properties())
+        vreg.set_schema(self.repo.get_schema())
+        # set the correct publish method
+        if config['query-log-file']:
+            # wrap publishing so every executed rql query is appended to the
+            # log file, serialized by a lock
+            from threading import Lock
+            self._query_log = open(config['query-log-file'], 'a')
+            self.publish = self.log_publish
+            self._logfile_lock = Lock()            
+        else:
+            self._query_log = None
+            self.publish = self.main_publish
+        # instantiate session and url resolving helpers
+        self.session_handler = session_handler_fact(self)
+        self.url_resolver = vreg.select_component('urlpublisher')
+    
+    def connect(self, req):
+        """return a connection for a logged user object according to existing
+        sessions (i.e. a new connection may be created or an already existing
+        one may be reused
+        """
+        self.session_handler.set_session(req)
+
+    def select_controller(self, oid, req):
+        """return the most specific view according to the resultset"""
+        vreg = self.vreg
+        try:
+            return vreg.select(vreg.registry_objects('controllers', oid),
+                               req=req, appli=self)
+        except NoSelectableObject:
+            # no controller selectable for this request: treat as forbidden
+            raise Unauthorized(req._('not authorized'))
+            
+    # publish methods #########################################################
+        
+    def log_publish(self, path, req):
+        """wrapper around _publish to log all queries executed for a given
+        accessed path
+        """
+        try:
+            return self.main_publish(path, req)
+        finally:
+            # always log, even when main_publish raised
+            cnx = req.cnx
+            self._logfile_lock.acquire()
+            try:
+                try:
+                    result = ['\n'+'*'*80]
+                    result.append(req.url())
+                    result += ['%s %s -- (%.3f sec, %.3f CPU sec)' % q for q in cnx.executed_queries]
+                    cnx.executed_queries = []
+                    self._query_log.write('\n'.join(result).encode(req.encoding))
+                    self._query_log.flush()
+                except Exception:
+                    # logging failure must never mask the request's outcome
+                    self.exception('error while logging queries')
+            finally:
+                self._logfile_lock.release()
+
+    def main_publish(self, path, req):
+        """method called by the main publisher to process <path>
+        
+        should return a string containing the resulting page or raise a
+        `NotFound` exception
+
+        :type path: str
+        :param path: the path part of the url to publish
+        
+        :type req: `web.Request`
+        :param req: the request object
+
+        :rtype: str
+        :return: the result of the published url
+        """
+        path = path or 'view'
+        # don't log form values they may contains sensitive information
+        self.info('publish "%s" (form params: %s)', path, req.form.keys())
+        # remove user callbacks on a new request (except for json controllers
+        # to avoid callbacks being unregistered before they could be called)
+        tstart = clock()
+        try:
+            try:
+                ctrlid, rset = self.url_resolver.process(req, path)
+                controller = self.select_controller(ctrlid, req)
+                result = controller.publish(rset=rset)
+                if req.cnx is not None:
+                    # req.cnx is None if anonymous aren't allowed and we are
+                    # displaying the cookie authentication form
+                    req.cnx.commit()
+            except (StatusResponse, DirectResponse):
+                # NOTE(review): req.cnx may be None here (see check above) --
+                # verify this commit cannot be reached without a connection
+                req.cnx.commit()
+                raise
+            except Redirect:
+                # redirect is raised by edit controller when everything went fine,
+                # so try to commit
+                try:
+                    req.cnx.commit()
+                except ValidationError, ex:
+                    self.validation_error_handler(req, ex)
+                except Unauthorized, ex:
+                    req.data['errmsg'] = req._('You\'re not authorized to access this page. '
+                                               'If you think you should, please contact the site administrator.')
+                    self.error_handler(req, ex, tb=False)
+                except Exception, ex:
+                    self.error_handler(req, ex, tb=True)
+                else:
+                    # delete validation errors which may have been previously set
+                    if '__errorurl' in req.form:
+                        req.del_session_data(req.form['__errorurl'])
+                    raise
+            except (AuthenticationError, NotFound, RemoteCallFailed):
+                # propagated as-is, handled by the outer request machinery
+                raise
+            except ValidationError, ex:
+                self.validation_error_handler(req, ex)
+            except (Unauthorized, BadRQLQuery), ex:
+                self.error_handler(req, ex, tb=False)
+            except Exception, ex:
+                self.error_handler(req, ex, tb=True)
+        finally:
+            # anything not committed above is rolled back
+            if req.cnx is not None:
+                try:
+                    req.cnx.rollback()
+                except:
+                    pass # ignore rollback error at this point
+        self.info('query %s executed in %s sec', req.relative_path(), clock() - tstart)
+        return result
+
+    def validation_error_handler(self, req, ex):
+        ex.errors = dict((k, v) for k, v in ex.errors.items())
+        if '__errorurl' in req.form:
+            # store the error in session data and redirect back to the form,
+            # which will redisplay with the faulty values highlighted
+            forminfo = {'errors': ex,
+                        'values': req.form,
+                        'eidmap': req.data.get('eidmap', {})
+                        }
+            req.set_session_data(req.form['__errorurl'], forminfo)
+            raise Redirect(req.form['__errorurl'])
+        self.error_handler(req, ex, tb=False)
+        
+    def error_handler(self, req, ex, tb=False):
+        """render the error page and raise it as a 500 StatusResponse"""
+        excinfo = sys.exc_info()
+        self.exception(repr(ex))
+        req.set_header('Cache-Control', 'no-cache')
+        req.remove_header('Etag')
+        req.message = None
+        req.reset_headers()
+        try:
+            req.data['ex'] = ex
+            if tb:
+                req.data['excinfo'] = excinfo
+            req.form['vid'] = 'error'
+            content = self.vreg.main_template(req, 'main')
+        except:
+            # even the error view failed: fall back to a bare-bones template
+            content = self.vreg.main_template(req, 'error')
+        raise StatusResponse(500, content)
+    
+    def need_login_content(self, req):
+        return self.vreg.main_template(req, 'login')
+    
+    def loggedout_content(self, req):
+        return self.vreg.main_template(req, 'loggedout')
+    
+    def notfound_content(self, req):
+        template = req.property_value('ui.main-template') or 'main'
+        req.form['vid'] = '404'
+        return self.vreg.main_template(req, template)
+
+
+# plug standard logging methods (debug/info/error/exception...) on the classes
+set_log_methods(CubicWebPublisher, LOGGER)
+set_log_methods(CookieSessionHandler, LOGGER)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/box.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,238 @@
+"""abstract box classes for CubicWeb web client
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from logilab.common.decorators import cached
+from logilab.mtconverter import html_escape
+
+from cubicweb import Unauthorized
+from cubicweb.common.registerers import (accepts_registerer,
+                                      extresources_registerer,
+                                      etype_rtype_priority_registerer)
+from cubicweb.common.selectors import (etype_rtype_selector, onelinerset_selector,
+                                    accept_selector, accept_rtype_selector,
+                                    primaryview_selector, contextprop_selector)
+from cubicweb.common.view import Template
+from cubicweb.common.appobject import ReloadableMixIn
+
+from cubicweb.web.htmlwidgets import (BoxLink, BoxWidget, SideBoxWidget,
+                                   RawBoxItem, BoxSeparator)
+from cubicweb.web.action import UnregisteredAction
+
+_ = unicode
+
+
+class BoxTemplate(Template):
+    """base template for boxes, usually a (contextual) list of possible
+    actions. Various classes attributes may be used to control the box
+    rendering.
+    
+    You may override one of the formatting callbacks if the default is not
+    suitable for your custom box.
+    
+    Classes inheriting from this class usually only have to override call
+    to fetch desired actions, and then to do something like  ::
+
+        box.render(self.w)
+    """
+    __registry__ = 'boxes'
+    __selectors__ = Template.__selectors__ + (contextprop_selector,)
+    
+    categories_in_order = ()
+    # user-configurable properties of every box
+    property_defs = {
+        _('visible'): dict(type='Boolean', default=True,
+                           help=_('display the box or not')),
+        _('order'):   dict(type='Int', default=99,
+                           help=_('display order of the box')),
+        # XXX 'incontext' boxes are handled by the default primary view
+        _('context'): dict(type='String', default='left',
+                           vocabulary=(_('left'), _('incontext'), _('right')),
+                           help=_('context where this box should be displayed')),
+        }
+    context = 'left'
+    htmlitemclass = 'boxItem'
+
+    def sort_actions(self, actions):
+        """return a list of (category, actions_sorted_by_title)"""
+        result = []
+        actions_by_cat = {}
+        # group actions by category, each group sorted by title
+        for action in actions:
+            actions_by_cat.setdefault(action.category, []).append((action.title, action))
+        for key, values in actions_by_cat.items():
+            actions_by_cat[key] = [act for title, act in sorted(values)]
+        # explicitly ordered categories first...
+        for cat in self.categories_in_order:
+            if cat in actions_by_cat:
+                result.append( (cat, actions_by_cat[cat]) )
+        # ...then the rest in alphabetical order
+        # NOTE(review): categories listed in categories_in_order appear twice
+        # in the result (once above, once here) -- verify this is intended
+        for item in sorted(actions_by_cat.items()):
+            result.append(item)
+        return result
+
+    def mk_action(self, title, path, escape=True, **kwargs):
+        """factory function to create dummy actions compatible with the
+        .format_actions method
+        """
+        if escape:
+            title = html_escape(title)
+        return self.box_action(self._action(title, path, **kwargs))
+    
+    def _action(self, title, path, **kwargs):
+        return UnregisteredAction(self.req, self.rset, title, path, **kwargs)        
+
+    # formating callbacks
+
+    def boxitem_link_tooltip(self, action):
+        if action.id:
+            return u'keyword: %s' % action.id
+        return u''
+
+    def box_action(self, action):
+        """build a BoxLink widget for the given action"""
+        # action may provide its own html class through a html_class() method
+        cls = getattr(action, 'html_class', lambda: None)() or self.htmlitemclass
+        return BoxLink(action.url(), self.req._(action.title),
+                       cls, self.boxitem_link_tooltip(action))
+        
+
+class RQLBoxTemplate(BoxTemplate):
+    """abstract box for boxes displaying the content of a rql query not
+    related to the current result set.
+    
+    It relies on etype, rtype (both optional, usable to control registration
+    according to application schema and display according to connected
+    user's rights) and rql attributes
+    """
+    __registerer__ = etype_rtype_priority_registerer
+    __selectors__ = BoxTemplate.__selectors__ + (etype_rtype_selector,)
+
+    rql  = None
+    
+    def to_display_rql(self):
+        """return the (query, [args, [cachekey]]) tuple passed to req.execute"""
+        assert self.rql is not None, self.id
+        return (self.rql,)
+    
+    def call(self, **kwargs):
+        try:
+            rset = self.req.execute(*self.to_display_rql())
+        except Unauthorized:
+            # can't access to something in the query, forget this box
+            return
+        if len(rset) == 0:
+            return
+        box = BoxWidget(self.req._(self.title), self.id)
+        # NOTE(review): assumes a two-column (eid, name) result set -- the rql
+        # class attribute must be written accordingly
+        for i, (teid, tname) in enumerate(rset):
+            entity = rset.get_entity(i, 0)
+            box.append(self.mk_action(tname, entity.absolute_url()))
+        box.render(w=self.w)
+
+        
+class UserRQLBoxTemplate(RQLBoxTemplate):
+    """same as rql box template but the rql is built using the eid of the
+    request's user
+    """
+
+    def to_display_rql(self):
+        # substitute the connected user's eid for %(x)s in the query
+        assert self.rql is not None, self.id
+        return (self.rql, {'x': self.req.user.eid}, 'x')
+    
+
+class ExtResourcesBoxTemplate(BoxTemplate):
+    """base class for boxes displaying external resources such as the RSS logo.
+    It should list necessary resources with the .need_resources attribute.
+    """
+    __registerer__ = extresources_registerer
+    # tuple of resource identifiers required for the box to be registered
+    need_resources = ()
+
+
+class EntityBoxTemplate(BoxTemplate):
+    """base class for boxes related to a single entity"""
+    __registerer__ = accepts_registerer
+    __selectors__ = (onelinerset_selector, primaryview_selector,
+                     contextprop_selector, etype_rtype_selector,
+                     accept_rtype_selector, accept_selector)
+    accepts = ('Any',)
+    context = 'incontext'
+    
+    def call(self, row=0, col=0, **kwargs):
+        """classes inheriting from EntityBoxTemplate should define cell_call"""
+        self.cell_call(row, col, **kwargs)
+
+
+
+class EditRelationBoxTemplate(ReloadableMixIn, EntityBoxTemplate):
+    """base class for boxes which let add or remove entities linked
+    by a given relation
+
+    subclasses should define at least id, rtype and target
+    class attributes.
+    """
+    
+    def cell_call(self, row, col):
+        self.req.add_js('cubicweb.ajax.js')
+        entity = self.entity(row, col)
+        # NOTE(review): display_name is not imported in this module -- verify
+        # it is made available through a builtin or wildcard import
+        box = SideBoxWidget(display_name(self.req, self.rtype), self.id)
+        count = self.w_related(box, entity)
+        if count:
+            box.append(BoxSeparator())
+        self.w_unrelated(box, entity)
+        box.render(self.w)
+
+    def div_id(self):
+        return self.id
+
+    @cached
+    def xtarget(self):
+        # return (role played by the other entity, role played by this entity)
+        if self.target == 'subject':
+            return 'object', 'subject'
+        return 'subject', 'object'
+        
+    def box_item(self, entity, etarget, rql, label):
+        """builds HTML link to edit relation between `entity` and `etarget`
+        """
+        x, target = self.xtarget()
+        # rql expects %(s)s / %(o)s substitutions: 'subject'[0] / 'object'[0]
+        args = {x[0] : entity.eid, target[0] : etarget.eid}
+        url = self.user_rql_callback((rql, args))
+        # for each target, provide a link to edit the relation
+        label = u'[<a href="%s">%s</a>] %s' % (url, label,
+                                               etarget.view('incontext'))
+        return RawBoxItem(label, liclass=u'invisible')
+    
+    def w_related(self, box, entity):
+        """appends existing relations to the `box`"""
+        rql = 'DELETE S %s O WHERE S eid %%(s)s, O eid %%(o)s' % self.rtype
+        related = self.related_entities(entity)
+        for etarget in related:
+            box.append(self.box_item(entity, etarget, rql, u'-'))
+        return len(related)
+    
+    def w_unrelated(self, box, entity):
+        """appends unrelated entities to the `box`"""
+        rql = 'SET S %s O WHERE S eid %%(s)s, O eid %%(o)s' % self.rtype
+        for etarget in self.unrelated_entities(entity):
+            box.append(self.box_item(entity, etarget, rql, u'+'))
+
+    def unrelated_entities(self, entity):
+        """returns the list of unrelated entities
+
+        if etype is not defined on the Box's class, the default
+        behaviour is to use the entity's appropriate vocabulary function
+        """
+        x, target = self.xtarget()
+        # use entity.unrelated if we've been asked for a particular etype
+        if hasattr(self, 'etype'):
+            return entity.unrelated(self.rtype, self.etype, x).entities()
+        # in other cases, use vocabulary functions
+        entities = []
+        for _, eid in entity.vocabulary(self.rtype, x):
+            if eid is not None:
+                rset = self.req.eid_rset(eid)
+                entities.append(rset.get_entity(0, 0))
+        return entities
+        
+    def related_entities(self, entity):
+        x, target = self.xtarget()
+        return entity.related(self.rtype, x, entities=True)
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/component.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,164 @@
+"""abstract component class and base components definition for CubicWeb web client
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from cubicweb.common.appobject import Component, SingletonComponent
+from cubicweb.common.utils import merge_dicts
+from cubicweb.common.view import VComponent, SingletonVComponent
+from cubicweb.common.registerers import action_registerer
+from cubicweb.common.selectors import (largerset_selector, onelinerset_selector, 
+                                    etype_rtype_selector, rqlcondition_selector,
+                                    accept_selector, contextprop_selector,
+                                    primaryview_selector, accept_rtype_selector)
+from cubicweb.common.uilib import html_escape
+
+_ = unicode
+
+
+class EntityVComponent(VComponent):
+    """abstract base class for additional components displayed in content
+    headers and footer according to:
+    
+    * the displayed entity's type
+    * a context (currently 'header' or 'footer')
+
+    it should be configured using .accepts, .etype, .rtype, .target and
+    .context class attributes
+    """
+    
+    __registry__ = 'contentnavigation'
+    __registerer__ = action_registerer    
+    __selectors__ = (onelinerset_selector, primaryview_selector,
+                     contextprop_selector, etype_rtype_selector,
+                     accept_rtype_selector, accept_selector,
+                     rqlcondition_selector)
+    
+    # user-configurable properties of every content-navigation component
+    property_defs = {
+        _('visible'):  dict(type='Boolean', default=True,
+                            help=_('display the box or not')),
+        _('order'):    dict(type='Int', default=99,
+                            help=_('display order of the component')),
+        _('context'):  dict(type='String', default='header',
+                            vocabulary=(_('navtop'), _('navbottom'), 
+                                        _('navcontenttop'), _('navcontentbottom')),
+                            #vocabulary=(_('header'), _('incontext'), _('footer')),
+                            help=_('context where this component should be displayed')),
+        _('htmlclass'):dict(type='String', default='mainRelated',
+                            help=_('html class of the component')),
+    }
+    
+    accepts = ('Any',)
+    context = 'navcontentbottom' # 'footer' | 'header' | 'incontext'
+    condition = None
+    
+    def call(self, view):
+        # abstract: subclasses must implement rendering
+        raise RuntimeError()
+
+    
+class NavigationComponent(VComponent):
+    """abstract base class for navigation components"""
+    __selectors__ = (largerset_selector,)
+    id = 'navigation'
+    page_size_property = 'navigation.page-size'
+    # form parameter names giving the displayed slice boundaries
+    start_param = '__start'
+    stop_param = '__stop'
+    page_link_templ = u'<span class="slice"><a href="%s" title="%s">%s</a></span>'
+    selected_page_link_templ = u'<span class="selectedSlice"><a href="%s" title="%s">%s</a></span>'
+    previous_page_link_templ = next_page_link_templ = page_link_templ
+    no_previous_page_link = no_next_page_link = u''
+    
+    def __init__(self, req, rset):
+        super(NavigationComponent, self).__init__(req, rset)
+        self.starting_from = 0
+        self.total = rset.rowcount
+
+    def get_page_size(self):
+        # lazily fetched from user properties, cached on the instance
+        try:
+            return self._page_size
+        except AttributeError:
+            self._page_size = self.req.property_value(self.page_size_property)
+            return self._page_size
+
+    def set_page_size(self, page_size):
+        self._page_size = page_size
+        
+    page_size = property(get_page_size, set_page_size)
+    
+    def page_boundaries(self):
+        """return the (start, stop) indexes of the currently displayed slice"""
+        # NOTE(review): only KeyError is caught; a non-integer form value
+        # would raise ValueError -- verify upstream sanitizes these params
+        try:
+            stop = int(self.req.form[self.stop_param]) + 1
+            start = int(self.req.form[self.start_param])
+        except KeyError:
+            start, stop = 0, self.page_size
+        self.starting_from = start
+        return start, stop
+        
+    def clean_params(self, params):
+        """remove pagination parameters from the given parameter dict"""
+        if self.start_param in params:
+            del params[self.start_param]
+        if self.stop_param in params:
+            del params[self.stop_param]
+
+    def page_link(self, path, params, start, stop, content):
+        url = self.build_url(path, **merge_dicts(params, {self.start_param : start,
+                                                          self.stop_param : stop,}))
+        url = html_escape(url)
+        # highlight the page currently being displayed
+        if start == self.starting_from:
+            return self.selected_page_link_templ % (url, content, content)
+        return self.page_link_templ % (url, content, content)
+
+    def previous_link(self, params, content='&lt;&lt;', title=_('previous_results')):
+        start = self.starting_from
+        if not start :
+            # already on the first page
+            return self.no_previous_page_link
+        start = max(0, start - self.page_size)
+        stop = start + self.page_size - 1
+        url = self.build_url(**merge_dicts(params, {self.start_param : start,
+                                                    self.stop_param : stop,}))
+        url = html_escape(url)
+        return self.previous_page_link_templ % (url, title, content)
+
+    def next_link(self, params, content='&gt;&gt;', title=_('next_results')):
+        start = self.starting_from + self.page_size
+        if start >= self.total:
+            # already on the last page
+            return self.no_next_page_link
+        stop = start + self.page_size - 1
+        url = self.build_url(**merge_dicts(params, {self.start_param : start,
+                                                    self.stop_param : stop,}))
+        url = html_escape(url)
+        return self.next_page_link_templ % (url, title, content)
+
+
+class RelatedObjectsVComponent(EntityVComponent):
+    """a section to display some related entities"""
+    __selectors__ = (onelinerset_selector, primaryview_selector,
+                     etype_rtype_selector, accept_rtype_selector,
+                     contextprop_selector, accept_selector)
+    vid = 'list'
+
+    def rql(self):
+        """override this method if you want to use a custom rql query.
+        """
+        return None
+    
+    def call(self, view=None):
+        rql = self.rql()
+        if rql is None:
+            # no custom query: follow self.rtype from the displayed entity,
+            # on the side opposite to self.target
+            entity = self.rset.get_entity(0, 0)
+            if self.target == 'object':
+                role = 'subject'
+            else:
+                role = 'object'
+            rset = entity.related(self.rtype, role)
+        else:
+            # NOTE(review): self.rql() is evaluated a second time here though
+            # the `rql` local already holds its value -- harmless but redundant
+            eid = self.rset[0][0]
+            rset = self.req.execute(self.rql(), {'x': eid}, 'x')
+        if not rset.rowcount:
+            return
+        self.w(u'<div class="%s">' % self.div_class())
+        self.wview(self.vid, rset, title=self.req._(self.title).capitalize())
+        self.w(u'</div>')
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/controller.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,257 @@
+"""abstract controller class for CubicWeb web client
+
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from mx.DateTime import strptime, Error as MxDTError, TimeDelta
+
+from cubicweb import typed_eid
+from cubicweb.common.registerers import priority_registerer
+from cubicweb.common.selectors import in_group_selector
+from cubicweb.common.appobject import AppObject
+from cubicweb.web import LOGGER, Redirect, RequestError
+
+
+NAVIGATION_PARAMETERS = (('vid', '__redirectvid'),
+                         ('rql', '__redirectrql'),
+                         ('__redirectpath', '__redirectpath'),
+                         ('__redirectparams', '__redirectparams'),
+                         )
+NAV_FORM_PARAMETERS = [fp for ap, fp in NAVIGATION_PARAMETERS]
+
+def redirect_params(form):
+    """transform redirection parameters into navigation parameters
+    """
+    params = {}
+    # extract navigation parameters from redirection parameters
+    for navparam, redirectparam in NAVIGATION_PARAMETERS:
+        if navparam == redirectparam:
+            continue
+        if redirectparam in form:
+            params[navparam] = form[redirectparam]
+    return params
+
+def parse_relations_descr(rdescr):
+    """parse a string describing some relations, in the form
+    subjeids:rtype:objeids
+    where subjeids and objeids are eids separated by an underscore
+
+    return an iterator on (subject eid, relation type, object eid) found
+    """
+    for rstr in rdescr:
+        subjs, rtype, objs = rstr.split(':')
+        for subj in subjs.split('_'):
+            for obj in objs.split('_'):
+                yield typed_eid(subj), rtype, typed_eid(obj)
+        
+def append_url_params(url, params):
+    """append raw parameters to the url. Given parameters, if any, are expected
+    to be already url-quoted.
+    """
+    if params:
+        if not '?' in url:
+            url += '?'
+        else:
+            url += '&'
+        url += params
+    return url
+
+
+class Controller(AppObject):
+    """a controller is responsible to make necessary stuff to publish
+    a request. There is usually at least one standard "view" controller
+    and another linked by forms to edit objects ("edit").
+    """
+    __registry__ = 'controllers'
+    __registerer__ = priority_registerer
+    __selectors__ = (in_group_selector,)
+    require_groups = ()
+
+    def __init__(self, *args, **kwargs):
+        super(Controller, self).__init__(*args, **kwargs)
+        # attributes used to control redirection after edition
+        self._after_deletion_path = None
+        self._edited_entity = None
+        
+    def publish(self, rset=None):
+        """publish the current request, with an optional input rql string
+        (already processed if necessary)
+        """
+        raise NotImplementedError
+
+    # generic methods useful for concrete implementations ######################
+    
+    def check_expected_params(self, params):
+        """check that the given list of parameters are specified in the form
+        dictionary
+        """
+        missing = []
+        for param in params:
+            if not self.req.form.get(param):
+                missing.append(param)
+        if missing:
+            raise RequestError('missing required parameter(s): %s'
+                               % ','.join(missing))
+    
+    def parse_datetime(self, value, etype='Datetime'):
+        """get a datetime or time from a string (according to etype)
+        Datetime formatted as Date are accepted
+        """
+        assert etype in ('Datetime', 'Date', 'Time'), etype
+        # XXX raise proper validation error
+        if etype == 'Datetime':
+            format = self.req.property_value('ui.datetime-format')
+            try:
+                return strptime(value, format)
+            except MxDTError:
+                pass
+        elif etype == 'Time':
+            format = self.req.property_value('ui.time-format')
+            try:
+                # (adim) I can't find a way to parse a Time with a custom format
+                date = strptime(value, format) # this returns a DateTime
+                return TimeDelta(date.hour, date.minute, date.second)
+            except MxDTError:
+                raise ValueError('can\'t parse %r (expected %s)' % (value, format))
+        try:
+            format = self.req.property_value('ui.date-format')
+            return strptime(value, format)
+        except MxDTError:
+            raise ValueError('can\'t parse %r (expected %s)' % (value, format))
+
+
+    def notify_edited(self, entity):
+        """called by edit_entity() to notify which entity is edited"""
+        # NOTE: we can't use entity.rest_path() at this point because
+        #       rest_path() could rely on schema constraints (such as a required
+        #       relation) that might not be satisfied yet (in case of creations)
+        if not self._edited_entity:
+            self._edited_entity = entity
+        
+    def delete_entities(self, eidtypes):
+        """delete entities from the repository"""
+        redirect_info = set()
+        eidtypes = tuple(eidtypes)
+        for eid, etype in eidtypes:
+            entity = self.req.eid_rset(eid, etype).get_entity(0, 0)
+            path, params = entity.after_deletion_path()
+            redirect_info.add( (path, tuple(params.iteritems())) )
+            entity.delete()
+        if len(redirect_info) > 1:
+            # In the face of ambiguity, refuse the temptation to guess.
+            self._after_deletion_path = 'view', ()
+        else:
+            self._after_deletion_path = iter(redirect_info).next()
+        if len(eidtypes) > 1:
+            self.req.set_message(self.req._('entities deleted'))
+        else:
+            self.req.set_message(self.req._('entity deleted'))
+        
+    def delete_relations(self, rdefs):
+        """delete relations from the repository"""
+        # FIXME convert to using the syntax subject:relation:eids
+        execute = self.req.execute
+        for subj, rtype, obj in rdefs:
+            rql = 'DELETE X %s Y where X eid %%(x)s, Y eid %%(y)s' % rtype
+            execute(rql, {'x': subj, 'y': obj}, ('x', 'y'))
+        self.req.set_message(self.req._('relations deleted'))
+    
+    def insert_relations(self, rdefs):
+        """insert relations into the repository"""
+        execute = self.req.execute
+        for subj, rtype, obj in rdefs:
+            rql = 'SET X %s Y where X eid %%(x)s, Y eid %%(y)s' % rtype
+            execute(rql, {'x': subj, 'y': obj}, ('x', 'y'))
+
+    
+    def reset(self):
+        """reset form parameters and redirect to a view determined by given
+        parameters
+        """
+        newparams = {}
+        # sets message if needed
+        if self.req.message:
+            newparams['__message'] = self.req.message
+        if self.req.form.has_key('__action_apply'):
+            self._return_to_edition_view(newparams)
+        if self.req.form.has_key('__action_cancel'):
+            self._return_to_lastpage(newparams)
+        else:
+            self._return_to_original_view(newparams)
+
+
+    def _return_to_original_view(self, newparams):
+        """validate-button case"""
+        # transforms __redirect[*] parameters into regular form parameters
+        newparams.update(redirect_params(self.req.form))
+        # find out if we have some explicit `rql` needs
+        rql = newparams.pop('rql', None)
+        # if rql is needed (explicit __redirectrql or multiple deletions for
+        # instance), we have to use the old `view?rql=...` form
+        if rql:
+            path = 'view'
+            newparams['rql'] = rql
+        elif '__redirectpath' in self.req.form:
+            # if redirect path was explicitly specified in the form, use it
+            path = self.req.form['__redirectpath']
+        elif self._after_deletion_path:
+            # else it should have been set during form processing
+            path, params = self._after_deletion_path
+            params = dict(params) # params given as tuple
+            params.update(newparams)
+            newparams = params
+        elif self._edited_entity:
+            path = self._edited_entity.rest_path()
+        else:
+            path = 'view'
+        url = self.build_url(path, **newparams)
+        url = append_url_params(url, self.req.form.get('__redirectparams'))
+        raise Redirect(url)
+    
+
+    def _return_to_edition_view(self, newparams):
+        """apply-button case"""
+        form = self.req.form
+        if self._edited_entity:
+            path = self._edited_entity.rest_path()
+            newparams.pop('rql', None)
+        # else, fallback on the old `view?rql=...` url form
+        elif 'rql' in self.req.form:
+            path = 'view'
+            newparams['rql'] = form['rql']
+        else:
+            self.warning("the edited data seems inconsistent")
+            path = 'view'
+        # pick up the correct edition view
+        if form.get('__form_id'):
+            newparams['vid'] = form['__form_id']
+        # re-insert copy redirection parameters
+        for redirectparam in NAV_FORM_PARAMETERS:
+            if redirectparam in form:
+                newparams[redirectparam] = form[redirectparam]
+        raise Redirect(self.build_url(path, **newparams))
+
+
+    def _return_to_lastpage(self, newparams):
+        """cancel-button case: in this case we are always expecting to go back
+        where we came from, and this is not easy. Currently we suppose that
+        __redirectpath is specifying that place if found, else we look in the
+        request breadcrumbs for the last visited page.
+        """
+        if '__redirectpath' in self.req.form:
+            # if redirect path was explicitly specified in the form, use it
+            path = self.req.form['__redirectpath']
+            url = self.build_url(path, **newparams)
+            url = append_url_params(url, self.req.form.get('__redirectparams'))
+        else:
+            url = self.req.last_visited_page()
+        raise Redirect(url)
+
+
+from cubicweb import set_log_methods
+set_log_methods(Controller, LOGGER)
+
Binary file web/data/asc.gif has changed
Binary file web/data/banner.png has changed
Binary file web/data/bg.gif has changed
Binary file web/data/bg_trame_grise.png has changed
Binary file web/data/black-check.png has changed
Binary file web/data/bullet.png has changed
Binary file web/data/bullet_orange.png has changed
Binary file web/data/button.png has changed
Binary file web/data/calendar.gif has changed
Binary file web/data/critical.png has changed
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/cubicweb.acl.css	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,54 @@
+/* styles for access control forms
+ *
+ *  :organization: Logilab
+ *  :copyright: 2003-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+ */
+
+/******************************************************************************/
+/* security edition form (views/management.py)                                */
+/******************************************************************************/
+
+table.schemaInfo {
+  margin: 1ex 1em;
+  text-align: left;
+  border: 1px solid black;
+  border-collapse: collapse;
+}
+
+table.schemaInfo th,
+table.schemaInfo td {
+  padding: 0em 1em;
+  border: 1px solid black;
+}
+
+/******************************************************************************/
+/* user groups edition form (views/euser.py)                                  */
+/******************************************************************************/
+
+table#groupedit {
+  margin: 1ex 1em;
+  text-align: left;
+  border: 1px solid black;
+  border-collapse: collapse;
+}
+
+table#groupedit th,
+table#groupedit td {
+  padding: 0.5em 1em;
+}
+
+table#groupedit tr {
+  border-bottom: 1px solid black;
+}
+
+table#groupedit tr.nogroup {
+  border: 1px solid red;
+  margin: 1px;
+}
+
+table#groupedit td {
+  text-align: center;
+  padding: 0.5em;
+}
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/cubicweb.ajax.js	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,375 @@
+/*
+ *  :organization: Logilab
+ *  :copyright: 2003-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+ */
+
+CubicWeb.require('python.js');
+CubicWeb.require('htmlhelpers.js');
+
+var JSON_BASE_URL = baseuri() + 'json?';
+
+// cubicweb loadxhtml plugin to make jquery handle xhtml response
+jQuery.fn.loadxhtml = function(url, data, reqtype, mode) {
+    var ajax = null;
+    if (reqtype == 'post') {
+	ajax = jQuery.post;
+    } else {
+	ajax = jQuery.get;
+    }
+    if (this.size() > 1) {
+	log('loadxhtml was called with more than one element');
+    }
+    mode = mode || 'replace';
+    var callback = null;
+    if (data && data.callback) {
+	callback = data.callback;
+	delete data.callback;
+    }
+    var node = this.get(0); // only consider the first element
+    ajax(url, data, function(response) {
+	var domnode = getDomFromResponse(response);
+	if (mode == 'swap') {
+	    var origId = node.id;
+	    node = swapDOM(node, domnode);
+	    if (!node.id) {
+		node.id = origId;
+	    }
+	} else if (mode == 'replace') {
+	    jQuery(node).empty().append(domnode);
+	} else if (mode == 'append') {
+	    jQuery(node).append(domnode);
+	}
+	// find sortable tables if there are some
+	if (typeof(Sortable) != 'undefined') {
+	    Sortable.sortTables(node);
+	}
+	// find textareas and wrap them if there are some
+	if (typeof(FCKeditor) != 'undefined') {
+	    buildWysiwygEditors(node);
+	}
+
+	if (typeof initFacetBoxEvents != 'undefined') {
+	    initFacetBoxEvents(node);
+	}
+
+	if (typeof buildWidgets != 'undefined') {
+	    buildWidgets(node);
+	}
+
+	while (jQuery.isFunction(callback)) {
+	    callback = callback.apply(this, [domnode]);
+	}
+    });
+}
+
+
+
+/* finds each dynamic fragment in the page and executes the
+ * the associated RQL to build them (Async call)
+ */
+function loadDynamicFragments() {
+    var fragments = getElementsByTagAndClassName('div', 'dynamicFragment');
+    if (fragments.length == 0) {
+	return;
+    }
+    if (typeof LOADING_MSG == 'undefined') {
+	LOADING_MSG = 'loading'; // this is only a safety belt, it should not happen
+    }
+    for(var i=0; i<fragments.length; i++) {
+	var fragment = fragments[i];
+	fragment.innerHTML = '<h3>' + LOADING_MSG + ' ... <img src="data/loading.gif" /></h3>';
+	var rql = getNodeAttribute(fragment, 'cubicweb:rql');
+	var vid = getNodeAttribute(fragment, 'cubicweb:vid');
+        var extraparams = {};
+	var actrql = getNodeAttribute(fragment, 'cubicweb:actualrql');
+	if (actrql) { extraparams['actualrql'] = actrql; }
+	var fbvid = getNodeAttribute(fragment, 'cubicweb:fallbackvid');
+	if (fbvid) { extraparams['fallbackvid'] = fbvid; }
+
+	replacePageChunk(fragment.id, rql, vid, extraparams);
+    }
+}
+
+jQuery(document).ready(loadDynamicFragments);
+
+//============= base AJAX functions to make remote calls =====================//
+
+
+/*
+ * This function will call **synchronously** a remote method on the cubicweb server
+ * @param fname: the function name to call (as exposed by the JSONController)
+ * @param args: the list of arguments to pass the function
+ */
+function remote_exec(fname) {
+    setProgressCursor();
+    var props = {'mode' : "remote", 'fname' : fname, 'pageid' : pageid,
+     		 'arg': map(jQuery.toJSON, sliceList(arguments, 1))};
+    var result  = jQuery.ajax({url: JSON_BASE_URL, data: props, async: false}).responseText;
+    result = evalJSON(result);
+    resetCursor();
+    return result;
+}
+
+function remoteCallFailed(err, req) {
+    if (req.status == 500) {
+	updateMessage(err);
+    } else {
+	updateMessage(_("an error occured while processing your request"));
+    }
+}
+
+/*
+ * This function is the equivalent of MochiKit's loadJSONDoc but
+ * uses POST instead of GET
+ */
+function loadJSONDocUsingPOST(url, queryargs, mode) {
+    mode = mode || 'remote';
+    setProgressCursor();
+    var dataType = (mode == 'remote') ? "json":null;
+    var deferred = loadJSON(url, queryargs, 'POST', dataType);
+    deferred = deferred.addErrback(remoteCallFailed);
+//     if (mode == 'remote') {
+// 	deferred = deferred.addCallbacks(evalJSONRequest);
+//     }
+    deferred = deferred.addCallback(resetCursor);
+    return deferred;
+}
+
+
+function _buildRemoteArgs(fname) {
+    return  {'mode' : "remote", 'fname' : fname, 'pageid' : pageid,
+     	     'arg': map(jQuery.toJSON, sliceList(arguments, 1))};
+}
+
+/*
+ * This function will call **asynchronously** a remote method on the cubicweb server
+ * This function is a low level one. You should use `async_remote_exec` or
+ * `async_rawremote_exec` instead.
+ *
+ * @param fname: the function name to call (as exposed by the JSONController)
+ * @param funcargs: the function's arguments
+ * @param mode: rawremote or remote
+ */
+function _async_exec(fname, funcargs, mode) {
+    setProgressCursor();
+    var props = {'mode' : mode, 'fname' : fname, 'pageid' : pageid};
+    var args = map(urlEncode, map(jQuery.toJSON, funcargs));
+    args.unshift(''); // this is to be able to use join() directly
+    var queryargs = as_url(props) + args.join('&arg=');
+    return loadJSONDocUsingPOST(JSON_BASE_URL, queryargs, mode);
+}
+
+/*
+ * This function will call **asynchronously** a remote method on the cubicweb server
+ * @param fname: the function name to call (as exposed by the JSONController)
+ * additional arguments will be directly passed to the specified function
+ * Expected response type is Json.
+ */
+function async_remote_exec(fname /* ... */) {
+    return _async_exec(fname, sliceList(arguments, 1), 'remote');
+}
+
+/*
+ * This version of _async_exec doesn't expect a json response.
+ * It looks at http headers to guess the response type.
+ */
+function async_rawremote_exec(fname /* ... */) {
+    return _async_exec(fname, sliceList(arguments, 1), 'rawremote');
+}
+
+/*
+ * This function will call **asynchronously** a remote method on the cubicweb server
+ * @param fname: the function name to call (as exposed by the JSONController)
+ * @param varargs: the list of arguments to pass to the function
+ * This is an alternative form of `async_remote_exec` provided for convenience
+ */
+function async_remote_exec_varargs(fname, varargs) {
+    return _async_exec(fname, varargs, 'remote');
+}
+
+/* emulation of gettext's _ shortcut
+ */
+function _(message) {
+    return remote_exec('i18n', [message])[0];
+}
+
+function rqlexec(rql) {
+    return async_remote_exec('rql', rql);
+}
+
+function userCallback(cbname) {
+    async_remote_exec('user_callback', cbname);
+}
+
+function unloadPageData() {
+    // NOTE: do not make async calls on unload if you want to avoid
+    //       strange bugs
+    remote_exec('unload_page_data');
+}
+
+function openHash() {
+    if (document.location.hash) {
+	var nid = document.location.hash.replace('#', '');
+	var node = jQuery('#' + nid);
+	if (node) { removeElementClass(node, "hidden"); }
+    };
+}
+jQuery(document).ready(openHash);
+
+function reloadComponent(compid, rql, registry, nodeid, extraargs) {
+    registry = registry || 'components';
+    rql = rql || '';
+    nodeid = nodeid || (compid + 'Component');
+    extraargs = extraargs || {};
+  log('extraargs =', extraargs);
+    var node = getNode(nodeid);
+    var d = async_rawremote_exec('component', compid, rql, registry, extraargs);
+    d.addCallback(function(result, req) {
+	var domnode = getDomFromResponse(result);
+	if (node) {
+	    // make sure the component is visible
+	    removeElementClass(node, "hidden");
+	    swapDOM(node, domnode);
+	}
+    });
+    d.addCallback(resetCursor);
+    d.addErrback(function(xxx) {
+	updateMessage(_("an error occured"));
+	log(xxx);
+    });
+  return d;
+}
+
+/* XXX: HTML architecture of cubicweb boxes is a bit strange */
+function reloadBox(boxid, rql) {
+    reloadComponent(boxid, rql, 'boxes', boxid);
+}
+
+function userCallbackThenUpdateUI(cbname, compid, rql, msg, registry, nodeid) {
+    var d = async_remote_exec('user_callback', cbname);
+    d.addCallback(function() {
+	reloadComponent(compid, rql, registry, nodeid);
+	if (msg) { updateMessage(msg); }
+    });
+    d.addCallback(resetCursor);
+    d.addErrback(function(xxx) {
+	updateMessage(_("an error occured"));
+	log(xxx);
+	return resetCursor();
+    });
+}
+
+function userCallbackThenReloadPage(cbname, msg) {
+    var d = async_remote_exec('user_callback', cbname);
+    d.addCallback(function() {
+	window.location.reload();
+	if (msg) { updateMessage(msg); }
+    });
+    d.addCallback(resetCursor);
+    d.addErrback(function(xxx) {
+	updateMessage(_("an error occured"));
+	log(xxx);
+	return resetCursor();
+    });
+}
+
+/*
+ * unregisters the python function registered on the server's side
+ * while the page was generated.
+ */
+function unregisterUserCallback(cbname) {
+    d = async_remote_exec('unregister_user_callback', cbname);
+    d.addCallback(function() {resetCursor();});
+    d.addErrback(function(xxx) {
+	updateMessage(_("an error occured"));
+	log(xxx);
+	return resetCursor();
+    });
+}
+
+
+/* executes an async query to the server and replaces a node's
+ * content with the query result
+ *
+ * @param nodeId the placeholder node's id
+ * @param rql the RQL query
+ * @param vid the vid to apply to the RQL selection (default if not specified)
+ * @param extraparams table of additional query parameters
+ */
+function replacePageChunk(nodeId, rql, vid, extraparams, /* ... */ swap, callback) {
+    var params = null;
+    if (callback) {
+	params = {callback: callback};
+    }
+
+    var node = jQuery('#' + nodeId)[0];
+    var props = {};
+    if (node) {
+	props['rql'] = rql;
+	props['pageid'] = pageid;
+	if (vid) { props['vid'] = vid; }
+	if (extraparams) { jQuery.extend(props, extraparams); }
+	// FIXME we need to do as_url(props) manually instead of
+	// passing `props` directly to loadxml because replacePageChunk
+	// is sometimes called (abusively) with some extra parameters in `vid`
+	var mode = swap?'swap':'replace';
+	var url = JSON_BASE_URL + as_url(props);
+	jQuery(node).loadxhtml(url, params, 'get', mode);
+    } else {
+	log('Node', nodeId, 'not found');
+    }
+}
+
+/* XXX: this function should go in edition.js but as for now, htmlReplace
+ * references it.
+ *
+ * replace all textareas with fckeditors.
+ */
+function buildWysiwygEditors(parent) {
+    jQuery('textarea').each(function () {
+	if (this.getAttribute('cubicweb:type', 'wysiwyg')) {
+	    if (typeof FCKeditor != "undefined") {
+		var fck = new FCKeditor(this.id);
+		fck.Config['CustomConfigurationsPath'] = fckconfigpath;
+		fck.Config['DefaultLanguage'] = fcklang;
+		fck.BasePath = "fckeditor/";
+		fck.ReplaceTextarea();
+	    } else {
+		log('fckeditor could not be found.');
+	    }
+	}
+    });
+}
+
+jQuery(document).ready(buildWysiwygEditors);
+
+
+/* convenience function that returns a DOM node based on req's result. */
+function getDomFromResponse(response) {
+    if (typeof(response) == 'string') {
+	return html2dom(response);
+    }
+    var doc = response.documentElement;
+    var children = doc.childNodes;
+    if (!children.length) {
+	// no child (error cases) => return the whole document
+	return doc.cloneNode(true);
+    }
+    if (children.length == 1) {
+	// only one child => return it
+	return children[0].cloneNode(true);
+    }
+    // several children => wrap them in a single node and return the wrap
+    return DIV(null, map(methodcaller('cloneNode', true), children));
+}
+
+function postJSON(url, data, callback) {
+    return jQuery.post(url, data, callback, 'json');
+}
+
+function getJSON(url, data, callback){
+    return jQuery.get(url, data, callback, 'json');
+}
+
+CubicWeb.provide('ajax.js');
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/cubicweb.bookmarks.js	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,10 @@
+CubicWeb.require('ajax.js');
+
+function removeBookmark(beid) {
+    d = async_remote_exec('delete_bookmark', beid);
+    d.addCallback(function(boxcontent) {
+	    reloadComponent('bookmarks_box', '', 'boxes', 'bookmarks_box');
+  	document.location.hash = '#header';
+ 	updateMessage(_("bookmark has been removed"));
+    });
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/cubicweb.calendar.css	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,341 @@
+/* styles for the calendar views
+ *
+ *  :organization: Logilab
+ *  :copyright: 2003-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+ */
+
+/******************************************************************************/
+/* one month calendar                                                         */
+/******************************************************************************/
+
+table.omcalendar {
+  background: #eeeeff;
+  width: 100%;
+  table-layout: fixed;
+  border: 1px solid #ccc;
+  border-collapse: separate;
+}
+
+table.omcalendar tr th {
+  background:  #e7e7e7;
+  color: #2952A3;
+  font-weight: bold;
+  border-bottom: 1px solid #ddd;
+}
+
+table.omcalendar tr td {
+  border: 1px solid #f0faff;
+  padding: 0px;
+  font-family: Arial, Sans-Serif;
+  color: #2952A3;
+  background: #fff;
+  border-bottom: 1px solid #ddd;
+}
+
+table.omcalendar tr td a {
+  font-family: Arial, Sans-Serif;
+  color: #2952A3;
+}
+
+table.omcalendar tr td div.cellContent {
+  margin: 0px;
+  padding: 0px;
+}
+
+table.omcalendar tr td div.calCellTitle {
+  width: 100%;
+}
+
+table.omcalendar tr td div.calCellTitle div.day {
+  display: block;
+  float: left;
+  clear: both;
+  text-align: left;
+}
+
+table.omcalendar tr td div.calCellTitle div.cmd {
+/*  display: none;
+  float: right;
+  clear: both; */
+  text-align: right;
+}
+
+table.omcalendar tr td div.calCellTitle div.cmd a {
+  display: none;
+}
+
+table.omcalendar tr td:hover div.calCellTitle div.cmd a {
+/* XXX to tweak (doesn't work! what a surprise)*/
+  display: inline;
+  float: right;
+  clear: none;
+  text-align: right;
+}
+
+table.omcalendar tr td div.calCellTitle div.stopper {
+  clear:pos;
+}
+
+table.omcalendar tr td {
+  padding: 3px 0.5em 1em;                         
+}
+
+table.omcalendar tr td div.task {
+  border-top: 1px solid #ddd;
+  height: 2.5ex;
+}
+
+table.omcalendar tr td div.task div.tooltip { 
+  display: none; 
+}
+
+table.omcalendar tr td:hover div.task:hover div.tooltip { 
+  font-style: normal;
+  display: block;
+  position: absolute;
+  padding: 5px;
+  color: #000;
+  border: 1px solid #bbb;
+  background: #ffc;
+  width:200px;
+  z-index: 100;
+  overflow: visible;
+}
+
+
+table.omcalendar tr td.outOfRange { background-color: #ddd; }
+
+
+table.omcalendar div.col0       { background-color: #FFB117; }
+table.omcalendar div.col1       { background-color: #FFF07D; }
+table.omcalendar div.col2       { background-color: #E0FF7D; }
+table.omcalendar div.col3       { background-color: #C1FF7D; }
+table.omcalendar div.col4       { background-color: #02FF7D; }
+table.omcalendar div.col5       { background-color: #7DFF97; }
+table.omcalendar div.colb       { background-color: #7DFFC6; }
+table.omcalendar div.col7       { background-color: #7DDBFF; }
+table.omcalendar div.col8       { background-color: #7DACFF; }
+table.omcalendar div.col9       { background-color: #7D8DFF; }
+table.omcalendar div.cola       { background-color: #9C7DFF; }
+table.omcalendar div.col6       { background-color: #BC7DFF; }
+
+table.omcalendar div.col0:hover { background-color: #EFE117; }
+table.omcalendar div.col1:hover { background-color: #EFE07D; }
+table.omcalendar div.col2:hover { background-color: #E0DF7D; }
+table.omcalendar div.col3:hover { background-color: #C1DF7D; }
+table.omcalendar div.col4:hover { background-color: #02DF7D; }
+table.omcalendar div.col5:hover { background-color: #70EF97; }
+table.omcalendar div.colb:hover { background-color: #70EFC6; }
+table.omcalendar div.col7:hover { background-color: #70DBEF; }
+table.omcalendar div.col8:hover { background-color: #70ACEF; }
+table.omcalendar div.col9:hover { background-color: #708DEF; }
+table.omcalendar div.cola:hover { background-color: #9C70EF; }
+table.omcalendar div.col6:hover { background-color: #BC70EF; }
+
+
+/******************************************************************************/
+/* one week calendar                                                          */
+/******************************************************************************/
+
+table#week {
+  min-height: 600px;
+  width: 100%;
+  table-layout: auto;
+  
+}
+
+
+table#week tr td div.hour {
+  vertical-align: top;
+  height: 8ex;
+  width: 100%;
+  position: relative;
+  border-bottom: 1px dotted #ccc;
+  /*padding-top: 1ex;*/
+}
+
+table#week tr th.today {
+  color: red;
+}
+
+table#week tr td div.hourline {
+  height: 0px;
+  width: 100%;
+  position: relative;
+  border-bottom: 1px dotted #ccc;
+  z-index: 10;
+  left: 0;
+}
+
+table#week tr td {
+  vertical-align: top;
+  height: 96ex;
+  width: 14.2%;
+  position: relative;
+}
+
+table#week tr td div.columndiv {
+  position: relative;
+  height: 102%;
+}
+
+table#week tr td div.columndiv div.task {
+  position: absolute;
+  overflow: hidden;
+}
+
+table#week tr td div.columndiv div.task div.bottommarker {
+  position: absolute;
+  bottom: 0;
+  width: 100%;
+  left: 0;
+  z-index: 5;
+  background: transparent;
+}
+table#week tr td div.columndiv div.task div.bottommarkerline {
+  background: red;
+  z-index: 5;
+  padding: 0;
+}
+table#week tr td div.columndiv div.task:hover {
+  overflow: visible;
+}
+
+table#week tr th.transparent {
+  background: transparent;
+  border: none;
+}
+
+
+/******************************************************************************/
+/* XXX old calendar views, to remove once old_calendar.py is removed          */
+/******************************************************************************/
+
+table.calendarPageHeader,
+table.smallCalendars, 
+ table.bigCalendars {
+ width: 90%;
+ border-collapse:separate; 
+ margin: 0px 1em;
+}
+
+
+table.smallCalendars td.calendar{
+ width: 33%;
+}
+
+td.calendar table {
+ margin: 1em 2em;
+ width: 80%;
+}
+
+.calendar th.month {
+ font-weight:bold;
+ padding-bottom:0.2em;
+ background: #cfceb7;
+}
+
+.calendar th.month a{ 
+ font: bold 110%  Georgia, Verdana;
+ color : #fff;
+ }
+
+table.weekCalendar{ 
+}
+
+table.weekCalendar th{ 
+ text-align : left;
+ padding: 0.6em 0.4em;
+ }
+
+table.weekCalendar td{ 
+ padding: 0.2em 0.4em }
+
+.semesterCalendar .amCell, .semesterCalendar .amCellEmpty{
+  margin-bottom:0;
+  border-style: none;
+  border-right:thin dotted;
+}
+
+table.semesterCalendar th{ 
+ padding: 0.6em 0.4em;
+}
+
+.semesterCalendar .pmCell, .semesterCalendar .pmCellEmpty{
+  border: none;
+  border-left:1px dotted #ccc;
+}
+
+.weeknum{ 
+ width:10%
+ }
+
+.cell, .cellEmpty{ 
+ border:1px solid #ccc;
+ padding: 3px 0.5em 2em;
+ width:10%;
+}
+
+.cellDay{ 
+ border:1px solid #ccc;
+ border-bottom : none;
+ padding: 3px 0.5em 3px;  
+ width:10%;}
+
+.amCell, .pmCell,
+.amCellEmpty, .pmCellEmpty{
+ padding: 3px 0.5em 3px;  
+ border:1px solid #ccc;
+ border-top:none;
+ width:10%;
+}
+
+.amCell, .amCellEmpty{
+ border-bottom:none;
+}
+
+.pmCell, .pmCellEmpty{
+ border-top:none;
+}
+
+.cellTitle {
+ font: bold 100% Arial, sans-serif;
+}
+
+.weekTitle {
+  padding:1em;
+}
+
+.weekCell, .weekEmptyCell {
+  border-top: 0px;
+}
+
+.cell span.cellTitle,
+.cellEmpty span.cellTitle {
+ background-color:transparent; 
+}
+
+div.cellContent{ 
+ padding: 0.1em;
+ font-size:90%;
+ }
+
+.weeknum, th.weekday{ 
+ padding:0.2em 0.4em;
+ color : #666;
+ font-size:90%;}
+
+div.event{ 
+ padding : 0.1em 0px;
+ margin:0.2em 0px; 
+ background: #eee;
+}
+
+td.prev {
+  text-align: left;
+}
+
+td.next {
+  text-align: right;
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/cubicweb.calendar.js	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,320 @@
+/*
+ *  This file contains Calendar utilities
+ *  :organization: Logilab
+ *  :copyright: 2003-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+ */
+
+CubicWeb.require('python.js');
+CubicWeb.require('ajax.js');
+
+// IMPORTANT NOTE: the variables DAYNAMES AND MONTHNAMES will be added
+//                 by cubicweb automatically
+
+// dynamically computed (and cached)
+var _CAL_HEADER = null;
+
+TODAY = new Date();
+
+
+/*
+ * Calendar (graphical) widget
+ * public methods are :
+ *   __init__ :
+ *    @param containerId: the DOM node's ID where the calendar will be displayed
+ *    @param inputId: which input needs to be updated when a date is selected
+ *    @param year, @param month: year and month to be displayed
+ *    @param cssclass: CSS class of the calendar widget (default is commandCal)
+ *
+ *   show() / hide():
+ *    show or hide the calendar widget
+ *
+ *   toggle():
+ *    show (resp. hide) the calendar if it's hidden (resp. displayed)
+ * 
+ *   displayNextMonth(): (resp. displayPreviousMonth())
+ *    update the calendar to display next (resp. previous) month
+ */
+Calendar = function(containerId, inputId, year, month, cssclass) {
+    this.containerId = containerId;
+    this.inputId = inputId;
+    this.year = year;
+    this.month = month-1; // Javascript's counter starts at 0 for january
+    this.cssclass = cssclass || "popupCalendar";
+    this.visible = false;
+    this.domtable = null;
+
+    this.cellprops = { 'onclick'     : function() {dateSelected(this, containerId); },
+		       'onmouseover' : function() {this.style.fontWeight = 'bold'; },
+		       'onmouseout'  : function() {this.style.fontWeight = 'normal';}
+		     }
+
+    this.todayprops = jQuery.extend({}, this.cellprops, {'class' : 'today'});
+
+    this._rowdisplay = function(row) {
+	return TR(null, map(partial(TD, this.cellprops), row));
+    }
+
+    this._makecell = function(cellinfo) {
+	return TD(cellinfo[0], cellinfo[1]);
+    }
+
+    /* utility function (the only use for now is inside the calendar) */
+    this._uppercaseFirst = function(s) { return s.charAt(0).toUpperCase(); }
+    
+    /* accepts the cells data and builds the corresponding TR nodes
+     * @param rows a list of list of couples (daynum, cssprops)
+     */
+    this._domForRows = function(rows) {
+	var lines = []
+	for (i=0; i<rows.length; i++) {
+	    lines.push(TR(null, map(this._makecell, rows[i])));
+	}
+	return lines;
+    }
+
+    /* builds the calendar headers */
+    this._headdisplay = function(row) {
+	if (_CAL_HEADER) {
+	    return _CAL_HEADER;
+	}
+	daynames = map(this._uppercaseFirst, DAYNAMES);
+	_CAL_HEADER = TR(null, map(partial(TH, null), daynames));
+	return _CAL_HEADER;
+    }
+    
+    this._getrows = function() {
+	var rows = [];
+	var firstday = new Date(this.year, this.month, 1);
+	var stopdate = firstday.nextMonth();
+	var curdate = firstday.sub(firstday.getRealDay());
+	while (curdate.getTime() < stopdate) {
+	    var row = []
+	    for (var i=0; i<7; i++) {
+		if (curdate.getMonth() == this.month) {
+		    props = curdate.equals(TODAY) ? this.todayprops:this.cellprops;
+		    row.push([props, curdate.getDate()]);
+		} else {
+		    row.push([this.cellprops, ""]);
+		}
+		curdate.iadd(1);
+	    }
+	    rows.push(row);
+	}
+	return rows;
+    }
+
+    this._makecal = function() {
+	var rows = this._getrows();
+	var monthname = MONTHNAMES[this.month] + " " + this.year;
+	var prevlink = "javascript: togglePreviousMonth('" + this.containerId + "');";
+	var nextlink = "javascript: toggleNextMonth('" + this.containerId + "');";
+	this.domtable = TABLE({'class': this.cssclass},
+			      THEAD(null, TR(null,
+					     TH(null, A({'href' : prevlink}, "<<")),
+					     TH({'colspan' : 5, 'style' : "text-align: center;"}, monthname),
+					     TH(null, A({'href' : nextlink}, ">>")))),
+			      TBODY(null,
+				    this._headdisplay(),
+				    this._domForRows(rows))
+			     );
+	return this.domtable;
+    }
+
+    this._updateDiv = function() {
+	if (!this.domtable) {
+	    this._makecal();
+	}
+	jqNode(this.containerId).empty().append(this.domtable);
+	// replaceChildNodes($(this.containerId), this.domtable);
+    }
+
+    this.displayNextMonth = function() {
+	this.domtable = null;
+	if (this.month == 11) {
+	    this.year++;
+	}
+	this.month = (this.month+1) % 12;
+	this._updateDiv();
+    }
+
+    this.displayPreviousMonth = function() {
+	this.domtable = null;
+	if (this.month == 0) {
+	    this.year--;
+	}
+	this.month = (this.month+11) % 12;
+	this._updateDiv();
+    }
+    
+    this.show = function() {
+	if (!this.visible) {
+	    container = jqNode(this.containerId);
+	    if (!this.domtable) {
+		this._makecal();
+	    }
+	    container.empty().append(this.domtable);
+	    toggleVisibility(container);
+	    this.visible = true;
+	}
+    }
+
+    this.hide = function(event) {
+	var self;
+	if (event) {
+	    self = event.data.self;
+	} else {
+	    self = this;
+	}
+	if (self.visible) {
+	    toggleVisibility(self.containerId);
+	    self.visible = false;
+	}
+    }
+
+    this.toggle = function() {
+	if (this.visible) {
+	    this.hide();
+	}
+	else {
+	    this.show();
+	}
+    }
+
+    // call hide() when the user explicitly sets the focus on the matching input
+    jqNode(inputId).bind('focus', {'self': this}, this.hide); // connect(inputId, 'onfocus', this, 'hide');
+};
+
+// keep track of each calendar created
+Calendar.REGISTRY = {};
+
+/*
+ * show or hide the calendar associated with `containerId`
+ */	    
+function toggleCalendar(containerId, inputId, year, month) {
+    var cal = Calendar.REGISTRY[containerId];
+    if (!cal) {
+	cal = new Calendar(containerId, inputId, year, month);
+	Calendar.REGISTRY[containerId] = cal;
+    }
+    /* hide other calendars */
+    for (containerId in Calendar.REGISTRY) {
+	var othercal = Calendar.REGISTRY[containerId];
+	if (othercal !== cal) {
+	    othercal.hide();
+	}
+    }
+    cal.toggle();
+}
+
+
+/*
+ * ask the calendar displayed in `containerId` to show the next month
+ */
+function toggleNextMonth(containerId) {
+    var cal = Calendar.REGISTRY[containerId];
+    cal.displayNextMonth();
+}
+
+/*
+ * ask the calendar displayed in `containerId` to show the previous month
+ */
+function togglePreviousMonth(containerId) {
+    var cal = Calendar.REGISTRY[containerId];
+    cal.displayPreviousMonth();
+}
+
+
+/*
+ * Callback called when the user clicked on a cell in the popup calendar
+ */
+function dateSelected(cell, containerId) {
+    var cal = Calendar.REGISTRY[containerId];
+    var input = getNode(cal.inputId);
+    // XXX: the use of innerHTML might cause problems, but it seems to be
+    //      the only way understood by both IE and Mozilla. Otherwise,
+    //      IE accepts innerText and mozilla accepts textContent
+    var selectedDate = new Date(cal.year, cal.month, cell.innerHTML, 12);
+    var xxx = remote_exec("format_date", toISOTimestamp(selectedDate));
+    input.value = xxx;
+    cal.hide();
+}
+
+function whichElement(e)
+{
+var targ;
+if (!e)
+  {
+  var e=window.event;
+  }
+if (e.target)
+  {
+  targ=e.target;
+  }
+else if (e.srcElement)
+  {
+  targ=e.srcElement;
+  }
+if (targ.nodeType==3) // defeat Safari bug
+  {
+  targ = targ.parentNode;
+  }
+  return targ;
+}
+
+function getPosition(element) {
+  var left;
+  var top;
+  var offset;
+  // TODO: deal scrollbar positions also!
+  left = element.offsetLeft;
+  top = element.offsetTop;
+
+  if (element.offsetParent != null)
+    {
+      offset = getPosition(element.offsetParent);
+      left = left + offset[0];
+      top = top + offset[1];
+      
+    }
+  return [left, top];
+}
+
+function getMouseInBlock(event) {
+  var elt = event.target;
+  var x = event.clientX;
+  var y = event.clientY;
+  var w = elt.clientWidth;
+  var h = elt.clientHeight;
+  var offset = getPosition(elt);
+
+  x = 1.0*(x-offset[0])/w;
+  y = 1.0*(y-offset[1])/h;
+  return [x, y];
+}
+function getHourFromMouse(event, hmin, hmax) {
+  var pos = getMouseInBlock(event);
+  var y = pos[1];
+  return Math.floor((hmax-hmin)*y + hmin);
+}
+
+function addCalendarItem(event, hmin, hmax, year, month, day, duration, baseurl) {
+  var hour = getHourFromMouse(event, hmin, hmax);
+
+  if (0<=hour && hour < 24) {
+    baseurl += "&start="+year+"%2F"+month+"%2F"+day+"%20"+hour+":00";
+    baseurl += "&stop="+year+"%2F"+month+"%2F"+day+"%20"+(hour+duration)+":00";
+    
+    stopPropagation(event);
+    window.location.assign(baseurl);
+    return false;
+  }
+  return true;
+}
+
+function stopPropagation(event) {
+  event.cancelBubble = true;
+  if (event.stopPropagation) event.stopPropagation();  
+}
+     
+CubicWeb.provide('calendar.js');
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/cubicweb.calendar_popup.css	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,65 @@
+/* styles for the calendar popup widget used to edit date fields
+ *
+ *  :organization: Logilab
+ *  :copyright: 2003-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+ */
+
+div.calhelper {
+  background: url("calendar.gif") no-repeat;
+  border: 4px solid red;
+  padding-left:18px;
+  width: 10px;
+  height: 10px;
+  display: inline;
+}
+
+div.calpopup {
+  position: absolute;
+  margin-left: 130px;
+  margin-top: -16px;
+  width: 17em;
+}
+
+table.popupCalendar {
+  text-align: center;
+  border: 1px solid #ccc;
+  z-index: 100;
+}
+
+
+table.popupCalendar th {
+  background : #d9d9c1;
+  color: black;
+  padding: 2px 3px;
+}
+
+
+table.popupCalendar th.calTitle,
+table.popupCalendar th.prev,
+table.popupCalendar th.next {
+  color: white;
+  padding: 0px;
+  background : #acac95;
+}
+
+table.popupCalendar th.prev,
+table.popupCalendar th.next {
+  color: orangered;
+  font-size: 50%;
+  font-weight: bold;
+  padding: 2px 0px;
+}
+
+
+table.popupCalendar td {
+  width: 2em;
+  height: 2em;
+  background : #f6f5e1;
+  font-size: 85%;
+}
+
+table.popupCalendar td.today {
+  border: 2px solid black;
+}
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/cubicweb.compat.js	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,500 @@
+/* MochiKit -> jQuery compatibility module */
+
+function forEach(array, func) {
+    for (var i=0, length=array.length; i<length; i++) {
+	func(array[i]);
+    }
+}
+
+function getElementsByTagAndClassName(tag, klass, root) {
+    root = root || document;
+    // FIXME root is not used in this compat implementation
+    return jQuery(tag + '.' + klass);
+}
+
+function map(func, array) {
+    // XXX jQuery tends to simplify lists with only one element:
+    // >>> y = ['a:b:c']
+    // >>> jQuery.map(y, function(y) { return y.split(':');})
+    // ["a", "b", "c"]
+    // where I would expect :
+    // [ ["a", "b", "c"] ]
+    // return jQuery.map(array, func);
+    var result = [];
+    for (var i=0,length=array.length;i<length;i++) {
+	result.push(func(array[i]));
+    }
+    return result;
+}
+
+function findValue(array, element) {
+    return jQuery.inArray(element, array);
+}
+
+function filter(func, array) {
+    return jQuery.grep(array, func);
+}
+
+function noop() {}
+
+function addElementClass(node, klass) {
+    jQuery(node).addClass(klass);
+}
+
+function toggleElementClass(node, klass) {
+    jQuery(node).toggleClass(klass);
+}
+
+function removeElementClass(node, klass) {
+    jQuery(node).removeClass(klass);
+}
+
+hasElementClass = jQuery.className.has
+
+
+function partial(func) {
+    var args = sliceList(arguments, 1);
+    return function() {
+	return func.apply(null, merge(args, arguments));
+    }
+}
+
+
+function log() {
+    // XXX dummy implementation
+    // console.log.apply(arguments); ???
+    var args = [];
+    for (var i=0; i<arguments.length; i++) {
+	args.push(arguments[i]);
+    }
+    if (typeof(window) != "undefined" && window.console
+        && window.console.log) {
+	window.console.log(args.join(' '));
+    }
+}
+
+function getNodeAttribute(node, attribute) {
+    return jQuery(node).attr(attribute);
+}
+
+function isArray(it){ // taken from dojo
+    return it && (it instanceof Array || typeof it == "array");
+}
+
+function isString(it){ // taken from dojo
+    return !!arguments.length && it != null && (typeof it == "string" || it instanceof String);
+}
+
+
+function isArrayLike(it) { // taken from dojo
+    return (it && it !== undefined &&
+	    // keep out built-in constructors (Number, String, ...) which have length
+	    // properties
+	    !isString(it) && !jQuery.isFunction(it) &&
+	    !(it.tagName && it.tagName.toLowerCase() == 'form') &&
+	    (isArray(it) || isFinite(it.length)));
+}
+
+
+function getNode(node) {
+    if (typeof(node) == 'string') {
+        return document.getElementById(node);
+    }
+    return node;
+}
+
+/* safe version of jQuery('#nodeid') because we use ':' in nodeids
+ * which messes with jQuery's selector mechanism
+ */
+function jqNode(node) {
+    node = getNode(node);
+    if (node) {
+	return jQuery(node);
+    }
+    return null;
+}
+
+function evalJSON(json) { // trust source
+    return eval("(" + json + ")");
+}
+
+function urlEncode(str) {
+    if (typeof(encodeURIComponent) != "undefined") {
+        return encodeURIComponent(str).replace(/\'/g, '%27');
+    } else {
+        return escape(str).replace(/\+/g, '%2B').replace(/\"/g,'%22').rval.replace(/\'/g, '%27');
+    }
+}
+
+function swapDOM(dest, src) {
+    dest = getNode(dest);
+    var parent = dest.parentNode;
+    if (src) {
+        src = getNode(src);
+        parent.replaceChild(src, dest);
+    } else {
+        parent.removeChild(dest);
+    }
+    return src;
+}
+
+function replaceChildNodes(node/*, nodes...*/) {
+    var elem = getNode(node);
+    arguments[0] = elem;
+    var child;
+    while ((child = elem.firstChild)) {
+        elem.removeChild(child);
+    }
+    if (arguments.length < 2) {
+        return elem;
+    } else {
+	for (var i=1; i<arguments.length; i++) {
+	    elem.appendChild(arguments[i]);
+	}
+	return elem;
+    }
+}
+
+update = jQuery.extend;
+
+
+function createDomFunction(tag) {
+
+    function builddom(params, children) {
+	var node = document.createElement(tag);
+	for (key in params) {
+	    var value = params[key];
+	    if (key.substring(0, 2) == 'on') {
+		// this is an event handler definition
+		if (typeof value == 'string') {
+		    // literal definition
+		    value = new Function(value);
+		}
+		node[key] = value;
+	    } else { // normal node attribute
+		node.setAttribute(key, params[key]);
+	    }
+	}
+	if (children) {
+	    if (!isArrayLike(children)) {
+		children = [children];
+		for (var i=2; i<arguments.length; i++) {
+		    var arg = arguments[i];
+		    if (isArray(arg)) {
+			children = merge(children, arg);
+		    } else {
+			children.push(arg);
+		    }
+		}
+	    }
+	    for (var i=0; i<children.length; i++) {
+		var child = children[i];
+		if (typeof child == "string" || typeof child == "number") {
+		    child = document.createTextNode(child);
+		}
+		node.appendChild(child);
+	    }
+	}
+	return node;
+    }
+    return builddom;
+}
+
+A = createDomFunction('a');
+BUTTON = createDomFunction('button');
+BR = createDomFunction('br');
+CANVAS = createDomFunction('canvas');
+DD = createDomFunction('dd');
+DIV = createDomFunction('div');
+DL = createDomFunction('dl');
+DT = createDomFunction('dt');
+FIELDSET = createDomFunction('fieldset');
+FORM = createDomFunction('form');
+H1 = createDomFunction('H1');
+H2 = createDomFunction('H2');
+H3 = createDomFunction('H3');
+H4 = createDomFunction('H4');
+H5 = createDomFunction('H5');
+H6 = createDomFunction('H6');
+HR = createDomFunction('hr');
+IMG = createDomFunction('img');
+INPUT = createDomFunction('input');
+LABEL = createDomFunction('label');
+LEGEND = createDomFunction('legend');
+LI = createDomFunction('li');
+OL = createDomFunction('ol');
+OPTGROUP = createDomFunction('optgroup');
+OPTION = createDomFunction('option');
+P = createDomFunction('p');
+PRE = createDomFunction('pre');
+SELECT = createDomFunction('select');
+SPAN = createDomFunction('span');
+STRONG = createDomFunction('strong');
+TABLE = createDomFunction('table');
+TBODY = createDomFunction('tbody');
+TD = createDomFunction('td');
+TEXTAREA = createDomFunction('textarea');
+TFOOT = createDomFunction('tfoot');
+TH = createDomFunction('th');
+THEAD = createDomFunction('thead');
+TR = createDomFunction('tr');
+TT = createDomFunction('tt');
+UL = createDomFunction('ul');
+
+// cubicweb specific
+IFRAME = createDomFunction('iframe');
+
+// dummy, ultra-minimalist implementation of Deferred for jQuery
+function Deferred() {
+    this.__init__(this);
+}
+
+jQuery.extend(Deferred.prototype, {
+    __init__: function() {
+	this.onSuccess = [];
+	this.onFailure = [];
+	this.req = null;
+    },
+
+    addCallback: function(callback) {
+	this.onSuccess.push([callback, sliceList(arguments, 1)]);
+	return this;
+    },
+
+    addErrback: function(callback) {
+	this.onFailure.push([callback, sliceList(arguments, 1)]);
+	return this;
+    },
+
+    success: function(result) {
+	try {
+	    for (var i=0; i<this.onSuccess.length; i++) {
+		var callback = this.onSuccess[i][0];
+		var args = merge([result, this.req], this.onSuccess[i][1]);
+		callback.apply(null, args);
+	    }
+	} catch (error) {
+	    this.error(this.xhr, null, error);
+	}
+    },
+
+    error: function(xhr, status, error) {
+	for (var i=0; i<this.onFailure.length; i++) {
+	    var callback = this.onFailure[i][0];
+	    var args = merge([error, this.req], this.onFailure[i][1]);
+	    callback.apply(null, args);
+	}
+    }
+
+});
+
+
+/** @id MochiKit.DateTime.toISOTime */
+toISOTime = function (date, realISO/* = false */) {
+    if (typeof(date) == "undefined" || date === null) {
+        return null;
+    }
+    var hh = date.getHours();
+    var mm = date.getMinutes();
+    var ss = date.getSeconds();
+    var lst = [
+        ((realISO && (hh < 10)) ? "0" + hh : hh),
+        ((mm < 10) ? "0" + mm : mm),
+        ((ss < 10) ? "0" + ss : ss)
+    ];
+    return lst.join(":");
+};
+
+_padTwo = function (n) {
+    return (n > 9) ? n : "0" + n;
+};
+
+/** @id MochiKit.DateTime.toISODate */
+toISODate = function (date) {
+    if (typeof(date) == "undefined" || date === null) {
+        return null;
+    }
+    return [
+        date.getFullYear(),
+        _padTwo(date.getMonth() + 1),
+        _padTwo(date.getDate())
+    ].join("-");
+};
+
+
+/** @id MochiKit.DateTime.toISOTimeStamp */
+toISOTimestamp = function (date, realISO/* = false*/) {
+    if (typeof(date) == "undefined" || date === null) {
+        return null;
+    }
+    var sep = realISO ? "T" : " ";
+    var foot = realISO ? "Z" : "";
+    if (realISO) {
+        date = new Date(date.getTime() + (date.getTimezoneOffset() * 60000));
+    }
+    return toISODate(date) + sep + toISOTime(date, realISO) + foot;
+};
+
+
+function loadJSON(url, data, type, dataType) {
+    d = new Deferred();
+    jQuery.ajax({
+	url: url,
+	type: type,
+	data: data,
+	dataType: dataType,
+
+	beforeSend: function(xhr) {
+	    d.req = xhr;
+	},
+
+	success: function(data, status) {
+	    d.success(data);
+	},
+
+	error: function(xhr, status, error) {
+	    error = evalJSON(xhr.responseText);
+	    d.error(xhr, status, error['reason']);
+	}
+    });
+    return d;
+}
+
+/* depth-first implementation of the nodeWalk function found
+ * in MochiKit.Base
+ * cf. http://mochikit.com/doc/html/MochiKit/Base.html#fn-nodewalk
+ */
+function nodeWalkDepthFirst(node, visitor) {
+    var children = visitor(node);
+    if (children) {
+	for(var i=0; i<children.length; i++) {
+	    nodeWalkDepthFirst(children[i], visitor);
+	}
+    }
+}
+
+
+/* Returns true if all the given Array-like or string arguments are not empty (obj.length > 0) */
+function isNotEmpty(obj) {
+    for (var i = 0; i < arguments.length; i++) {
+        var o = arguments[i];
+        if (!(o && o.length)) {
+            return false;
+        }
+    }
+    return true;
+}
+
+/** this implementation comes from MochiKit  */
+function formContents(elem/* = document.body */) {
+    var names = [];
+    var values = [];
+    if (typeof(elem) == "undefined" || elem === null) {
+        elem = document.body;
+    } else {
+        elem = getNode(elem);
+    }
+    nodeWalkDepthFirst(elem, function (elem) {
+        var name = elem.name;
+        if (isNotEmpty(name)) {
+            var tagName = elem.tagName.toUpperCase();
+            if (tagName === "INPUT"
+                && (elem.type == "radio" || elem.type == "checkbox")
+                && !elem.checked
+               ) {
+                return null;
+            }
+            if (tagName === "SELECT") {
+                if (elem.type == "select-one") {
+                    if (elem.selectedIndex >= 0) {
+                        var opt = elem.options[elem.selectedIndex];
+                        var v = opt.value;
+                        if (!v) {
+                            var h = opt.outerHTML;
+                            // internet explorer sure does suck.
+                            if (h && !h.match(/^[^>]+\svalue\s*=/i)) {
+                                v = opt.text;
+                            }
+                        }
+                        names.push(name);
+                        values.push(v);
+                        return null;
+                    }
+                    // no form elements?
+                    names.push(name);
+                    values.push("");
+                    return null;
+                } else {
+                    var opts = elem.options;
+                    if (!opts.length) {
+                        names.push(name);
+                        values.push("");
+                        return null;
+                    }
+                    for (var i = 0; i < opts.length; i++) {
+                        var opt = opts[i];
+                        if (!opt.selected) {
+                            continue;
+                        }
+                        var v = opt.value;
+                        if (!v) {
+                            var h = opt.outerHTML;
+                            // internet explorer sure does suck.
+                            if (h && !h.match(/^[^>]+\svalue\s*=/i)) {
+                                v = opt.text;
+                            }
+                        }
+                        names.push(name);
+                        values.push(v);
+                    }
+                    return null;
+                }
+            }
+            if (tagName === "FORM" || tagName === "P" || tagName === "SPAN"
+                || tagName === "DIV"
+               ) {
+                return elem.childNodes;
+            }
+            names.push(name);
+            values.push(elem.value || '');
+            return null;
+        }
+        return elem.childNodes;
+    });
+    return [names, values];
+}
+
+function merge(array1, array2) {
+    var result = [];
+    for (var i=0,length=arguments.length; i<length; i++) {
+	var array = arguments[i];
+	for (var j=0,alength=array.length; j<alength; j++) {
+	    result.push(array[j]);
+	}
+    }
+    return result;
+}
+
+var KEYS = {
+    KEY_ESC: 27,
+    KEY_ENTER: 13
+}
+
+// XHR = null;
+// function test() {
+//     var d = loadJSON('http://crater:9876/json?mode=remote&fname=i18n&pageid=xxx&arg=' + jQuery.toJSON(['modify']));
+//     d = d.addCallback(function (result, xhr) {
+// 	XHR = xhr;
+// 	log('got ajax result 1' + result + xhr);
+// 	log('got ajax result 1' + xhr);
+// 	log('got ajax result 1' + xhr + 'arguments =', arguments.length);
+//     });
+//     d.addCallback(function (x, req, y, z) {
+// 	log('callback 2 x =' + x, ' req=', req, 'y =', y, 'z=',z);
+//     }, 12, 13)
+//     d.addErrback(function (error, xhr) {
+// 	XHR = xhr;
+// 	log('got err', error, ' code =', xhr.status, 'arguments length=', arguments.length);
+//     })
+// }
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/cubicweb.css	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1011 @@
+/*
+ *  :organization: Logilab
+ *  :copyright: 2003-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+ */
+
+/******************************************************************************/
+/* main styles                                                                */
+/******************************************************************************/
+
+*{
+  margin:0px;
+  padding :0px;
+}
+
+html, body {  
+  background: #e2e2e2;
+}
+
+body {
+  font-size: 69%;
+  font-weight: normal;
+  font-family: Verdana, sans-serif;
+}
+
+
+h1 {
+  font-size: 188%;
+  margin: 0.2em 0px 0.3em;
+  border-bottom: 1px solid #000;
+}
+
+
+h2, h3 {
+  margin-top: 0.2em;
+  margin-bottom: 0.3em;
+}
+
+h2 {
+  font-size: 135%;
+}
+
+h3 {
+  font-size: 130%;
+}
+
+h4 {
+  font-size: 120%;
+  margin: 0.2em 0px;
+}
+
+h5 {
+  font-size:110%;
+}
+
+h6{
+  font-size:105%;
+}
+
+a, a:active, a:link, a:visited {
+  color: #ff4500;
+  text-decoration: none;
+}
+
+a:hover{
+  text-decoration: underline;
+}
+
+a.grayedout {
+  display: block;
+  padding-left: 2px;
+  color: #808080;
+}
+
+a.grayedout:visited{
+  color: #808080;
+}
+
+a.grayedout:hover {
+  color: #000;
+}
+
+a img {
+  border: none;
+  text-align: center;
+}
+
+p {
+  margin-top: 0em;
+  margin-bottom: 0.2em;
+  padding-top: 2px;
+}
+
+
+table, td, input, select {
+  font-size: 100%;
+}
+
+table {
+  border-collapse: collapse;
+  border: none;
+}
+
+table th, table td {
+  vertical-align: top;
+}
+
+table td img {
+  vertical-align: middle;
+}
+
+ol {
+  margin: 1px 0px 1px 16px;
+}
+
+ul{
+  margin: 1px 0px 1px 4px;
+  list-style-type: none;
+}
+
+ul li {
+  margin-top: 2px;
+  padding: 0px 0px 2px 8px;
+  background: url("bullet_orange.png") 0% 6px no-repeat;
+}
+
+pre {
+  font-family: Courier, "Courier New", Monaco, monospace;
+  font-size: 100%;
+  color: #000;
+  background-color: #f2f2f2;
+  border: 1px solid #ccc;
+  /*overflow:auto; */
+}
+
+blockquote {
+  font-family: Courier, "Courier New", serif;
+  font-size: 120%;
+  margin: 5px 0px;
+  padding: 0.8em;
+  background-color: #f2f2f2;
+  border: 1px solid #ccc;
+}
+
+note, code {
+  font-size: 120%;
+  color: #000;
+  background-color: #f2f2f2;
+  border: 1px solid #ccc;
+}
+
+input, textarea {
+  padding: 0.2em;
+  vertical-align: middle;
+  border: 1px solid #ccc;
+}
+
+fieldset {
+  border: none;
+}
+
+legend {
+  padding: 0px 2px;
+  font: bold 1em Verdana, sans-serif;
+}
+
+label, .label {
+  font-weight: bold ! important;
+}
+
+iframe {
+  border: 0px;
+}
+
+dt {
+	font-size:	1.17em;
+	font-weight:	600;
+}
+
+dd {
+	margin: 0.6em 0 1.5em 2em;
+}
+
+/******************************************************************************/
+/* generic classes                                                            */
+/******************************************************************************/
+
+.odd {
+  background-color: #f7f6f1;
+}
+
+.even {
+  background-color: transparent;
+}
+
+.hr {
+  border-bottom: 1px dotted #ccc;
+  margin: 1em 0px;
+}
+
+.left {
+  float: left;
+}
+
+.right {
+  float: right;
+}
+
+.clear {
+  clear: both;
+}
+
+.noborder {
+  border: none;
+}
+
+.strong{
+  font-weight:bold;
+}
+
+.title {
+  text-align: left;
+  font-size:  large;
+  font-weight: bold;
+}
+
+.titleUnderline {
+  color: #000;
+  clear: left;
+  margin: 0px;
+  padding-top: 0.5em;
+  border-bottom: 1px solid black;
+}
+
+.helper{
+  font-size: 96%;
+  color: #555544;
+}
+
+.helper:hover {
+  color: #000;
+  cursor: default;
+}
+
+.hidden {
+  display: none;
+  visibility: hidden;
+}
+
+.needsvalidation {
+  font-style: italic;
+  color: grey ! important;
+}
+
+.folder {
+  /* disable odd/even under folder class */
+  background-color: transparent;
+}
+
+li.invisible {
+  list-style: none;
+  background: none;
+  padding: 0px 0px 1px 1px;
+}
+
+li.invisible div {
+  display: inline;
+}
+
+div.row {
+ clear: both;
+ padding-bottom:0.4px
+}
+
+div.row span.label{ 
+ padding-right:1em
+}
+
+div.field {
+  margin-left: 0.2em;
+  display: inline;
+}
+
+div.popup {
+  position: absolute;
+  z-index: 400;
+  background: #fff;
+  border: 1px solid black;
+  text-align: left;
+  float:left;
+}
+
+div.popup ul li a {
+  text-decoration: none;
+  color: black;
+}
+
+/******************************************************************************/
+/* header / layout                                                            */
+/******************************************************************************/
+
+.logo {
+ background: #fff;
+ border-left: #f2f2f2;
+}
+
+span#appliName {
+ font-weight: bold;
+ color: #000;
+ white-space: nowrap;
+}
+
+#header {
+  background: #ff7700 url("banner.png") left top repeat-x;
+  text-align: left;
+}
+
+table#header td {
+  vertical-align: middle;
+}
+
+table#header a {
+color: #000;
+}
+
+td#headtext {
+  width: 100%;
+}
+
+/*FIXME appear with 4px width in IE6*/
+div#stateheader{
+  width: 66%;
+}
+
+div#page {
+  background: #e2e2e2;
+  position: relative;
+  min-height: 800px;
+}
+ 
+td#contentcol {
+  padding: 8px 5px 5px 10px;
+}
+
+
+div#pageContent {
+  clear: both;
+  padding: 10px 1em 2em;
+  background: #ffffff;
+  border: 1px solid #ccc;
+}
+
+#contentheader {
+  margin: 0px;
+  padding: 0.2em 0.5em 0.5em 0.5em;
+}
+
+#contentheader a {
+  color: #000;
+}
+
+td.firstcolumn {
+  width: 220px;
+}
+
+td.navcol {
+  width: 16em;
+}
+
+div.footer {
+  text-align: center;
+}
+div.footer a {
+  color: #000;
+  text-decoration: none;
+}
+
+/******************************************************************************/
+/* help button                                                                */
+/******************************************************************************/
+
+a.help{
+  display: block;
+  margin: 0px 5px 0px 8px;
+  height: 17px;
+  width: 17px;
+  background: url('help.png') 0% 0% no-repeat;
+}
+
+a.help:hover {
+  background-position: 0px -16px;
+  text-decoration: none;
+}
+
+/******************************************************************************/
+/* rql bar                                                                    */
+/******************************************************************************/
+
+div#rqlform {
+  width: 100%;
+}
+
+input#rql{
+ border: 1px solid #ccc;
+ width: 80%;
+ margin-left: 12px;
+}
+
+/******************************************************************************/
+/* user actions menu                                                          */
+/******************************************************************************/
+
+a.logout, a.logout:visited, a.logout:hover{
+  color: #fff;
+  text-decoration: none;
+}
+
+div#userActionsBox {
+  width: 14em;
+  text-align: right;
+}
+
+div#userActionsBox a.popupMenu {
+  color: black;
+  text-decoration: underline;
+}
+
+/******************************************************************************/
+/* buttons                                                                    */
+/******************************************************************************/
+
+input#rqlboxsubmit, input#rqlboxsubmit2 {
+  background: #fffff8 url("go.png") 50% 50% no-repeat;
+  width: 20px;
+  height: 20px;
+  margin: 0px;
+}
+
+input.button,
+input.formButton,
+input.validateButton,
+input.searchButton,
+input.loginButton {
+  border-top: 1px solid #edecd2;
+  border-left: 1px solid #edecd2;
+  border-right: 1px solid #cfceb7;
+  border-bottom: 1px solid #cfceb7;
+  background: #fffff8 url("button.png") bottom left repeat-x;
+}
+
+input.searchButton {
+  background: #f0eff0 url("gradient-grey-up.png") left top repeat-x;
+}
+
+input.button,
+input.validateButton {
+  margin: 1em 1em 0px 0px ! important;
+}
+
+/******************************************************************************/
+/* primary view                                                               */
+/******************************************************************************/
+
+.mainInfo  {
+  margin-right: 1em;
+  padding: 0.2em;
+}
+
+div.mainRelated {
+  border: none;
+  margin-right: 1em;
+  padding: 0.5em 0.2em 0.2em;
+}
+
+div.sideRelated h4,
+div.sideRelated h5 {
+  margin-top: 0px;
+  margin-bottom: 0px;
+}
+
+div.sideRelated {
+  margin-right: 1em;
+  padding: 12px 0px 12px 12px;
+}
+
+div.metadata {
+  font-size: 90%;
+  margin: 5px 0px 3px;
+  color: #666;
+  font-style: italic;
+  text-align: right;
+}
+
+div.section {
+  margin-top: 0.5em;
+  width:100%;
+}
+
+div.section a:hover {
+  text-decoration: none;
+}
+
+
+
+/******************************************************************************/
+/* boxes                                                                      */
+/******************************************************************************/
+
+div.navboxes {
+  padding-left: 3px;
+  margin-top: 8px;
+}
+
+div.boxFrame {
+  width: 100%;
+}
+
+div.boxTitle {
+  padding-top: 0px;
+  padding-bottom: 0.2em;
+  font: bold 100% Georgia;
+  overflow: hidden;
+  color: #fff;
+  background: #ff9900 url("search.png") left bottom repeat-x;
+}
+
+div.searchBoxFrame div.boxTitle,
+div.greyBoxFrame div.boxTitle {
+  background: #cfceb7;
+}
+
+div.boxTitle span,
+div.sideBoxTitle span {
+  padding: 0px 5px;
+  white-space: nowrap;
+}
+
+div.sideBoxTitle span,
+div.searchBoxFrame div.boxTitle span,
+div.greyBoxFrame div.boxTitle span {
+  color: #222211;
+}
+
+.boxFrame a {
+  color: #000;
+}
+
+div.boxContent {
+  padding: 3px 0px;
+  background: #fff;
+  border-top: none;
+}
+
+ul.boxListing {
+  margin: 0px;
+  padding: 0px 3px;
+}
+
+ul.boxListing li,
+ul.boxListing ul li {
+  display: inline;
+  margin: 0px;
+  padding: 0px;
+  background-image: none;
+}
+
+ul.boxListing ul {
+  margin: 0px 0px 0px 7px;
+  padding: 1px 3px;
+}
+
+ul.boxListing a {
+  color: #000;
+  display: block;
+  padding: 1px 9px 1px 3px;
+}
+
+ul.boxListing a.selected {
+  color: #FF4500;
+  font-weight: bold;
+}
+
+ul.boxListing a.boxBookmark:hover,
+ul.boxListing a:hover,
+ul.boxListing ul li a:hover {
+  text-decoration: none;
+  background: #eeedd9;
+  color: #111100;
+}
+
+a.boxMenu {
+  background: transparent url("puce_down.png") 98% 6px no-repeat;
+  display: block;
+  padding: 1px 9px 1px 3px;
+}
+
+a.popupMenu {
+  background: transparent url("puce_down_black.png") 2% 6px no-repeat;
+  padding-left: 2em;
+}
+
+ul.boxListing ul li a:hover {
+  background: #eeedd9  url("bullet_orange.png") 0% 6px no-repeat;
+}
+
+a.boxMenu:hover {
+  background: #eeedd9 url("puce_down.png") 98% 6px no-repeat;
+  cursor: pointer;
+}
+
+ul.boxListing a.boxBookmark {
+  padding-left: 3px;
+  background-image:none;
+  background:#fff;
+}
+
+ul.boxListing ul li a {
+  background: #fff url("bullet_orange.png") 0% 6px no-repeat;
+  padding: 1px 3px 0px 10px;
+}
+
+div.searchBoxFrame div.boxContent {
+  padding: 4px 4px 3px;
+  background: #f0eff0 url("gradient-grey-up.png") left top repeat-x;
+}
+
+div.shadow{
+  height: 14px;
+  background: url("shadow.gif") no-repeat top right;
+}
+
+div.sideBoxTitle {
+  padding: 0.2em 0px;
+  background: #cfceb7;
+  display: block;
+  font: bold 100% Georgia;
+}
+
+div.sideBox {
+  padding: 0.2em 0px;
+  margin-bottom: 0.5em;
+  background: #eeedd9;
+}
+
+div.sideBoxBody {
+  padding: 0.2em 5px;
+}
+
+div.sideBoxBody a {
+  color:#555544;
+}
+
+div.sideBoxBody a:hover {
+  text-decoration: underline;
+}
+
+div.sideBox table td {
+  padding-right: 1em;
+}
+
+div.boxPref {
+  margin: 10px 0px 0px;
+}
+
+div.boxPrefTitle {
+  font-weight: bold;
+  background: #cfceb7;
+  margin-bottom: 6px;
+  padding-bottom: 0.2em;
+  overflow: hidden;
+}
+
+div.boxPrefTitle span{
+ padding:0px 5px;
+}
+
+/******************************************************************************/
+/* inline edition and generic form classes                                    */
+/******************************************************************************/
+
+div.inlineedit {
+  display: none;
+}
+
+div.editableField {
+  display: inline;
+}
+
+div.editableField:hover,
+div.editableField p:hover {
+  background-color: #eeedd9;
+}
+
+option.separator {
+  font-weight: bold;
+  background: #ccc;
+  text-align: center;
+}
+
+input.error {
+  background: transparent url("error.png") 100% 50% no-repeat;
+}
+
+span.error {
+  display: block;
+  font-weight: bold;
+  color: #ed0d0d;
+}
+
+/******************************************************************************/
+/* navigation                                                                 */
+/******************************************************************************/
+
+div#etyperestriction {
+  margin-bottom: 1ex;
+  border-bottom: 1px solid #ccc;
+}
+
+div.navigation a {
+  text-align: center;
+  text-decoration: none;
+}
+
+div.prevnext {
+  width: 100%;
+  margin-bottom: 1em;
+}
+
+div.prevnext a {
+  color: #000;
+}
+
+span.slice a:visited,
+span.slice a:hover{
+  color: #555544;
+}
+
+span.selectedSlice a:visited,
+span.selectedSlice a {
+  color: #000;
+}
+
+/******************************************************************************/
+/* messages                                                                   */
+/******************************************************************************/
+
+.warning,
+.message,
+.errorMessage,
+.searchMessage,
+.statemessage {
+  padding: 0.3em 0.3em 0.3em 1em;
+  font-weight: bold;
+}
+
+.simpleMessage {
+  margin: 4px 0px;
+  font-weight: bold;
+  color: #ff7700;
+}
+
+div#appMsg, div.appMsg {
+  border: 1px solid #cfceb7;
+  margin-bottom: 8px;
+  padding: 3px;
+  background: #f8f8ee;
+}
+
+div#rqlinput {
+  border: 1px solid #cfceb7;
+  margin-bottom: 8px;
+  padding: 3px;
+  background: #cfceb7;
+}
+
+.message {
+  margin: 0px;
+  background: #f8f8ee url("information.png") 5px center no-repeat;
+  padding-left: 15px;
+}
+
+.errorMessage {
+  margin: 10px 0px;
+  padding-left: 25px;
+  background: #f7f6f1 url("critical.png") 2px center no-repeat;
+  color: #ed0d0d;
+  border: 1px solid #cfceb7;
+}
+
+.searchMessage {
+  margin-top: 0.5em;
+  border-top: 1px solid #cfceb7;
+  background: #eeedd9 url("information.png") 0% 50% no-repeat; /*dcdbc7*/
+}
+
+.stateMessage {
+  border: 1px solid #ccc;
+  background: #f8f8ee url("information.png") 10px 50% no-repeat;
+  padding:4px 0px 4px 20px;
+  border-width: 1px 0px 1px 0px;
+}
+
+/* warning messages like "There are too many results ..." */
+.warning {
+  padding-left: 25px;
+  background: #f2f2f2 url("critical.png") 3px 50% no-repeat;
+}
+
+/* label shown in the top-right hand corner during form validation */
+div#progress {
+  position: fixed;
+  right: 5px;
+  top: 0px;
+  background: #222211;
+  color: white;
+  font-weight: bold;
+  display: none;
+}
+
+/******************************************************************************/
+/* listing table                                                              */
+/******************************************************************************/
+
+table.listing {
+ margin: 10px 0em;
+ color: #000;
+ width: 100%;
+ border-right: 1px solid #dfdfdf;
+}
+
+
+table.listing thead th.over {
+  background-color: #746B6B;
+  cursor: pointer;
+}
+
+table.listing th,
+table.listing td {
+  padding: 3px 0px 3px 5px;
+  border: 1px solid #dfdfdf;
+  border-right: none;
+}
+
+table.listing th {
+  font-weight: bold;
+  background: #ebe8d9 url("button.png") repeat-x;
+}
+
+table.listing td a,
+table.listing td a:visited {
+  color: #666;
+}
+
+table.listing a:hover,
+table.listing tr.highlighted td a {
+  color:#000;
+}
+
+table.listing td.top {
+  border: 1px solid white;
+  border-bottom: none;
+  text-align: right ! important;
+  /* insane IE row bug workaround */
+  position: relative;
+  left: -1px;
+  top: -1px;
+}
+
+/******************************************************************************/
+/* drag and drop zone (XXX only used in seo for now)                          */
+/******************************************************************************/
+
+div.droppable {
+  border: 1px dashed black;
+  margin: 2px;
+  padding: 2px;
+  height: 15px;
+}
+
+div.drophover {
+  background: #f4f5ed;
+}
+
+/******************************************************************************/
+/* search box                                                                 */
+/******************************************************************************/
+
+input#norql{
+  width:13em;
+  margin-right: 2px;
+}
+
+/******************************************************************************/
+/* filter box                                                                 */
+/******************************************************************************/
+
+#filter_box input {
+  width: 180px;
+}
+#filter_box label {
+  float: left;
+}
+#filter_box select.radio {
+  width: 47px;
+  float: right;
+}
+#filter_box select {
+  width: 185px;
+}
+#filter_box option.disabled {
+  background: lightgray;
+}
+
+/******************************************************************************/
+/* table filter form                                                          */
+/******************************************************************************/
+
+table.filter th {
+  font-weight: bold;
+  background: #ebe8d9 url("button.png") repeat-x;
+  padding: 0.3em;
+  border-bottom: 1px solid #cfceb7;
+  text-align: left;
+}
+
+table.filter td {
+  padding: 0.6em 0.2em;
+}
+
+table.filter td select {
+  padding:1px 2px;
+}
+
+div#tableActionsBox {
+ direction:rtl;
+ float:right
+}
+
+/******************************************************************************/
+/* error view (views/management.py)                                           */
+/******************************************************************************/
+
+div.pycontext { /* html traceback */
+  font-family: Verdana, sans-serif;
+  font-size: 80%;
+  padding: 1em;
+  margin: 10px 0px 5px 20px;
+  background-color: #dee7ec;
+}
+
+div.pycontext span.name {
+  color: #ff0000;
+}
+
+/******************************************************************************/
+/* index view (views/startup.py)                                              */
+/******************************************************************************/
+
+table.startup {
+  width: 100%;
+}
+
+table.startup td {
+  padding: 0.1em 0.2em;
+}
+
+table.startup td.addcol {
+  text-align: right;
+  width: 0.5em;
+}
+
+table.startup th{
+  padding-top: 3px;
+  padding-bottom: 3px;
+  text-align: left;
+}
+
+/******************************************************************************/
+/* addcombobox                                                                */
+/******************************************************************************/
+
+input#newopt{ 
+ width:120px ; 
+ display:block;
+ float:left;
+ }
+
+div#newvalue{ 
+ margin-top:2px;
+ }
+
+#add_newopt{
+ background: #fffff8 url("go.png") 50% 50% no-repeat;
+ width: 20px;
+ line-height: 20px; 
+ display:block;
+ float:left;
+}
+
+/******************************************************************************/
+/* facets (XXX to continue)                                                   */
+/******************************************************************************/
+
+div.facetBody {
+  height: 10em;
+  overflow: auto;
+}
+
+.facetValueDisabled {
+  text-decoration: underline;
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/cubicweb.edition.js	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,530 @@
+/*
+ *  :organization: Logilab
+ *  :copyright: 2003-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+ */
+
+CubicWeb.require('python.js');
+CubicWeb.require('htmlhelpers.js');
+CubicWeb.require('ajax.js');
+
+
+//============= Eproperty form functions =====================================//
+
+/* called on Eproperty key selection:
+ * - get the selected value
+ * - get a widget according to the key by a sync query to the server
+ * - fill associated div with the returned html
+ *
+ * @param varname the name of the variable as used in the original creation form
+ * @param tabindex the tabindex that should be set on the widget
+ */
+function setPropValueWidget(varname, tabindex) {
+    // key currently selected in the '#pkey:<varname>' combobox
+    // (null/undefined when nothing is selected yet)
+    var key = firstSelected(jQuery('#pkey:'+varname));
+    if (key) {
+	// fetch the widget matching `key` (sync query, see function doc
+	// above) and inject the returned XHTML into the value <div>
+	var args = _buildRemoteArgs('prop_widget', key, varname, tabindex);
+	jQuery('#div:value:'+varname).loadxhtml(JSON_BASE_URL, args, 'post');
+    }
+}
+
+
+// *** EDITION FUNCTIONS ****************************************** //
+
+/*
+ * this function is called when an AJAX form was generated to
+ * make sure tabindex remains consistent
+ */
+function reorderTabindex(start) {
+    var form = getNode('entityForm');
+    var inputTypes = ['INPUT', 'SELECT', 'TEXTAREA'];
+    // renumbering starts after `start` (after 15 when not given)
+    var tabindex = (start==null)?15:start;
+    nodeWalkDepthFirst(form, function(elem) {
+        var tagName = elem.tagName.toUpperCase();
+	if (inputTypes.contains(tagName)) {
+	    // only renumber elements that already carry a tabindex attribute
+	    if (getNodeAttribute(elem, 'tabindex') != null) {
+		tabindex += 1;
+		elem.setAttribute('tabindex', tabindex);
+	    }
+	    // returning null stops the walk below this node
+	    return null;
+	}
+	// walk down element children only (skip text/comment nodes)
+	return filter(isElementNode, elem.childNodes);
+    });
+}
+
+function showMatchingSelect(selectedValue, eid) {
+    if (selectedValue) {
+	divId = 'div' + selectedValue + '_' + eid;
+	var divNode = jQuery('#' + divId);
+	if (!divNode.length) {
+	    var args = {vid: 'unrelateddivs', relation: selectedValue,
+			rql: rql_for_eid(eid), pageid: pageid,
+			'__notemplate': 1};
+	    jQuery.get(JSON_BASE_URL, args, function(response) {
+		// append generated HTML to the cell
+		jQuery('#unrelatedDivs_' + eid).append(getDomFromResponse(response));
+		_showMatchingSelect(eid, jQuery('#' + divId));
+	    });
+	    // deferred = doXHR(JSON_BASE_URL + queryString(args));
+	    // deferred.addCallback(_buildAndShowMatchingSelect, eid, divId);
+	} else {
+	    _showMatchingSelect(eid, divNode);
+	}
+    }
+    else {
+	_showMatchingSelect(eid, null);
+    }
+}
+
+
+
+// @param divStr a HTML string returned by the server
+// function _buildAndShowMatchingSelect(eid, divId, req) {
+//     var tdNode = jQuery('#unrelatedDivs_' + eid);
+//     // append generated HTML to the cell
+//     tdNode.appendChild(getDomFromRequest(req));
+//     _showMatchingSelect(eid, jQuery('#' + divId));
+// }
+
+// @param divNode is a jQuery selection
+// @param eid : the edited entity's eid
+function _showMatchingSelect(eid, divNode) {
+    // hide all divs, and then show the matching one
+    // (would actually be better to directly hide the displayed one)
+    jQuery('#unrelatedDivs_' + eid).children().hide();
+    // divNode not found means 'no relation selected' (i.e. first blank item)
+    if (divNode && divNode.length) {
+	divNode.show();
+    }
+}
+
+// this function builds a Handle to cancel pending insertion
+function buildPendingInsertHandle(elementId, element_name, selectNodeId, eid) {
+   jscall = "javascript: cancelPendingInsert('" + [elementId, element_name, selectNodeId, eid].join("', '") + "')";
+   return A({'class' : 'handle', 'href' : jscall,
+	     'title' : _("cancel this insert")}, '[x]');
+}
+
+// add a new <tr> (cancel handle + link to the entity view) showing the
+// pending insertion, inserted just above the relation-selector row
+function buildEntityLine(relationName, selectedOptionNode, comboId, eid) {
+   // textContent doesn't seem to work on selectedOptionNode
+   var content = selectedOptionNode.firstChild.nodeValue;
+   var handle = buildPendingInsertHandle(selectedOptionNode.id, 'tr', comboId, eid);
+   var link = A({'href' : 'view?rql=' + selectedOptionNode.value,
+	  	 'class' : 'editionPending', 'id' : 'a' + selectedOptionNode.id},
+		content);
+   var tr = TR({'id' : 'tr' + selectedOptionNode.id}, [ TH(null, relationName),
+							TD(null, [handle, link])
+						      ]);
+   try {
+      var separator = getNode('relationSelectorRow_' + eid);
+      //dump('relationSelectorRow_' + eid) XXX warn dump is not implemented in konqueror (at least)
+      // XXX Warning: separator.parentNode is not (always ?) the
+      // table itself, but an intermediate node (TableSectionElement)
+      var tableBody = separator.parentNode;
+      tableBody.insertBefore(tr, separator);
+   } catch(ex) {
+      log("got exception(2)!" + ex);
+   }
+}
+
+function buildEntityCell(relationName, selectedOptionNode, comboId, eid) {
+    var handle = buildPendingInsertHandle(selectedOptionNode.id, 'div_insert_', comboId, eid);
+    var link = A({'href' : 'view?rql=' + selectedOptionNode.value,
+		  'class' : 'editionPending', 'id' : 'a' + selectedOptionNode.id},
+		 content);
+    var div = DIV({'id' : 'div_insert_' + selectedOptionNode.id}, [handle, link]);
+    try {
+	var td = jQuery('#cell'+ relationName +'_'+eid);
+	td.appendChild(div);
+    } catch(ex) {
+	alert("got exception(3)!" + ex);
+    }
+}
+
+function addPendingInsert(optionNode, eid, cell, relname) {
+    var value = getNodeAttribute(optionNode, 'value');
+    if (!value) {
+	// occurs when the first element in the box is selected (which is not
+	// an entity but the combobox title)
+        return;
+    }
+    // 2nd special case
+    if (value.indexOf('http') == 0) {
+	document.location = value;
+	return;
+    }
+    // add hidden parameter
+    var entityForm = jQuery('#entityForm');
+    var oid = optionNode.id.substring(2); // option id is prefixed by "id"
+    remote_exec('add_pending_insert', oid.split(':'));
+    var selectNode = optionNode.parentNode;
+    // remove option node
+    selectNode.removeChild(optionNode);
+    // add line in table
+    if (cell) {
+      // new relation as a cell in multiple edit
+      // var relation_name = relationSelected.getAttribute('value');
+      // relation_name = relation_name.slice(0, relation_name.lastIndexOf('_'));
+      buildEntityCell(relname, optionNode, selectNode.id, eid);
+    }
+    else {
+	var relationSelector = getNode('relationSelector_'+eid);
+	var relationSelected = relationSelector.options[relationSelector.selectedIndex];
+	// new relation as a line in simple edit
+	buildEntityLine(relationSelected.text, optionNode, selectNode.id, eid);
+    }
+}
+
+/*
+ * Remove a pending insertion from the form (and server side), re-inserting
+ * the entity into its source combobox when relevant.
+ * @param elementId : id of the cancelled element ("<oid>:<rtype>...")
+ * @param element_name : dom-id prefix of the node to remove ('tr' or 'div_insert_')
+ * @param comboId : id of the combobox the entity was picked from (or falsy)
+ * @param eid : the edited entity's eid (currently unused here)
+ */
+function cancelPendingInsert(elementId, element_name, comboId, eid) {
+    // remove matching insert element
+    var entityView = jqNode('a' + elementId).text();
+    jqNode(element_name + elementId).remove();
+    if (comboId) {
+	// re-insert option in combobox if it was taken from there
+	var selectNode = getNode(comboId);
+	if (selectNode){
+	   var options = selectNode.options;
+	   var node_id = elementId.substring(0, elementId.indexOf(':'));
+	   options[options.length] = OPTION({'id' : elementId, 'value' : node_id}, entityView);
+	}
+    }
+    remote_exec('remove_pending_insert', elementId.split(':'));
+}
+
+// this function builds a handle to cancel a pending deletion
+function buildPendingDeleteHandle(elementId, eid) {
+  var jscall = "javascript: addPendingDelete('" + elementId + ', ' + eid + "');";
+  return A({'href' : jscall, 'class' : 'pendingDeleteHandle',
+    'title' : _("delete this relation")}, '[x]');
+}
+
+// @param nodeId eid_from:r_type:eid_to
+// @param eid : the edited entity's eid (currently unused here)
+function addPendingDelete(nodeId, eid) {
+    // record the pending deletion server side first...
+    var d = async_remote_exec('add_pending_delete', nodeId.split(':'));
+    d.addCallback(function () {
+	// and strike entity view
+	jqNode('span' + nodeId).addClass('pendingDelete');
+	// replace handle text
+	jqNode('handle' + nodeId).text('+');
+    });
+}
+
+// @param nodeId eid_from:r_type:eid_to
+// @param eid : the edited entity's eid (currently unused here)
+function cancelPendingDelete(nodeId, eid) {
+    // remove the pending deletion server side first...
+    var d = async_remote_exec('remove_pending_delete', nodeId.split(':'));
+    d.addCallback(function () {
+	// reset link's CSS class
+	jqNode('span' + nodeId).removeClass('pendingDelete');
+	// replace handle text
+	jqNode('handle' + nodeId).text('x');
+    });
+}
+
+// @param nodeId eid_from:r_type:eid_to
+// click handler on the deletion handle: toggles between the pending-delete
+// and normal state of the relation
+function togglePendingDelete(nodeId, eid) {
+    // node found means we should cancel deletion
+    if ( hasElementClass(getNode('span' + nodeId), 'pendingDelete') ) {
+	cancelPendingDelete(nodeId, eid);
+    } else {
+	addPendingDelete(nodeId, eid);
+    }
+}
+
+
+/*
+ * Record a batch of pending insertions (a '-'-separated list of
+ * ':'-separated triplets) then go back to the edition form of the
+ * original entity.
+ */
+function selectForAssociation(tripletIdsString, originalEid) {
+    var tripletlist = map(function (x) { return x.split(':'); },
+			  tripletIdsString.split('-'));
+    var d = async_remote_exec('add_pending_inserts', tripletlist);
+    d.addCallback(function () {
+	var args = {vid: 'edition', __mode: 'normal',
+		    rql: rql_for_eid(originalEid)};
+	document.location = 'view?' + as_url(args);
+    });
+
+}
+
+
+function updateInlinedEntitiesCounters(rtype) {
+    jQuery('#inline' + rtype + 'slot span.icounter').each(function (i) {
+	this.innerHTML = i+1;
+    });
+    // var divnode = jQuery('#inline' + rtype + 'slot');
+    // var iforms = getElementsByTagAndClassName('span', 'icounter', divnode);
+    // for (var i=0; i<iforms.length; i++) {
+    //   iforms[i].innerHTML = i+1;
+    // }
+}
+
+/*
+ * makes an AJAX request to get an inline-creation view's content
+ * @param peid : the parent entity eid
+ * @param ptype : the parent entity type
+ * @param ttype : the target (inlined) entity type
+ * @param rtype : the relation type between both entities
+ */
+function addInlineCreationForm(peid, ptype, ttype, rtype, role) {
+    var d = async_rawremote_exec('inline_creation_form', peid, ptype, ttype, rtype, role);
+    d.addCallback(function (response) {
+	// insert the new form (initially hidden) just above the "add new"
+	// link, then slide it into view
+	var linknode = getNode('add' + rtype + ':' + peid + 'link');
+	var form = jQuery(getDomFromResponse(response));
+	form.css('display', 'none');
+	form.insertBefore(linknode.parentNode).slideDown('fast');
+	// setStyle(form, {display: 'none'});
+	// insertSiblingNodesBefore(linknode.parentNode, form);
+	updateInlinedEntitiesCounters(rtype);
+	// slideDown(form, {'duration':0.6});
+	// keep tab order consistent now that a form was inserted
+	reorderTabindex();
+	form.trigger('inlinedform-added');
+	// MochiKit.Signal.signal(CubicWeb, 'inlinedform-added', form);
+    });
+    d.addErrback(function (xxx) {
+	log('xxx =', xxx);
+    });
+}
+
+/*
+ * removes the part of the form used to edit an inlined entity
+ */
+function removeInlineForm(peid, rtype, eid) {
+    // slide the form up, then remove it from the DOM and renumber the
+    // remaining inlined forms
+    jqNode(['div', peid, rtype, eid].join('-')).slideUp('fast', function() {
+	$(this).remove();
+	updateInlinedEntitiesCounters(rtype);
+    });
+}
+
+/*
+ * alternatively adds or removes the hidden input that make the
+ * edition of the relation `rtype` possible between `peid` and `eid`
+ * @param peid : the parent entity eid
+ * @param rtype : the relation type between both entities
+ * @param eid : the inlined entity eid
+ */
+function removeInlinedEntity(peid, rtype, eid) {
+    // the hidden input registering the relation, the form's <div> and the
+    // removal notice share the same '-'-joined id suffix
+    var nodeid = ['rel', peid, rtype, eid].join('-');
+    var divid = ['div', peid, rtype, eid].join('-');
+    var noticeid = ['notice', peid, rtype, eid].join('-');
+    var node = jqNode(nodeid);
+    if (node && node.length) {
+	// remove the hidden input (so the relation won't be submitted),
+	// grey the form out and display the notice
+	node.remove();
+	jqNode(divid).fadeTo('fast', 0.5);
+	// setOpacity(divid, 0.4);
+	jqNode(noticeid).fadeIn('fast');
+	// appear(jQuery('#' + noticeid), {'duration': 0.5});
+    }
+}
+
+/* inverse of removeInlinedEntity: re-create the hidden input enabling the
+ * edition of the relation, restore the form's opacity, hide the notice */
+function restoreInlinedEntity(peid, rtype, eid) {
+    var nodeid = ['rel', peid, rtype, eid].join('-');
+    var divid = ['div', peid, rtype, eid].join('-');
+    var noticeid = ['notice', peid, rtype, eid].join('-');
+    var node = jqNode(nodeid);
+    if (!(node && node.length)) {
+	node = INPUT({type: 'hidden', id: nodeid,
+		      name: rtype+':'+peid, value: eid});
+	jqNode(['fs', peid, rtype, eid].join('-')).append(node);
+	// appendChildNodes(fs, node);
+	jqNode(divid).fadeTo('fast', 1);
+	// setOpacity(divid, 1);
+	jqNode(noticeid).hide();
+	// jQuery('#' + noticeid).hide();
+    }
+}
+
+/* remove the error <span>s a previous validation inserted into the form */
+function _clearPreviousErrors(formid) {
+    jQuery('#' + formid + ' span.error').remove();
+}
+
+function _displayValidationerrors(formid, eid, errors) {
+    var globalerrors = [];
+    var firsterrfield = null;
+    for (fieldname in errors) {
+	var errmsg = errors[fieldname];
+	var fieldid = fieldname + ':' + eid;
+	var field = jqNode(fieldname + ':' + eid);
+	if (field && getNodeAttribute(field, 'type') != 'hidden') {
+	    if ( !firsterrfield ) {
+		firsterrfield = 'err-' + fieldid;
+	    }
+	    addElementClass(field, 'error');
+	    var span = SPAN({'id': 'err-' + fieldid, 'class': "error"}, errmsg);
+	    field.before(span);
+	} else {
+	    firsterrfield = formid;
+	    globalerrors.push(fieldname + ': ' + errmsg);
+	}
+    }
+    if (globalerrors.length) {
+	if (globalerrors.length == 1) {
+	    var innernode = SPAN(null, globalerrors[0]);
+	} else {
+	    var innernode = UL(null, map(LI, globalerrors));
+	}
+	// insert DIV and innernode before the form
+	var div = DIV({'class' : "errorMessage"});
+	div.appendChild(innernode);
+	jQuery('#' + formid).before(div);
+    }
+    return firsterrfield || formid;
+}
+
+
+/*
+ * Default callback handling the server's answer to a form validation.
+ * @param result : [true, <redirect url>] on success,
+ *                 [false, <error descriptor>] on failure
+ * @param onsuccess : optional callback called with result[1] instead of
+ *                    redirecting to it
+ */
+function handleFormValidationResponse(formid, onsuccess, result) {
+    // Success
+    if (result[0]) {
+	if (onsuccess) {
+	    return onsuccess(result[1]);
+	} else {
+	    document.location.href = result[1];
+	    return ;
+	}
+    }
+    unfreezeFormButtons(formid);
+    // Failures
+    _clearPreviousErrors(formid);
+    var descr = result[1];
+    // Unknown structure
+    if ( !isArrayLike(descr) || descr.length != 2 ) {
+	log('got strange error :', descr);
+	updateMessage(descr);
+	return ;
+    }
+    // descr is [eid, {fieldname: errmsg}]
+    _displayValidationerrors(formid, descr[0], descr[1]);
+    updateMessage(_("please correct errors below"));
+    // jump back to the top of the page so the message is visible
+    document.location.hash = '#header';
+    return false;
+}
+
+
+/* unfreeze form buttons when the validation process is over*/
+function unfreezeFormButtons(formid) {
+    // hide the "validating" label and re-enable the submit buttons
+    jQuery('#progress').hide();
+    jQuery('#' + formid + ' input.validateButton').removeAttr('disabled');
+    return true;
+}
+
+/* disable form buttons while the validation is being done */
+function freezeFormButtons(formid) {
+    var formbuttons = jQuery(formid + ' input.validateButton');
+    jQuery('#progress').show();
+    jQuery(formid + ' input.validateButton').attr('disabled', 'disabled');
+    return true;
+}
+
+/* used by additional submit buttons to remember which button was clicked */
+function postForm(bname, bvalue, formid) {
+    var form = getNode(formid);
+    if (bname) {
+	// record which button was clicked as a hidden input
+	form.appendChild(INPUT({type: 'hidden', name: bname, value: bvalue}));
+    }
+    // honour an eventual onsubmit handler, as a real submit click would
+    var onsubmit = form.onsubmit;
+    if (!onsubmit || (onsubmit && onsubmit())) {
+	form.submit();
+    }
+}
+
+
+/* called on load to set the forms' `target` attribute and create the
+ * matching iframes.
+ * NOTE: this is a hack to keep the page XHTML compliant.
+ * NOTE2: `object` nodes might be a potential replacement for iframes
+ * NOTE3: there is a XHTML module allowing iframe elements but there
+ *        is still the problem of the form's `target` attribute
+ */
+function setFormsTarget() {
+    jQuery('form.entityForm').each(function () {
+	var form = jQuery(this);
+	// the cubicweb:target attribute is only present when the form
+	// must post into an iframe
+	var target = form.attr('cubicweb:target');
+	if (target) {
+	    form.attr('target', target);
+	    /* do not use display: none because some browser ignore iframe
+             *     with no display */
+	    form.append(IFRAME({name: target, id: target,
+				src: 'javascript: void(0)',
+				width: '0px', height: '0px'}));
+	}
+    });
+}
+
+$(document).ready(setFormsTarget);
+
+/* serialize the form and ask the server to validate it; returns a deferred */
+function _sendForm(formid, action) {
+    var zipped = formContents(formid);
+    return async_remote_exec('validate_form', action, zipped[0], zipped[1]);
+}
+
+/*
+ * called on traditional form submission : the idea is to try
+ * to post the form. If the post is successful, `validateForm` redirects
+ * to the appropriate URL. Otherwise, the validation errors are displayed
+ * around the corresponding input fields.
+ */
+function validateForm(formid, action, onsuccess) {
+    try {
+	var d = _sendForm(formid, action);
+    } catch (ex) {
+	log('got exception', ex);
+	return false;
+    }
+    function _callback(result, req) {
+	handleFormValidationResponse(formid, onsuccess, result);
+    }
+    // d.addCallback(handleFormValidationResponse, formid, onsuccess);
+    d.addCallback(_callback);
+    // always return false: the callback handles redirection / error display
+    return false;
+}
+
+/*
+ * called by live-edit forms to submit changes
+ * @param formid : the dom id of the form used
+ * @param rtype : the attribute being edited
+ * @param eid : the eid of the entity being edited
+ * @param reload: boolean to reload page if true (when changing URL dependant data)
+ */
+function inlineValidateForm(formid, rtype, eid, divid, reload) {
+    try {
+	var form = getNode(formid);
+	// when FCKeditor is loaded, copy each editor's content back into
+	// its linked textarea before serializing the form
+	if (typeof FCKeditorAPI != "undefined") {
+	    for ( var name in FCKeditorAPI.__Instances ) {
+		var oEditor = FCKeditorAPI.__Instances[name] ;
+		if ( oEditor.GetParentForm() == form ) {
+		    oEditor.UpdateLinkedField();
+		}
+	    }
+	}
+	var zipped = formContents(form);
+	var d = async_remote_exec('edit_field', 'apply', zipped[0], zipped[1], rtype, eid);
+    } catch (ex) {
+	log('got exception', ex);
+	return false;
+    }
+    d.addCallback(function (result, req) {
+        handleFormValidationResponse(formid, noop, result);
+	if (reload) {
+	    document.location.href = result[1];
+	} else {
+	    // replace the displayed field with the up-to-date rendering
+	    // sent back by the server (result[2])
+	    var fieldview = getNode(divid);
+	    // XXX using innerHTML is very fragile and won't work if
+	    // we mix XHTML and HTML
+	    fieldview.innerHTML = result[2];
+	    // switch inline form off only if no error
+	    if (result[0]) {
+		// hide global error messages
+		jQuery('div.errorMessage').remove();
+		jQuery('#appMsg').hide();
+		cancelInlineEdit(eid, rtype, divid);
+	    }
+	}
+	return false;
+    });
+    return false;
+}
+
+/**** inline edition ****/
+// swap the static field view for its in-place edition form
+function showInlineEditionForm(eid, rtype, divid) {
+    jQuery('#' + divid).hide();
+    jQuery('#' + divid + '-form').show();
+}
+
+// inverse of showInlineEditionForm: hide the form, show the field view again
+function cancelInlineEdit(eid, rtype, divid) {
+    jQuery('#' + divid).show();
+    jQuery('#' + divid + '-form').hide();
+}
+
+CubicWeb.provide('edition.js');
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/cubicweb.fckcwconfig.js	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,34 @@
+// cf /usr/share/fckeditor/fckconfig.js
+
+// the application sets the editor language itself
+FCKConfig.AutoDetectLanguage	= false ;
+
+// trimmed-down toolbar: entries removed from the stock configuration are
+// listed in the comments next to each row
+FCKConfig.ToolbarSets["Default"] = [
+    // removed : 'Save','NewPage','DocProps','-','Templates','-','Preview'
+	['Source'],
+    // removed: 'Print','-','SpellCheck'
+	['Cut','Copy','Paste','PasteText','PasteWord'],
+	['Undo','Redo','-','Find','Replace','-','SelectAll','RemoveFormat'],
+    //['Form','Checkbox','Radio','TextField','Textarea','Select','Button','ImageButton','HiddenField'],
+	'/',
+    // ,'StrikeThrough','-','Subscript','Superscript'
+	['Bold','Italic','Underline'],
+    // ,'-','Outdent','Indent','Blockquote'
+	['OrderedList','UnorderedList'],
+	['JustifyLeft','JustifyCenter','JustifyRight','JustifyFull'],
+	['Link','Unlink','Anchor'],
+    // removed : 'Image','Flash','Smiley','PageBreak'
+	['Table','Rule','SpecialChar'],
+	'/',
+	['Style','FontFormat','FontName','FontSize'],
+	['TextColor','BGColor'],
+    //,'ShowBlocks'
+	['FitWindow','-','About']		// No comma for the last row.
+] ;
+
+// 'Flash','Select','Textarea','Checkbox','Radio','TextField','HiddenField','ImageButton','Button','Form',
+FCKConfig.ContextMenu = ['Generic','Link','Anchor','Image','BulletedList','NumberedList','Table'] ;
+
+// disable the editor's own upload dialogs
+FCKConfig.LinkUpload = false ;
+FCKConfig.ImageUpload = false ;
+FCKConfig.FlashUpload = false ;
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/cubicweb.form.css	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,191 @@
+/* styles for generated forms
+ *
+ *  :organization: Logilab
+ *  :copyright: 2003-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+ */
+
+div.formTitle {
+  width: 100%;
+  font-size : 160%;
+  font-weight: bold;
+  color: orangered;
+  padding-bottom : 0.4em;
+  text-transform: capitalize;
+  background: url("bg_trame_grise.png") left bottom repeat-x;
+  margin-bottom: 0.6em
+}
+
+div.formTitle span {
+  padding: 4px 0px;
+  display: block;
+}
+
+div.iformTitle {
+  font-weight: bold;
+  font-size: 110%;
+  color: #222211;
+  background: #e4ead8;
+  border: 1px solid #E4EAD8;  /*#b7b6a3 */
+  border-bottom: none;
+}
+
+div.iformTitle span {
+  margin: 0.5em;
+}
+
+div.iformTitle span.icounter {
+  margin: 0em;
+}
+
+div.iformTitle a {
+  font-size: 80%;
+}
+
+div.formBody {
+  width:100%;
+}
+
+fieldset.subentity {
+  border: 1px solid #E4EAD8;
+  display: block;
+  margin-bottom: 1em;
+  padding: 0.4em;
+}
+
+table.attributeForm {
+  border: 1px solid #E4EAD8;
+  margin-bottom: 1em;
+  padding: 0.8em;
+}
+
+fieldset.subentity table td {
+  padding: 2px;
+  vertical-align: top;
+}
+
+fieldset.subentity table.attributeForm {
+    border: none;
+    padding: 0em;
+}
+
+.attributeForm,
+.attributeForm th,
+.attributeForm td {
+  border-collapse: separate;
+}
+
+table.attributeForm th,
+table.attributeForm td {
+  padding : 0px 2px;
+}
+
+table.attributeForm th {
+  text-align: right;
+}
+
+table.attributeForm div#comfirmPsw {
+}
+
+table.attributeForm input,
+table.attributeForm textarea,
+table.attributeForm select {
+  border: 1px solid #E4EAD8;  /*#b7b6a3*/
+}
+
+table.attributeForm textarea {
+  font: 100% normal Courier, monospace;
+  width: 100%;
+}
+
+table.attributeForm label,
+.entityForm .label {
+  padding : 0.2em  10px 0.2em 0.4em;
+}
+
+table.attributeForm label.required {
+  background: url("required.png") 100% 50% no-repeat;
+}
+
+.entityForm input.validateButton {
+  margin: 5px 10px 5px 0px;
+}
+
+a.addEntity {
+  display:block ;
+  margin-bottom:1em ;
+  padding-left:0.2em ;
+  font-weight: bold ;
+}
+
+table#relatedEntities {
+  margin: 10px 0px;
+  border-collapse: separate;
+}
+
+table#relatedEntities th,
+table#relatedEntities td {
+  padding : 0px 2px;
+}
+
+table#relatedEntities tr.separator {
+  border-bottom: none;
+  text-align: center;
+}
+
+table#relatedEntities th {
+  text-align: right;
+  width:200em;
+}
+
+table#relatedEntities th.warning {
+  border: none;
+  text-align: center;
+}
+
+table#relatedEntities td span {
+  display: inline;
+}
+
+table#relatedEntities ul {
+  margin: 0px;
+  padding: 0px;
+}
+
+table#relatedEntities td {
+  width: 100%;
+}
+
+a.handle {
+ font-family: Courier;
+ font-size: 120%;
+}
+
+a.editionPending {
+  color: #557755;
+  font-weight: bold;
+}
+
+div.pendingDelete {
+  text-decoration: line-through;
+}
+
+div.notransform {
+  text-transform: none;
+}
+
+div.inlinedform {
+  margin-left: 2em;
+}
+
+div.trame_grise {
+  background: url("bg_trame_grise.png") left top repeat-x;
+}
+
+div.notice {
+  display: none;
+  font-style: italic;
+  font-size: 110%;
+  padding-left: 2em;
+  background : #f8f8ee url("information.png") 5px center no-repeat ;
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/cubicweb.formfilter.js	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,200 @@
+/*
+ *  :organization: Logilab
+ *  :copyright: 2003-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+ */
+
+CubicWeb.require('htmlhelpers.js');
+CubicWeb.require('ajax.js');
+
+//============= filter form functions ========================================//
+
+function copyParam(origparams, newparams, param) {
+    var index = findValue(origparams[0], param);
+    if (index > -1) {
+	newparams[param] = origparams[1][index];
+    }
+}
+
+function facetFormContent(form) {
+  var names = [];
+  var values = [];
+  jQuery(form).find('.facet').each(function () {
+      var facetName = jQuery(this).find('.facetTitle').attr('cubicweb:facetName');
+      var facetValues = jQuery(this).find('.facetValueSelected').each(function(x) {
+	  names.push(facetName);
+	  values.push(this.getAttribute('cubicweb:value'));
+      });
+  });
+  jQuery(form).find('input').each(function () {
+      names.push(this.name);
+      values.push(this.value);
+  });
+    jQuery(form).find('select option[@selected]').each(function () {
+	names.push(this.parentNode.name);
+	values.push(this.value);
+    });
+  return [names, values];
+}
+
+function buildRQL(divid, vid, paginate, vidargs) {
+    jQuery(CubicWeb).trigger('facets-content-loading', [divid, vid, paginate, vidargs]);
+    var form = getNode(divid+'Form');
+    var zipped = facetFormContent(form);
+    zipped[0].push('facetargs')
+    zipped[1].push(vidargs)
+    var d = async_remote_exec('filter_build_rql', zipped[0], zipped[1]);
+    d.addCallback(function(result) {
+	var rql = result[0];
+	var toupdate = result[1];
+	var extraparams = vidargs;
+	var displayactions = jQuery('#' + divid).attr('cubicweb:displayactions');
+	if (displayactions) { extraparams['displayactions'] = displayactions; }
+	if (paginate) { extraparams['paginate'] = '1'; }
+	// copy some parameters
+	// XXX cleanup vid/divid mess
+	// if vid argument is specified , the one specified in form params will
+	// be overriden by replacePageChunk
+	copyParam(zipped, extraparams, 'vid');
+	extraparams['divid'] = divid;
+	copyParam(zipped, extraparams, 'divid');
+	copyParam(zipped, extraparams, 'subvid');
+	// paginate used to know if the filter box is acting, in which case we
+	// want to reload action box to match current selection
+	replacePageChunk(divid, rql, vid, extraparams, true, function() {
+	  jQuery(CubicWeb).trigger('facets-content-loaded', [divid, rql, vid, extraparams]);
+	});
+	if (paginate) {
+	    // FIXME the edit box might not be displayed in which case we don't
+	    // know where to put the potential new one, just skip this case
+	    // for now
+	    if (jQuery('#edit_box').length) {
+		reloadComponent('edit_box', rql, 'boxes', 'edit_box');
+	    }
+	}
+	var d = async_remote_exec('filter_select_content', toupdate, rql);
+	d.addCallback(function(updateMap) {
+	    for (facetId in updateMap) {
+		var values = updateMap[facetId];
+		jqNode(facetId).find('.facetCheckBox').each(function () {
+		    var value = this.getAttribute('cubicweb:value');
+		    if (!values.contains(value)) {
+			if (!jQuery(this).hasClass('facetValueDisabled')) {
+			    jQuery(this).addClass('facetValueDisabled');
+			}
+		    } else {
+			if (jQuery(this).hasClass('facetValueDisabled')) {
+			    jQuery(this).removeClass('facetValueDisabled');
+			}
+		    }
+		});
+	    }
+	});
+    });
+}
+
+
+var SELECTED_IMG = baseuri()+"data/black-check.png";
+var UNSELECTED_IMG = baseuri()+"data/no-check-no-border.png";
+
+function initFacetBoxEvents(root){
+    root = root || document;
+    jQuery(root).find('div.facetBody').each(function (){
+    	 var height = jQuery(this).height();
+	 if (height > 160){
+	     jQuery(this).addClass('owerflowed');
+	 }
+    });
+    jQuery(root).find('form').each(function () {
+	var form = jQuery(this);
+	var facetargs = form.attr('cubicweb:facetargs');
+	if (facetargs) {
+	    form.submit(function() {
+		var facetargs = evalJSON(form.attr('cubicweb:facetargs'));
+	        buildRQL.apply(null, facetargs); //(divid, vid, paginate, extraargs);
+	        return false;
+	    });
+	    form.find('div.facet').each(function() {
+		var facet = jQuery(this);
+		facet.find('div.facetCheckBox').each(function (i) {
+		    this.setAttribute('cubicweb:idx', i);
+		});
+		facet.find('div.facetCheckBox').click(function () {
+		    var facetargs = evalJSON(form.attr('cubicweb:facetargs'));
+		    var $this = jQuery(this);
+		    if ($this.hasClass('facetValueSelected')) {
+			$this.removeClass('facetValueSelected');
+			$this.find('img').attr('src', UNSELECTED_IMG);
+			var index = parseInt($this.attr('cubicweb:idx'));
+			var shift = jQuery.grep(facet.find('.facetValueSelected'), function (n) {
+			    var nindex = parseInt(n.getAttribute('cubicweb:idx'));
+			    return nindex > index;
+			}).length;
+			index += shift;
+			var parent = this.parentNode;
+			jQuery(parent).find('.facetCheckBox:nth('+index+')').after(this);
+		    } else {
+			var lastSelected = facet.find('.facetValueSelected:last');
+			if (lastSelected.length) {
+			    lastSelected.after(this);
+			} else {
+			    var parent = this.parentNode;
+			    jQuery(parent).prepend(this);
+			}
+			jQuery(this).addClass('facetValueSelected');
+			jQuery(this).find('img').attr('src', SELECTED_IMG);
+		    }
+		    buildRQL.apply(null, facetargs); // (divid, vid, paginate, extraargs);
+		    facet.find('.facetBody').animate({scrollTop: 0}, '');
+		});
+		facet.find('select.facetOperator').change(function() {
+		    var nbselected = facet.find('div.facetValueSelected').length;
+		    if (nbselected >= 2) {
+			buildRQL.apply(null, facetargs); // (divid, vid, paginate, extraargs);
+		    }
+		});
+		facet.find('div.facetTitle').click(function() {
+		  facet.find('div.facetBody').toggleClass('hidden').toggleClass('opened');
+		  jQuery(this).toggleClass('opened');
+		   });
+
+	    });
+	}
+    });
+}
+
+// trigger this function on document ready event if you provide some kind of
+// persistent search (eg crih)
+function reorderFacetsItems(root){
+    root = root || document;
+    jQuery(root).find('form').each(function () {
+	var form = jQuery(this);
+	var facetargs = form.attr('cubicweb:facetargs');
+	if (facetargs) {
+	    form.find('div.facet').each(function() {
+		var facet = jQuery(this);	
+		var lastSelected = null;
+		facet.find('div.facetCheckBox').each(function (i) {
+		    var $this = jQuery(this);
+		    if ($this.hasClass('facetValueSelected')) {
+			if (lastSelected) {
+			    lastSelected.after(this);
+			} else {
+			    var parent = this.parentNode;
+			    jQuery(parent).prepend(this);
+			}
+			lastSelected = $this;
+		    }
+		});
+	    });
+	}
+    });
+}
+
+// we need to differenciate cases where initFacetBoxEvents is called
+// with one argument or without any argument. If we use `initFacetBoxEvents`
+// as the direct callback on the jQuery.ready event, jQuery will pass some argument
+// of his, so we use this small anonymous function instead.
+jQuery(document).ready(function() {initFacetBoxEvents();});
+
+CubicWeb.provide('formfilter.js');
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/cubicweb.gmap.js	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,74 @@
+/*
+ *  :organization: Logilab
+ *  :copyright: 2003-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+ *
+ *
+ */
+
+Widgets.GMapWidget = defclass('GMapWidget', null, {
+  __init__: function(wdgnode) {
+    // Assume we have imported google maps JS
+    if (GBrowserIsCompatible()) {
+      var uselabelstr = wdgnode.getAttribute('cubicweb:uselabel');
+      var uselabel = true;
+      if (uselabelstr){
+	if (uselabelstr == 'True'){
+	  uselabel = true;
+	}
+	else{
+	  uselabel = false;
+	}
+      }
+      var map = new GMap2(wdgnode);
+      map.addControl(new GSmallMapControl());
+      var jsonurl = wdgnode.getAttribute('cubicweb:loadurl');
+      var self = this; // bind this to a local variable
+      jQuery.getJSON(jsonurl, function(geodata) {
+	if (geodata.center) {
+	  var zoomLevel = 8; // FIXME arbitrary !
+	  map.setCenter(new GLatLng(geodata.center.latitude, geodata.center.longitude),
+		        zoomLevel);
+	}
+	for (var i=0; i<geodata.markers.length; i++) {
+	  var marker = geodata.markers[i];
+	  self.createMarker(map, marker, i+1, uselabel);
+	}
+      });
+      jQuery(wdgnode).after(this.legendBox);
+    } else { // incompatible browser
+      jQuery.unload(GUnload);
+    }
+  },
+
+  createMarker: function(map, marker, i, uselabel) {
+    var point = new GLatLng(marker.latitude, marker.longitude);
+    var icon = new GIcon();
+    icon.image = marker.icon[0];
+    icon.iconSize = new GSize(marker.icon[1][0], marker.icon[1][1]) ;
+    icon.iconAnchor = new GPoint(marker.icon[2][0], marker.icon[2][1]);
+    if(marker.icon[3]){
+      icon.shadow4 =  marker.icon[3];
+    }
+
+    var gmarker = new LabeledMarker(point, {
+      icon: icon,
+      title: marker.title,
+      labelText: uselabel?'<strong>' + i + '</strong>':'',
+      labelOffset: new GSize(2, -32)
+    });
+    map.addOverlay(gmarker);
+    GEvent.addListener(gmarker, 'click', function() {
+      jQuery.post(marker.bubbleUrl, function(data) {
+	map.openInfoWindowHtml(point, data);
+      });
+    });
+  },
+
+  appendLegendItem: function(marker, i) {
+    var ul = this.legendBox.firstChild;
+    ul.appendChild(LI(null, [SPAN({'class': "itemNo"}, i),
+                             SPAN(null, marker.title)]));
+  }
+
+});
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/cubicweb.goa.js	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,10 @@
+/*
+ *  functions specific to cubicweb on google appengine
+ *
+ *  :organization: Logilab
+ *  :copyright: 2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+ */
+
+/* overrides rql_for_eid function from htmlhelpers.hs */
+function rql_for_eid(eid) { return 'Any X WHERE X eid "' + eid + '"'; }
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/cubicweb.html_tree.css	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,90 @@
+table.tree {
+  border-collapse: collapse;
+}
+
+table.tree td {
+  padding: 0px;
+}
+
+div.tree_cell {
+  border: 1px solid black;
+  margin-top: 2px;
+  margin-bottom: 2px;
+  margin-left: 0px;
+  margin-right: 0px;  
+  padding-top: 2px;
+  padding-bottom: 2px;
+  padding-left: 5px;
+  padding-right: 5px;
+}
+
+#selected {
+  border: 2px solid black;
+}
+
+table.tree td.tree_cell {
+  padding: 0px;
+}
+
+table.tree td.tree_cell_1_1,
+td.tree_cell_1_2 {
+  border-bottom: 1px solid black;
+}
+table.tree td.tree_cell_1_3 {
+  border-right: 1px solid black;
+}
+table.tree td.tree_cell_1_4 {
+  border: 0px;
+}
+
+
+table.tree td.tree_cell_2_1,
+td.tree_cell_2_3 {
+  border-right: 1px solid black;
+}
+table.tree td.tree_cell_2_2,
+td.tree_cell_2_4 {
+  border: 0px;
+}
+
+
+table.tree td.tree_cell_3_1,
+td.tree_cell_3_3 {
+  border-right: 1px solid black;
+}
+table.tree td.tree_cell_3_2 {
+  border-bottom: 1px solid black;
+}
+table.tree td.tree_cell_3_4 {
+  border: 0px;
+}
+
+
+table.tree td.tree_cell_4_1 {
+  border-right: 1px solid black;
+}
+table.tree td.tree_cell_4_2 {
+  border-bottom: 1px solid black;
+}
+table.tree td.tree_cell_4_3,
+td.tree_cell_4_4 {
+  border: 0px;
+}
+
+
+table.tree td.tree_cell_5_1,
+td.tree_cell_5_2 {
+  border-bottom: 1px solid black;
+}
+table.tree td.tree_cell_5_3,
+td.tree_cell_5_4 {
+  border: 0px;
+}
+
+
+table.tree td.tree_cell_0_1,
+td.tree_cell_0_2,
+td.tree_cell_0_3,
+td.tree_cell_0_4 {
+  border: 0px;
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/cubicweb.htmlhelpers.js	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,256 @@
+CubicWeb.require('python.js');
+
+/* returns the document's baseURI. (baseuri() uses document.baseURI if
+ * available and inspects the <base> tag manually otherwise.)
+*/
+function baseuri() {
+    var uri = document.baseURI;
+    if (uri) { // some browsers don't define baseURI
+	return uri;
+    }
+    var basetags = document.getElementsByTagName('base');
+    if (basetags.length) {
+	return getNodeAttribute(basetags[0], 'href');
+    }
+    return '';
+}
+
+function insertText(text, areaId) {
+    var textarea = jQuery('#' + areaId);
+    if (document.selection) { // IE
+        var selLength;
+        textarea.focus();
+        sel = document.selection.createRange();
+        selLength = sel.text.length;
+        sel.text = text;
+        sel.moveStart('character', selLength-text.length);
+        sel.select();
+    } else if (textarea.selectionStart || textarea.selectionStart == '0') { // mozilla
+        var startPos = textarea.selectionStart;
+        var endPos = textarea.selectionEnd;
+	// insert text so that it replaces the [startPos, endPos] part
+        textarea.value = textarea.value.substring(0,startPos) + text + textarea.value.substring(endPos,textarea.value.length);
+	// set cursor pos at the end of the inserted text
+        textarea.selectionStart = textarea.selectionEnd = startPos+text.length;
+        textarea.focus();
+    } else { // safety belt for other browsers
+        textarea.value += text;
+    }
+}
+
+/* taken from dojo toolkit */
+function setCaretPos(element, start, end){
+    if(!end){ end = element.value.length; }  // NOTE: Strange - should be able to put caret at start of text?
+    // Mozilla
+    // parts borrowed from http://www.faqts.com/knowledge_base/view.phtml/aid/13562/fid/130
+    if(element.setSelectionRange){
+        element.focus();
+        element.setSelectionRange(start, end);
+    } else if(element.createTextRange){ // IE
+        var range = element.createTextRange();
+        with(range){
+            collapse(true);
+            moveEnd('character', end);
+            moveStart('character', start);
+            select();
+        }
+    } else { //otherwise try the event-creation hack (our own invention)
+        // do we need these?
+        element.value = element.value;
+        element.blur();
+        element.focus();
+        // figure out how far back to go
+        var dist = parseInt(element.value.length)-end;
+        var tchar = String.fromCharCode(37);
+        var tcc = tchar.charCodeAt(0);
+        for(var x = 0; x < dist; x++){
+            var te = document.createEvent("KeyEvents");
+            te.initKeyEvent("keypress", true, true, null, false, false, false, false, tcc, tcc);
+            element.dispatchEvent(te);
+        }
+    }
+}
+
+function setProgressMessage(label) {
+    var body = document.getElementsByTagName('body')[0];
+    body.appendChild(DIV({id: 'progress'}, label));
+    jQuery('#progress').show();
+}
+
+function resetProgressMessage() {
+    var body = document.getElementsByTagName('body')[0];
+    jQuery('#progress').hide();
+}
+
+
+/* set body's cursor to 'progress'
+ */
+function setProgressCursor() {
+    var body = document.getElementsByTagName('body')[0];
+    body.style.cursor = 'progress';
+}
+
+/*
+ * reset body's cursor to default (mouse cursor). The main
+ * purpose of this function is to be used as a callback in the
+ * deferreds' callbacks chain.
+ */
+function resetCursor(result) {
+    var body = document.getElementsByTagName('body')[0];
+    body.style.cursor = 'default';
+    // pass result to next callback in the callback chain
+    return result;
+}
+
+function updateMessage(msg) {
+    var msgdiv = DIV({'class':'message'});
+    // don't pass msg to DIV() directly because DIV will html escape it
+    // and msg should alreay be html escaped at this point.
+    msgdiv.innerHTML = msg;
+    jQuery('#appMsg').removeClass('hidden').empty().append(msgdiv);
+}
+
+/* builds an url from an object (used as a dictionnary)
+ * Notable difference with MochiKit's queryString: as_url does not
+ * *url_quote* each value found in the dictionnary
+ * 
+ * >>> as_url({'rql' : "RQL", 'x': [1, 2], 'itemvid' : "oneline"})
+ * rql=RQL&vid=list&itemvid=oneline&x=1&x=2
+ */
+function as_url(props) {
+    var chunks = [];
+    for(key in props) {
+	var value = props[key];
+	// generate a list of couple key=value if key is multivalued
+	if (isArrayLike(value)) {
+	    for (var i=0; i<value.length;i++) {
+		chunks.push(key + '=' + value[i]);
+	    }
+	} else {
+	    chunks.push(key + '=' + value);
+	}
+    }
+    return chunks.join('&');
+}
+
+/* return selected value of a combo box if any 
+ */
+function firstSelected(selectNode) {
+    var selection = filter(attrgetter('selected'), selectNode.options);
+    return (selection.length>0) ? getNodeAttribute(selection[0], 'value'):null;
+}
+
+/* toggle visibility of an element by its id
+ */
+function toggleVisibility(elemId) {
+    jqNode(elemId).toggleClass('hidden');
+}
+
+/*
+ * return true (resp. false) if <element> (resp. doesn't) matches <properties>
+ */
+function elementMatches(properties, element) {
+    for (prop in properties) {
+	if (getNodeAttribute(element, prop) != properties[prop]) {
+	    return false;
+	}
+    }
+    return true;
+}
+
+/* returns the list of elements in the document matching the tag name
+ * and the properties provided
+ * 
+ * @param tagName the tag's name
+ * @param properties a js Object used as a dict
+ * @return an iterator (if a *real* array is needed, you can use the
+ *                      list() function)
+ */
+function getElementsMatching(tagName, properties, /* optional */ parent) {
+    var filterfunc = partial(elementMatches, properties);
+    parent = parent || document;
+    return filter(filterfunc, parent.getElementsByTagName(tagName));
+}
+
+/*
+ * sets checked/unchecked status of checkboxes 
+ */
+function setCheckboxesState(nameprefix, checked){
+    // XXX: this looks in *all* the document for inputs
+    var elements = getElementsMatching('input', {'type': "checkbox"});
+    filterfunc = function(cb) { return nameprefix && cb.name.startsWith(nameprefix); };
+    forEach(filter(filterfunc, elements), function(cb) {cb.checked=checked;});
+}
+
+function setCheckboxesState2(nameprefix, value, checked){
+    // XXX: this looks in *all* the document for inputs
+    var elements = getElementsMatching('input', {'type': "checkbox"});
+    filterfunc = function(cb) { return nameprefix && cb.name.startsWith(nameprefix) && cb.value == value; };
+    forEach(filter(filterfunc, elements), function(cb) {cb.checked=checked;});
+}
+
+/*
+ * centers an HTML element on the screen
+ */
+function centerElement(obj){
+    var vpDim = getViewportDimensions();
+    var elemDim = getElementDimensions(obj);
+    setElementPosition(obj, {'x':((vpDim.w - elemDim.w)/2),
+			     'y':((vpDim.h - elemDim.h)/2)});
+}
+
+/* this function is a hack to build a dom node from html source */
+function html2dom(source) {
+    var tmpNode = SPAN();
+    tmpNode.innerHTML = source;
+    if (tmpNode.childNodes.length == 1) {
+	return tmpNode.firstChild;
+    }
+    else {
+	// we leave the span node when `source` has no root node
+	// XXX This is cleary not the best solution, but css/html-wise,
+	///    a span not should not be too  much disturbing
+	return tmpNode;
+    }
+}
+
+
+// *** HELPERS **************************************************** //
+function rql_for_eid(eid) { return 'Any X WHERE X eid ' + eid; }
+function isTextNode(domNode) { return domNode.nodeType == 3; }
+function isElementNode(domNode) { return domNode.nodeType == 1; }
+
+function changeLinkText(link, newText) {
+    jQuery(link).text(newText);
+//    for (var i=0; i<link.childNodes.length; i++) {
+//	var node = link.childNodes[i];
+//	if (isTextNode(node)) {
+//	    swapDOM(node, document.createTextNode(newText));
+//	    break;
+//	}
+//    }
+}
+
+
+function autogrow(area) {
+    if (area.scrollHeight > area.clientHeight && !window.opera) {
+	if (area.rows < 20) {
+	    area.rows += 2;
+	}
+    }
+}
+
+//============= page loading events ==========================================//
+function roundedCornersOnLoad() {
+    roundClass("div", "sideBox", {corners: "bottom", compact:false}); 
+    roundClass("div", "boxTitle", {corners: "top",  compact:true}); 
+    roundClass("div", "boxPrefTitle", {corners: "top",  compact:true}); 
+    roundClass("div", "sideBoxTitle", {corners: "top",  compact:true}); 
+    roundClass("th", "month", {corners: "top",  compact:true});
+}
+
+// jQuery(document).ready(roundedCornersOnLoad); 
+
+
+CubicWeb.provide('htmlhelpers.js');
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/cubicweb.ie.css	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,11 @@
+/* specific workarounds for Internet Explorer */
+div.calpopup {
+  margin-left: 0px;
+  margin-top: 0px;
+}
+
+/* quick and dirty solution for pop to be 
+   correctly displayed on right edge of window */
+div.popupWrapper{ 
+  direction:rtl;
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/cubicweb.iprogress.css	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,84 @@
+/*
+ *  :organization: Logilab
+ *  :copyright: 2003-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+ */
+
+/******************************************************************************/
+/* progressbar                                                                */
+/******************************************************************************/
+
+.done{ background:red }
+
+.inprogress{ background:green }
+
+.overpassed{ background: yellow}
+
+
+.progressbar {
+ height: 10px;
+ background: #008000;
+}
+
+.progressbarback {
+ border: 1px solid #000000;
+ background: transparent;
+ height: 10px;
+ width: 100px;
+}
+
+/******************************************************************************/
+/* progress table                                                             */
+/******************************************************************************/
+
+table.progress{
+ /* The default table view */
+ margin: 10px 0px;
+ color : #000;
+ width:100%;
+ font-size:98%;
+ border:2px solid #ebe8d9;
+}
+
+table.progress th{ 
+ text-align:left;
+ white-space:nowrap;
+ font-weight : bold;
+ background:#ebe8d9 url("button.png") repeat-x;
+ padding:2px 3px;
+}
+
+table.progress th,
+table.progress td{ 
+ border: 1px solid #dedede;
+ margin:0px;
+}
+
+table.progress td{ 
+ text-align:right;
+ padding:2px 5px 2px 2px;
+}
+
+table.progress th.tdleft,
+table.progress td.tdleft{ 
+ text-align:left;
+ padding:2px 3px 2px 5px;
+}
+
+
+table.progress tr.highlighted{
+ background-color: #f4f5ed; 
+}
+
+table.progress tr.highlighted .progressbarback {
+ border: 1px solid #555;
+}
+
+table.progress .progressbarback {
+ border: 1px solid #777;
+}
+
+.progress_data{ 
+ padding-right:3px;
+}
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/cubicweb.login.css	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,82 @@
+/* styles for the login popup and login form
+ *
+ *  :organization: Logilab
+ *  :copyright: 2003-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+ */
+
+div#popupLoginBox {
+  position: absolute;
+  z-index: 400;
+  right: 0px;
+  width: 23em;
+  padding: 0px 1px 1px;
+  font-weight: bold;
+  background: #E4EAD8; 
+}
+
+div#popupLoginBox div#loginContent {
+  background: #e6e4ce;
+  padding: 5px 3px 4px;
+}
+
+div#loginBox { 
+  position : absolute;
+  top: 15%;
+  left : 50%;
+  margin-left: -11em;
+  width: 24em;
+  background: #fff;
+  border: 2px solid #cfceb7;
+  padding-bottom: 0.5em;
+  text-align: center;
+}
+
+div#loginBox h1 { 
+  color: #FF7700;
+  font-size: 140%;
+}
+
+div#loginTitle { 
+  color: #fff;
+  font-weight: bold;
+  font-size: 140%;
+  text-align: center;
+  padding: 3px 0px;
+  background: #ff7700 url("banner.png") left top repeat-x;
+}
+
+div#loginBox div#loginContent form { 
+  padding-top: 1em;
+  width: 90%; 
+  margin: auto;  
+}
+
+#popupLoginBox table td { 
+  padding: 0px 3px; 
+  white-space: nowrap;
+}
+
+#loginContent table { 
+  padding: 0px 0.5em;
+  margin: auto;
+}
+
+#loginBox table td { 
+  padding: 0px 3px 0.6em; 
+  white-space: nowrap;
+}
+
+#loginBox .loginButton { 
+  margin-top: 0.6em;
+ }
+
+#loginContent input.data { 
+  width:12em;
+}
+
+input.loginButton {
+  display:block;
+  margin: 2px 0px 0px;
+  background: #f0eff0 url("gradient-grey-up.png") left top repeat-x; 
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/cubicweb.mailform.css	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,66 @@
+/* styles for the email form (views/massmailing.py)
+ *
+ *  :organization: Logilab
+ *  :copyright: 2003-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+ */
+
+div#compose {
+  border: 1px solid #DBDCE3;
+  background-color: #E9F5F7;
+  font-family:Verdana,Tahoma,Arial,sans-serif;
+  padding: 1em 1ex;
+}
+
+table.headersform {
+  width: 100%;
+}
+
+div#compose td#buttonbar {
+  padding: 0.5ex 0ex;
+}
+
+table.headersform td.hlabel {
+  padding-top: 0.5ex;
+  color: #444444;
+  text-align: right;
+}
+
+table.headersform td.hvalue {
+  padding-top: 0.5ex;
+  padding-left: 0.5em;
+  width: 100%;
+}
+
+table.headersform td.hvalue input#mailsubj {
+  width: 47em; 
+}
+
+div#compose div#toolbar {
+  margin: 0.5em 0em;
+  height: 29px;
+}
+
+div#compose div#toolbar ul {
+  list-style-image: none;
+  list-style-position: outside;
+  list-style-type:none;
+  margin:0px;
+  padding:0px;
+  /* border: 1px solid #DBDCE3; */
+}
+
+div#compose div#toolbar li {
+  background: none;
+  padding-left: 1em;
+  float: left;
+}
+
+div#compose div#toolbar li a {
+  font-family: Verdana,Tahoma,Arial,sans-serif;
+  color: #444444;
+}
+
+div#substitutions {
+  padding-left: 1em;
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/cubicweb.preferences.css	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,28 @@
+/* styles for preferences form (views/management.py)
+ *
+ *  :organization: Logilab
+ *  :copyright: 2003-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+ */
+
+.componentTitle{
+ font-weight:bold;
+ color: #ff7700;
+ padding:0px 4px;
+}
+
+h2.propertiesform a{
+ display:block;
+ margin: 10px 0px 6px 0px;
+ font-weight: bold;
+ color: #222211;
+ padding: 0.2em 0.2em 0.2em 16px;
+ background:#eeedd9 url("puce_down.png") 3px center no-repeat;
+ font-size:76%;
+}
+
+h2.propertiesform a:hover{
+ color:#000;
+ background-color:#cfceb7;
+ text-decoration:none;
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/cubicweb.print.css	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,4 @@
+td#speedbar, img.logo, div.header{ 
+ display:none }
+
+a{color:black }
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/cubicweb.python.js	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,398 @@
+/*
+ * This file contains extensions for standard javascript types
+ *
+ */
+
+ONE_DAY = 86400000; // (in milliseconds)
+
+// ========== DATE EXTENSIONS ========== ///
+
+Date.prototype.equals = function(other) {
+    /* compare with other date ignoring time differences */
+    if (this.getYear() == other.getYear() &&
+	this.getMonth() == other.getMonth() &&
+	this.getDate() == other.getDate()) {
+	return true;
+    }
+    return false;
+}
+
+Date.prototype.add = function(days) {
+    var res = new Date()
+    res.setTime(this.getTime() + (days * ONE_DAY))
+    return res
+}
+
+Date.prototype.sub = function(days) {
+    return this.add(-days);
+}
+
+Date.prototype.iadd = function(days) {
+    // in-place add
+    this.setTime(this.getTime() + (days * ONE_DAY))
+    // avoid strange rounding problems !!
+    this.setHours(12);
+}
+
+Date.prototype.isub = function(days) {
+    // in-place sub
+    this.setTime(this.getTime() - (days * ONE_DAY))
+}
+
+/*
+ * returns the first day of the next month
+ */
+Date.prototype.nextMonth = function() {
+    if (this.getMonth() == 11) {
+	var d =new Date(this.getFullYear()+1, 0, 1);
+	return d;
+    } else {
+	var d2 = new Date(this.getFullYear(), this.getMonth()+1, 1);
+	return d2;
+    }
+}
+
+/*
+ * returns the day of week, 0 being monday, 6 being sunday
+ */
+Date.prototype.getRealDay = function() {
+    // getDay() returns 0 for Sunday ==> 6 for Saturday
+    return (this.getDay()+6) % 7;
+}
+
+
+var _DATE_FORMAT_REGXES = {
+    'Y': new RegExp('^-?[0-9]+'),
+    'd': new RegExp('^[0-9]{1,2}'),
+    'm': new RegExp('^[0-9]{1,2}'),
+    'H': new RegExp('^[0-9]{1,2}'),
+    'M': new RegExp('^[0-9]{1,2}')
+}
+
+/*
+ * _parseData does the actual parsing job needed by `strptime`
+ */
+function _parseDate(datestring, format) {
+    var skip0 = new RegExp('^0*[0-9]+');
+    var parsed = {};
+    for (var i1=0,i2=0;i1<format.length;i1++,i2++) {
+	var c1 = format[i1];
+	var c2 = datestring[i2];
+	if (c1 == '%') {
+	    c1 = format[++i1];
+	    var data = _DATE_FORMAT_REGXES[c1].exec(datestring.substring(i2));
+	    if (!data.length) {
+		return null;
+	    }
+	    data = data[0];
+	    i2 += data.length-1;
+	    var value = parseInt(data, 10);
+	    if (isNaN(value)) {
+		return null;
+	    }
+	    parsed[c1] = value;
+	    continue;
+	}
+	if (c1 != c2) {
+	    return null;
+	}
+    }
+    return parsed;
+}
+
+/*
+ * basic implementation of strptime. The only recognized formats
+ * defined in _DATE_FORMAT_REGEXES (i.e. %Y, %d, %m, %H, %M)
+ */
+function strptime(datestring, format) {
+    var parsed = _parseDate(datestring, format);
+    if (!parsed) {
+	return null;
+    }
+    // create initial date (!!! year=0 means 1900 !!!)
+    var date = new Date(0, 0, 1, 0, 0);
+    date.setFullYear(0); // reset to year 0
+    if (parsed.Y) {
+	date.setFullYear(parsed.Y);
+    }
+    if (parsed.m) {
+	if (parsed.m < 1 || parsed.m > 12) {
+	    return null;
+	}
+	// !!! month indexes start at 0 in javascript !!!
+	date.setMonth(parsed.m - 1);
+    }
+    if (parsed.d) {
+	if (parsed.d < 1 || parsed.d > 31) {
+	    return null;
+	}
+	date.setDate(parsed.d)
+    }
+    if (parsed.H) {
+	if (parsed.H < 0 || parsed.H > 23) {
+	    return null;
+	}
+	date.setHours(parsed.H)
+    }
+    if (parsed.M) {
+	if (parsed.M < 0 || parsed.M > 59) {
+	    return null;
+	}
+	date.setMinutes(parsed.M)
+    }
+    return date;
+}
+
+// ========== END OF DATE EXTENSIONS ========== ///
+
+
+
+// ========== ARRAY EXTENSIONS ========== ///
+Array.prototype.contains = function(element) {
+    return findValue(this, element) != -1;
+}
+
+// ========== END OF ARRAY EXTENSIONS ========== ///
+
+
+
+// ========== STRING EXTENSIONS ========== //
+
+/* python-like startsWith method for js strings
+ * >>>
+ */
+String.prototype.startsWith = function(prefix) {
+    return this.indexOf(prefix) == 0;
+}
+
+/* python-like endsWith method for js strings */
+String.prototype.endsWith = function(suffix) {
+    var startPos = this.length - suffix.length;
+    if (startPos < 0) { return false; }
+    return this.lastIndexOf(suffix, startPos) == startPos;
+}
+
+/* python-like strip method for js strings */
+String.prototype.strip = function() {
+    return this.replace(/^\s*(.*?)\s*$/, "$1");
+};
+
+/* py-equiv: string in list */
+String.prototype.in_ = function(values) {
+    return findValue(values, this) != -1;
+}
+
+/* py-equiv: str.join(list) */
+String.prototype.join = function(args) {
+    return args.join(this);
+}
+
+/* python-like list builtin
+ * transforms an iterable in a js sequence
+ * >>> gen = ifilter(function(x) {return x%2==0}, range(10))
+ * >>> s = list(gen)
+ * [0,2,4,6,8]
+ */
+function list(iterable) {
+    var iterator = iter(iterable);
+    var result = [];
+    while (true) {
+	/* iterates until StopIteration occurs */
+	try {
+	    result.push(iterator.next());
+	} catch (exc) {
+	    if (exc != StopIteration) { throw exc; }
+	    return result;
+	}
+    }
+}
+
+/* py-equiv: getattr(obj, attrname, default=None) */
+function getattr(obj, attrname, defaultValue) {
+    // when not passed, defaultValue === undefined
+    return (attrname in obj) ? obj[attrname] : defaultValue;
+}
+
+/* py-equiv: operator.attrgetter */
+function attrgetter(attrname) {
+    return function(obj) { return getattr(obj, attrname); };
+}
+
+
+/* returns a subslice of `lst` using `start`/`stop`/`step`
+ * start, stop might be negative
+ *
+ * >>> sliceList(['a', 'b', 'c', 'd', 'e', 'f'], 2)
+ * ['c', 'd', 'e', 'f']
+ * >>> sliceList(['a', 'b', 'c', 'd', 'e', 'f'], 2, -2)
+ * ['c', 'd']
+ * >>> sliceList(['a', 'b', 'c', 'd', 'e', 'f'], -3)
+ * ['d', 'e', 'f']
+ */
+function sliceList(lst, start, stop, step) {
+    var start = start || 0;
+    var stop = stop || lst.length;
+    var step = step || 1;
+    if (stop < 0) {
+	stop = max(lst.length+stop, 0);
+    }
+    if (start < 0) {
+	start = min(lst.length+start, lst.length);
+    }
+    var result = [];
+    for (var i=start; i < stop; i+=step) {
+	result.push(lst[i]);
+    }
+    return result;
+}
+
+/* returns a partial func that calls a method on its argument
+ * py-equiv: return lambda obj: getattr(obj, methname)(*args)
+ */
+function methodcaller(methname) {
+    var args = sliceList(arguments, 1);
+    return function(obj) {
+	return obj[methname].apply(obj, args);
+    };
+}
+
+/* use MochiKit's listMin / listMax */
+function min() { return listMin(arguments); }
+function max() { return listMax(arguments); }
+
+// tricky multiple assign
+// function assign(lst, varnames) {
+//     var length = min(lst.length, varnames.length);
+//     for(var i=0; i<length; i++) {
+// 	window[varnames[i]] = lst[i];
+//     }
+// }
+
+/*
+ * >>> d = dict(["x", "y", "z"], [0, 1, 2])
+ * >>> d['y']
+ * 1
+ * >>> d.y
+ * 1
+ */
+function dict(keys, values) {
+    if (keys.length != values.length) {
+	throw "got different number of keys and values !";
+    }
+    var newobj = {};
+    for(var i=0; i<keys.length; i++) {
+	newobj[keys[i]] = values[i];
+    }
+    return newobj;
+}
+
+
+function concat() {
+    return ''.join(list(arguments));
+}
+
+
+/**** class factories ****/
+
+// transforms a function into an unbound method
+function makeUnboundMethod(meth) {
+    function unboundMeth(self) {
+	var newargs = sliceList(arguments, 1);
+	return meth.apply(self, newargs);
+    }
+    unboundMeth.__name__ = meth.__name__;
+    return unboundMeth;
+}
+
+function attachMethodToClass(cls, methname, meth) {
+    meth.__name__ = methname;
+    // XXX : this is probably bad for memory usage
+    cls.__dict__[methname] = meth;
+    cls[methname] = makeUnboundMethod(meth); // for the class itself
+    cls.prototype[methname] = meth; // for the instance
+}
+
+// simple internal function that tells if the attribute should
+// be copied from baseclasses or not
+function _isAttrSkipped(attrname) {
+    var skipped = ['__class__', '__dict__', '__bases__', 'prototype'];
+    for (var i=0; i < skipped.length; i++) {
+	if (skipped[i] == attrname) {
+	    return true;
+	}
+    }
+    return false;
+}
+
+// internal function used to build the class constructor
+function makeConstructor(userctor) {
+    return function() {
+	// this is a proxy to user's __init__
+	if(userctor) {
+	    userctor.apply(this, arguments);
+	}
+    };
+}
+
+/* this is a js class factory. objects returned by this function behave
+ * more or less like a python class. The `class` function prototype is
+ * inspired by the python `type` builtin
+ * Important notes :
+ *  -> methods are _STATICALLY_ attached when the class it created
+ *  -> multiple inheritance was never tested, which means it doesn't work ;-)
+ */
+function defclass(name, bases, classdict) {
+    var baseclasses = bases || [];
+
+    // this is the static inheritance approach (<=> differs from python)
+    var basemeths = {};
+    var reverseLookup = [];
+    for(var i=baseclasses.length-1; i >= 0; i--) {
+	reverseLookup.push(baseclasses[i]);
+    }
+    reverseLookup.push({'__dict__' : classdict});
+
+    for(var i=0; i < reverseLookup.length; i++) {
+	var cls = reverseLookup[i];
+	for (var prop in cls.__dict__) {
+	    // XXX hack to avoid __init__, __bases__...
+	    if ( !_isAttrSkipped(prop) ) {
+		basemeths[prop] = cls.__dict__[prop];
+	    }
+	}
+    }
+    var userctor = basemeths['__init__'];
+    var constructor = makeConstructor(userctor);
+
+    // python-like interface
+    constructor.__name__ = name;
+    constructor.__bases__ = baseclasses;
+    constructor.__dict__ = {};
+    constructor.prototype.__class__ = constructor;
+    // make bound / unbound methods
+    for (var methname in basemeths) {
+	attachMethodToClass(constructor, methname, basemeths[methname]);
+    }
+
+    return constructor;
+}
+
+// Not really python-like
+CubicWeb = {};
+// XXX backward compatibility
+Erudi = CubicWeb;
+CubicWeb.loaded = [];
+CubicWeb.require = function(module) {
+    if (!CubicWeb.loaded.contains(module)) {
+	// a CubicWeb.load_javascript(module) function would require a dependency on ajax.js
+	log(module, ' is required but not loaded');
+    }
+};
+
+CubicWeb.provide = function(module) {
+    if (!CubicWeb.loaded.contains(module)) {
+	CubicWeb.loaded.push(module);
+    }
+};
+
+CubicWeb.provide('python.js');
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/cubicweb.schema.css	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,125 @@
+/* styles for schema views
+ *
+ *  :organization: Logilab
+ *  :copyright: 2003-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+ */
+
+div.relationDefinition { 
+  float: left;   
+  position: relative;
+  width: 60%;
+  padding: 0;
+}
+
+div.acl{ 
+  position: relative;
+/*  right: 20%;*/
+  float: right; 
+  width: 10%;
+  padding:0px 0px 0px 2em;
+}
+
+div.schema table {
+  width : 100%;
+}
+
+div.entityAttributes{ 
+ margin: 3em 0 5em; 
+ font: normal 9pt Arial;
+}
+
+div.box div.title{
+ border-bottom:1px solid black;
+ padding:0.2em 0.2em;
+ margin: 0 auto;
+}
+
+span.stereotype {
+  font-style: italic;
+  font-size:  80%;
+}
+
+div.box div.line span{
+ padding: 0 1em;
+}
+
+div.body{
+ padding : 0.2em;
+ padding-bottom : 0.4em; 
+ overflow: auto;
+}
+
+div.body table td{
+ padding:0.4em; 
+ }
+
+div.box{
+ float:left;
+ border:1px solid black;
+ width:50%; 
+}
+
+div.vl{
+ float:left;
+ position:relative; 
+ margin-top:1em; 
+ border-top:1px solid black; 
+ line-height : 1px; 
+ width: 1em; 
+ height : 0px}
+
+div.hl{
+ float:left;
+ position:relative; 
+ margin-top:1em;  
+ border-left:1px solid black; 
+ width: 1px; 
+ height : 10px
+}
+
+div.rels{
+ float:left; 
+ position:relative; 
+ margin-top:1em; 
+ border-left:1px solid black;  
+ margin-left:-2px;}
+
+div.firstrel, div.rel, div.lastrel{
+ line-height:1em;
+ border:1px solid white;
+ border-bottom:1px solid black;
+ padding: 0px 0.6em 0.2em;
+}
+
+/* FIXME set to 9em or an image*/
+div.rel, div.lastrel{ 
+ margin-top:0.7em}
+
+div.vars{
+ float:left;
+}
+
+div.firstvar, div.var, div.lastvar{
+ line-height:1em;
+ border:1px solid black; 
+ padding:0.2em}
+
+div.firstvar{
+ margin-top:1em;}
+
+div.var{
+ margin-top:0.5em; 
+}
+
+div.lastvar{
+ border:none;
+}
+
+div.firstvar a, 
+div.var a,
+div.rel a,
+div.firstrel a{ 
+ padding:0px ! important;
+ margin : 0px ! important; 
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/cubicweb.sortable.js	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,249 @@
+/* Adapted from MochiKit's example to use custom cubicweb attribute
+   and a stable sort (merge sort) instead of default's js array sort
+
+merge sort JS implementation was found here :
+http://en.literateprograms.org/Merge_sort_(JavaScript)
+
+
+On page load, the SortableManager:
+
+- Finds the table by its id (sortable_table).
+- Parses its thead for columns with a "mochi:format" attribute.
+- Parses the data out of the tbody based upon information given in the
+ "cubicweb:sortvalue" attribute, and clones the tr elements for later re-use.
+- Clones the column header th elements for use as a template when drawing
+ sort arrow columns.
+- Stores away a reference to the tbody, as it will be replaced on each sort.
+
+On sort request:
+
+- Sorts the data based on the given key and direction
+- Creates a new tbody from the rows in the new ordering
+- Replaces the column header th elements with clickable versions, adding an
+ indicator (&uarr; or &darr;) to the most recently sorted column.
+
+*/
+
+//************** merge sort implementation ***************//
+Sortable = {}
+
+Sortable.msort = function(array, begin, end, cmpfunc) {
+    var size=end-begin;
+    if(size<2) return;
+    
+    var begin_right=begin+Math.floor(size/2);
+    
+    Sortable.msort(array, begin, begin_right, cmpfunc);
+    Sortable.msort(array, begin_right, end, cmpfunc);
+    Sortable.merge(array, begin, begin_right, end, cmpfunc);
+}
+
+Sortable.merge_sort = function(array, cmpfunc) {
+    Sortable.msort(array, 0, array.length, cmpfunc);
+}
+
+Sortable.merge = function(array, begin, begin_right, end, cmpfunc) {
+    for(;begin<begin_right; ++begin) {
+	// if array[begin] > array[begin_right]
+	if(cmpfunc(array[begin], array[begin_right]) == 1) {
+	    var v = array[begin];
+	    array[begin] = array[begin_right];
+	    Sortable.insert(array, begin_right, end, v, cmpfunc);
+	}
+    }
+}
+
+Array.prototype.swap=function(a, b) {
+    var tmp = this[a];
+    this[a] = this[b];
+    this[b] = tmp;
+}
+
+
+Sortable.insert = function(array, begin, end, v, cmpfunc) {
+    // while(begin+1<end && array[begin+1]<v) {
+    while(begin+1<end && cmpfunc(array[begin+1], v) == -1) {
+	array.swap(begin, begin+1);
+	++begin;
+    }
+    array[begin]=v;
+}
+
+//************** auto-sortable tables ***************//
+
+Sortable.SortableManager = function () {
+    this.thead = null;
+    this.tbody = null;
+    this.columns = [];
+    this.rows = [];
+    this.sortState = {};
+    this.sortkey = 0;
+};
+
+mouseOverFunc = function () {
+    addElementClass(this, "over");
+};
+
+mouseOutFunc = function () {
+    removeElementClass(this, "over");
+};
+
+Sortable.ignoreEvent = function (ev) {
+    if (ev && ev.preventDefault) {
+	ev.preventDefault();
+	ev.stopPropagation();
+    } else if (typeof(event) != 'undefined') {
+	event.cancelBubble = false;
+	event.returnValue = false;
+    }
+};
+
+
+Sortable.getTableHead = function(table) {
+    var thead = table.getElementsByTagName('thead')[0];
+    if ( !thead ) {
+	thead = table.getElementsByTagName('tr')[0];
+    }
+    return thead;
+}
+
+Sortable.getTableBody = function(table) {
+    var tbody = table.getElementsByTagName('tbody')[0];
+    if ( !tbody ) {
+	tbody = table; // fall back to the table element itself
+    }
+    return tbody;
+}
+
+jQuery.extend(Sortable.SortableManager.prototype, {
+    
+    "initWithTable" : function (table) {
+	/***  Initialize the SortableManager with a table object  ***/
+	// Find the thead
+	this.thead = Sortable.getTableHead(table);
+	// get the mochi:format key and contents for each column header
+	var cols = this.thead.getElementsByTagName('th');
+	for (var i = 0; i < cols.length; i++) {
+	    var node = cols[i];
+	    var o = node.childNodes;
+	    node.onclick = this.onSortClick(i);
+	    node.onmousedown = Sortable.ignoreEvent;
+	    node.onmouseover = mouseOverFunc;
+	    node.onmouseout = mouseOutFunc;
+	    this.columns.push({
+		"element": node,
+		"proto": node.cloneNode(true)
+	    });
+	}
+	// scrape the tbody for data
+	this.tbody = Sortable.getTableBody(table);
+	// every row
+	var rows = this.tbody.getElementsByTagName('tr');
+	for (var i = 0; i < rows.length; i++) {
+	    // every cell
+	    var row = rows[i];
+	    var cols = row.getElementsByTagName('td');
+	    var rowData = [];
+	    for (var j = 0; j < cols.length; j++) {
+		// scrape the text and build the appropriate object out of it
+		var cell = cols[j];
+		rowData.push([evalJSON(cell.getAttribute('cubicweb:sortvalue'))]);
+	    }
+	    // stow away a reference to the TR and save it
+	    rowData.row = row.cloneNode(true);
+	    this.rows.push(rowData);
+	}
+	// do initial sort on first column
+	// this.drawSortedRows(null, true, false);
+
+    },
+
+    "onSortClick" : function (name) {
+	/*** Return a sort function for click events  ***/
+	return method(this, function () {
+	    var order = this.sortState[name];
+	    if (order == null) {
+		order = true;
+	    } else if (name == this.sortkey) {
+		order = !order;
+	    }
+	    this.drawSortedRows(name, order, true);
+	});
+    },
+    
+    "drawSortedRows" : function (key, forward, clicked) {
+	/***  Draw the new sorted table body, and modify the column headers
+              if appropriate
+         ***/
+	this.sortkey = key;
+	// sort based on the state given (forward or reverse)
+	var cmp = (forward ? keyComparator : reverseKeyComparator);
+	Sortable.merge_sort(this.rows, cmp(key));
+	
+	// save it so we can flip next time
+	this.sortState[key] = forward;
+	// get every "row" element from this.rows and make a new tbody
+	var newRows = [];
+	for (var i=0; i < this.rows.length; i++){
+	    var row = this.rows[i].row;
+	    if (i%2) {
+		removeElementClass(row, 'even');
+		addElementClass(row, 'odd');
+	    } else {
+		removeElementClass(row, 'odd');
+		addElementClass(row, 'even');
+	    }
+	    newRows.push(row);
+	}
+	// var newBody = TBODY(null, map(itemgetter("row"), this.rows));
+	var newBody = TBODY(null, newRows);
+	// swap in the new tbody
+	this.tbody = swapDOM(this.tbody, newBody);
+	for (var i = 0; i < this.columns.length; i++) {
+	    var col = this.columns[i];
+	    var node = col.proto.cloneNode(true);
+	    // remove the existing events to minimize IE leaks
+	    col.element.onclick = null;
+	    col.element.onmousedown = null;
+	    col.element.onmouseover = null;
+	    col.element.onmouseout = null;
+	    // set new events for the new node
+	    node.onclick = this.onSortClick(i);
+	    node.onmousedown = Sortable.ignoreEvent;
+	    node.onmouseover = mouseOverFunc;
+	    node.onmouseout = mouseOutFunc;
+	    // if this is the sorted column
+	    if (key == i) {
+		// \u2193 is down arrow, \u2191 is up arrow
+		// forward sorts mean the rows get bigger going down
+		var arrow = (forward ? "\u2193" : "\u2191");
+		// add the character to the column header
+		node.appendChild(SPAN(null, arrow));
+		if (clicked) {
+		    node.onmouseover();
+		}
+	    }
+
+	    // swap in the new th
+	    col.element = swapDOM(col.element, node);
+	}
+    }
+});
+
+var sortableManagers = [];
+
+/*
+ * Find each table under `rootNode` and make them sortable
+ */
+Sortable.makeTablesSortable = function(rootNode) {
+    var tables = getElementsByTagAndClassName('table', 'listing', rootNode);
+    for(var i=0; i < tables.length; i++) {
+	var sortableManager = new Sortable.SortableManager();
+	sortableManager.initWithTable(tables[i]);
+	sortableManagers.push(sortableManager);
+    }
+}
+
+jQuery(document).ready(Sortable.makeTablesSortable);
+
+CubicWeb.provide('sortable.js');
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/cubicweb.suggest.css	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,43 @@
+/* styles for input with suggestions (as for tags and keywords)
+ *
+ *  :organization: Logilab
+ *  :copyright: 2003-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+ */
+
+div.sgform div.combobox {
+  z-index: 300;
+  margin-left: 3px;
+  width: 150px;
+}
+
+div.sgform div.combobox input.suggestions {
+  width: 100%;
+}
+
+div.sgform  select{
+  width: 155px;
+}
+
+div.sgformbuttons {
+  margin-left: 3px;
+}
+
+div.sgformbuttons a {
+  font-weight: bold;
+  padding:2px 0px;
+}
+
+a.sglink {
+  color: #000;
+  font-weight: bold;
+  display: block;
+  background: url("puce.png") 98% 5px no-repeat;
+  margin-left: 3px;
+  padding: 2px 16px 2px 0px;
+}
+
+div.suggholder {
+  z-index: 300;
+  position: absolute;
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/cubicweb.tablesorter.css	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,40 @@
+/* tables */
+/*table.tablesorter {
+	font-family:arial;
+	background-color: #CDCDCD;
+	margin:10px 0pt 15px;
+	font-size: 8pt;
+	width: 100%;
+	text-align: left;
+} */
+
+table.listing tr th {
+  border: 1px solid #FFF;
+  font-size: 8pt;
+  padding: 4px;
+}
+
+table.listing tr .header {
+  cursor: pointer;
+}
+
+table.listing td {
+  color: #3D3D3D;
+  padding: 4px;
+  background-color: #FFF;
+  vertical-align: top;
+}
+
+
+table.listing tr .headerSortUp {
+  background-image: url(asc.gif);
+}
+table.listing tr .headerSortDown {
+  background-image: url(desc.gif);
+}
+table.listing tr .headerSortDown, table.listing tr .headerSortUp {
+   background-color: #DDD;
+   background-repeat: no-repeat;
+   background-position: center right;
+}
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/cubicweb.timeline-bundle.js	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,5919 @@
+
+var SimileAjax = {
+    loaded:                 false,
+    loadingScriptsCount:    0,
+    error:                  null,
+    params:                 { bundle:"true" }
+};
+
+
+/**
+ * Parse out the query parameters from a URL
+ * @param {String} url    the url to parse, or location.href if undefined
+ * @param {Object} to     optional object to extend with the parameters
+ * @param {Object} types  optional object mapping keys to value types
+ *        (String, Number, Boolean or Array, String by default)
+ * @return a key/value Object whose keys are the query parameter names
+ * @type Object
+ */
+SimileAjax.parseURLParameters = function(url, to, types) {
+    to = to || {};
+    types = types || {};
+
+    if (typeof url == "undefined") {
+        url = location.href;
+    }
+    var q = url.indexOf("?");
+    if (q < 0) {
+        return to;
+    }
+    url = (url+"#").slice(q+1, url.indexOf("#")); // toss the URL fragment
+
+    var params = url.split("&"), param, parsed = {};
+    var decode = window.decodeURIComponent || unescape;
+    for (var i = 0; param = params[i]; i++) {
+        var eq = param.indexOf("=");
+        var name = decode(param.slice(0,eq));
+        var old = parsed[name];
+        if (typeof old == "undefined") {
+            old = [];
+        } else if (!(old instanceof Array)) {
+            old = [old];
+        }
+        parsed[name] = old.concat(decode(param.slice(eq+1)));
+    }
+    for (var i in parsed) {
+        if (!parsed.hasOwnProperty(i)) continue;
+        var type = types[i] || String;
+        var data = parsed[i];
+        if (!(data instanceof Array)) {
+            data = [data];
+        }
+        if (type === Boolean && data[0] == "false") {
+            to[i] = false; // because Boolean("false") === true
+        } else {
+            to[i] = type.apply(this, data);
+        }
+    }
+    return to;
+};
+
+
+SimileAjax.Platform = new Object();
+
+SimileAjax.urlPrefix = baseuri();
+
+/* jquery-1.2.6.js */
+(function(){var _jQuery=window.jQuery,_$=window.$;
+var jQuery=window.jQuery=window.$=function(selector,context){return new jQuery.fn.init(selector,context);
+};
+var quickExpr=/^[^<]*(<(.|\s)+>)[^>]*$|^#(\w+)$/,isSimple=/^.[^:#\[\.]*$/,undefined;
+jQuery.fn=jQuery.prototype={init:function(selector,context){selector=selector||document;
+if(selector.nodeType){this[0]=selector;
+this.length=1;
+return this;
+}if(typeof selector=="string"){var match=quickExpr.exec(selector);
+if(match&&(match[1]||!context)){if(match[1]){selector=jQuery.clean([match[1]],context);
+}else{var elem=document.getElementById(match[3]);
+if(elem){if(elem.id!=match[3]){return jQuery().find(selector);
+}return jQuery(elem);
+}selector=[];
+}}else{return jQuery(context).find(selector);
+}}else{if(jQuery.isFunction(selector)){return jQuery(document)[jQuery.fn.ready?"ready":"load"](selector);
+}}return this.setArray(jQuery.makeArray(selector));
+},jquery:"1.2.6",size:function(){return this.length;
+},length:0,get:function(num){return num==undefined?jQuery.makeArray(this):this[num];
+},pushStack:function(elems){var ret=jQuery(elems);
+ret.prevObject=this;
+return ret;
+},setArray:function(elems){this.length=0;
+Array.prototype.push.apply(this,elems);
+return this;
+},each:function(callback,args){return jQuery.each(this,callback,args);
+},index:function(elem){var ret=-1;
+return jQuery.inArray(elem&&elem.jquery?elem[0]:elem,this);
+},attr:function(name,value,type){var options=name;
+if(name.constructor==String){if(value===undefined){return this[0]&&jQuery[type||"attr"](this[0],name);
+}else{options={};
+options[name]=value;
+}}return this.each(function(i){for(name in options){jQuery.attr(type?this.style:this,name,jQuery.prop(this,options[name],type,i,name));
+}});
+},css:function(key,value){if((key=="width"||key=="height")&&parseFloat(value)<0){value=undefined;
+}return this.attr(key,value,"curCSS");
+},text:function(text){if(typeof text!="object"&&text!=null){return this.empty().append((this[0]&&this[0].ownerDocument||document).createTextNode(text));
+}var ret="";
+jQuery.each(text||this,function(){jQuery.each(this.childNodes,function(){if(this.nodeType!=8){ret+=this.nodeType!=1?this.nodeValue:jQuery.fn.text([this]);
+}});
+});
+return ret;
+},wrapAll:function(html){if(this[0]){jQuery(html,this[0].ownerDocument).clone().insertBefore(this[0]).map(function(){var elem=this;
+while(elem.firstChild){elem=elem.firstChild;
+}return elem;
+}).append(this);
+}return this;
+},wrapInner:function(html){return this.each(function(){jQuery(this).contents().wrapAll(html);
+});
+},wrap:function(html){return this.each(function(){jQuery(this).wrapAll(html);
+});
+},append:function(){return this.domManip(arguments,true,false,function(elem){if(this.nodeType==1){this.appendChild(elem);
+}});
+},prepend:function(){return this.domManip(arguments,true,true,function(elem){if(this.nodeType==1){this.insertBefore(elem,this.firstChild);
+}});
+},before:function(){return this.domManip(arguments,false,false,function(elem){this.parentNode.insertBefore(elem,this);
+});
+},after:function(){return this.domManip(arguments,false,true,function(elem){this.parentNode.insertBefore(elem,this.nextSibling);
+});
+},end:function(){return this.prevObject||jQuery([]);
+},find:function(selector){var elems=jQuery.map(this,function(elem){return jQuery.find(selector,elem);
+});
+return this.pushStack(/[^+>] [^+>]/.test(selector)||selector.indexOf("..")>-1?jQuery.unique(elems):elems);
+},clone:function(events){var ret=this.map(function(){if(jQuery.browser.msie&&!jQuery.isXMLDoc(this)){var clone=this.cloneNode(true),container=document.createElement("div");
+container.appendChild(clone);
+return jQuery.clean([container.innerHTML])[0];
+}else{return this.cloneNode(true);
+}});
+var clone=ret.find("*").andSelf().each(function(){if(this[expando]!=undefined){this[expando]=null;
+}});
+if(events===true){this.find("*").andSelf().each(function(i){if(this.nodeType==3){return ;
+}var events=jQuery.data(this,"events");
+for(var type in events){for(var handler in events[type]){jQuery.event.add(clone[i],type,events[type][handler],events[type][handler].data);
+}}});
+}return ret;
+},filter:function(selector){return this.pushStack(jQuery.isFunction(selector)&&jQuery.grep(this,function(elem,i){return selector.call(elem,i);
+})||jQuery.multiFilter(selector,this));
+},not:function(selector){if(selector.constructor==String){if(isSimple.test(selector)){return this.pushStack(jQuery.multiFilter(selector,this,true));
+}else{selector=jQuery.multiFilter(selector,this);
+}}var isArrayLike=selector.length&&selector[selector.length-1]!==undefined&&!selector.nodeType;
+return this.filter(function(){return isArrayLike?jQuery.inArray(this,selector)<0:this!=selector;
+});
+},add:function(selector){return this.pushStack(jQuery.unique(jQuery.merge(this.get(),typeof selector=="string"?jQuery(selector):jQuery.makeArray(selector))));
+},is:function(selector){return !!selector&&jQuery.multiFilter(selector,this).length>0;
+},hasClass:function(selector){return this.is("."+selector);
+},val:function(value){if(value==undefined){if(this.length){var elem=this[0];
+if(jQuery.nodeName(elem,"select")){var index=elem.selectedIndex,values=[],options=elem.options,one=elem.type=="select-one";
+if(index<0){return null;
+}for(var i=one?index:0,max=one?index+1:options.length;
+i<max;
+i++){var option=options[i];
+if(option.selected){value=jQuery.browser.msie&&!option.attributes.value.specified?option.text:option.value;
+if(one){return value;
+}values.push(value);
+}}return values;
+}else{return(this[0].value||"").replace(/\r/g,"");
+}}return undefined;
+}if(value.constructor==Number){value+="";
+}return this.each(function(){if(this.nodeType!=1){return ;
+}if(value.constructor==Array&&/radio|checkbox/.test(this.type)){this.checked=(jQuery.inArray(this.value,value)>=0||jQuery.inArray(this.name,value)>=0);
+}else{if(jQuery.nodeName(this,"select")){var values=jQuery.makeArray(value);
+jQuery("option",this).each(function(){this.selected=(jQuery.inArray(this.value,values)>=0||jQuery.inArray(this.text,values)>=0);
+});
+if(!values.length){this.selectedIndex=-1;
+}}else{this.value=value;
+}}});
+},html:function(value){return value==undefined?(this[0]?this[0].innerHTML:null):this.empty().append(value);
+},replaceWith:function(value){return this.after(value).remove();
+},eq:function(i){return this.slice(i,i+1);
+},slice:function(){return this.pushStack(Array.prototype.slice.apply(this,arguments));
+},map:function(callback){return this.pushStack(jQuery.map(this,function(elem,i){return callback.call(elem,i,elem);
+}));
+},andSelf:function(){return this.add(this.prevObject);
+},data:function(key,value){var parts=key.split(".");
+parts[1]=parts[1]?"."+parts[1]:"";
+if(value===undefined){var data=this.triggerHandler("getData"+parts[1]+"!",[parts[0]]);
+if(data===undefined&&this.length){data=jQuery.data(this[0],key);
+}return data===undefined&&parts[1]?this.data(parts[0]):data;
+}else{return this.trigger("setData"+parts[1]+"!",[parts[0],value]).each(function(){jQuery.data(this,key,value);
+});
+}},removeData:function(key){return this.each(function(){jQuery.removeData(this,key);
+});
+},domManip:function(args,table,reverse,callback){var clone=this.length>1,elems;
+return this.each(function(){if(!elems){elems=jQuery.clean(args,this.ownerDocument);
+if(reverse){elems.reverse();
+}}var obj=this;
+if(table&&jQuery.nodeName(this,"table")&&jQuery.nodeName(elems[0],"tr")){obj=this.getElementsByTagName("tbody")[0]||this.appendChild(this.ownerDocument.createElement("tbody"));
+}var scripts=jQuery([]);
+jQuery.each(elems,function(){var elem=clone?jQuery(this).clone(true)[0]:this;
+if(jQuery.nodeName(elem,"script")){scripts=scripts.add(elem);
+}else{if(elem.nodeType==1){scripts=scripts.add(jQuery("script",elem).remove());
+}callback.call(obj,elem);
+}});
+scripts.each(evalScript);
+});
+}};
+jQuery.fn.init.prototype=jQuery.fn;
+function evalScript(i,elem){if(elem.src){jQuery.ajax({url:elem.src,async:false,dataType:"script"});
+}else{jQuery.globalEval(elem.text||elem.textContent||elem.innerHTML||"");
+}if(elem.parentNode){elem.parentNode.removeChild(elem);
+}}function now(){return +new Date;
+}jQuery.extend=jQuery.fn.extend=function(){var target=arguments[0]||{},i=1,length=arguments.length,deep=false,options;
+if(target.constructor==Boolean){deep=target;
+target=arguments[1]||{};
+i=2;
+}if(typeof target!="object"&&typeof target!="function"){target={};
+}if(length==i){target=this;
+--i;
+}for(;
+i<length;
+i++){if((options=arguments[i])!=null){for(var name in options){var src=target[name],copy=options[name];
+if(target===copy){continue;
+}if(deep&&copy&&typeof copy=="object"&&!copy.nodeType){target[name]=jQuery.extend(deep,src||(copy.length!=null?[]:{}),copy);
+}else{if(copy!==undefined){target[name]=copy;
+}}}}}return target;
+};
+var expando="jQuery"+now(),uuid=0,windowData={},exclude=/z-?index|font-?weight|opacity|zoom|line-?height/i,defaultView=document.defaultView||{};
+jQuery.extend({noConflict:function(deep){window.$=_$;
+if(deep){window.jQuery=_jQuery;
+}return jQuery;
+},isFunction:function(fn){return !!fn&&typeof fn!="string"&&!fn.nodeName&&fn.constructor!=Array&&/^[\s[]?function/.test(fn+"");
+},isXMLDoc:function(elem){return elem.documentElement&&!elem.body||elem.tagName&&elem.ownerDocument&&!elem.ownerDocument.body;
+},globalEval:function(data){data=jQuery.trim(data);
+if(data){var head=document.getElementsByTagName("head")[0]||document.documentElement,script=document.createElement("script");
+script.type="text/javascript";
+if(jQuery.browser.msie){script.text=data;
+}else{script.appendChild(document.createTextNode(data));
+}head.insertBefore(script,head.firstChild);
+head.removeChild(script);
+}},nodeName:function(elem,name){return elem.nodeName&&elem.nodeName.toUpperCase()==name.toUpperCase();
+},cache:{},data:function(elem,name,data){elem=elem==window?windowData:elem;
+var id=elem[expando];
+if(!id){id=elem[expando]=++uuid;
+}if(name&&!jQuery.cache[id]){jQuery.cache[id]={};
+}if(data!==undefined){jQuery.cache[id][name]=data;
+}return name?jQuery.cache[id][name]:id;
+},removeData:function(elem,name){elem=elem==window?windowData:elem;
+var id=elem[expando];
+if(name){if(jQuery.cache[id]){delete jQuery.cache[id][name];
+name="";
+for(name in jQuery.cache[id]){break;
+}if(!name){jQuery.removeData(elem);
+}}}else{try{delete elem[expando];
+}catch(e){if(elem.removeAttribute){elem.removeAttribute(expando);
+}}delete jQuery.cache[id];
+}},each:function(object,callback,args){var name,i=0,length=object.length;
+if(args){if(length==undefined){for(name in object){if(callback.apply(object[name],args)===false){break;
+}}}else{for(;
+i<length;
+){if(callback.apply(object[i++],args)===false){break;
+}}}}else{if(length==undefined){for(name in object){if(callback.call(object[name],name,object[name])===false){break;
+}}}else{for(var value=object[0];
+i<length&&callback.call(value,i,value)!==false;
+value=object[++i]){}}}return object;
+},prop:function(elem,value,type,i,name){if(jQuery.isFunction(value)){value=value.call(elem,i);
+}return value&&value.constructor==Number&&type=="curCSS"&&!exclude.test(name)?value+"px":value;
+},className:{add:function(elem,classNames){jQuery.each((classNames||"").split(/\s+/),function(i,className){if(elem.nodeType==1&&!jQuery.className.has(elem.className,className)){elem.className+=(elem.className?" ":"")+className;
+}});
+},remove:function(elem,classNames){if(elem.nodeType==1){elem.className=classNames!=undefined?jQuery.grep(elem.className.split(/\s+/),function(className){return !jQuery.className.has(classNames,className);
+}).join(" "):"";
+}},has:function(elem,className){return jQuery.inArray(className,(elem.className||elem).toString().split(/\s+/))>-1;
+}},swap:function(elem,options,callback){var old={};
+for(var name in options){old[name]=elem.style[name];
+elem.style[name]=options[name];
+}callback.call(elem);
+for(var name in options){elem.style[name]=old[name];
+}},css:function(elem,name,force){if(name=="width"||name=="height"){var val,props={position:"absolute",visibility:"hidden",display:"block"},which=name=="width"?["Left","Right"]:["Top","Bottom"];
+function getWH(){val=name=="width"?elem.offsetWidth:elem.offsetHeight;
+var padding=0,border=0;
+jQuery.each(which,function(){padding+=parseFloat(jQuery.curCSS(elem,"padding"+this,true))||0;
+border+=parseFloat(jQuery.curCSS(elem,"border"+this+"Width",true))||0;
+});
+val-=Math.round(padding+border);
+}if(jQuery(elem).is(":visible")){getWH();
+}else{jQuery.swap(elem,props,getWH);
+}return Math.max(0,val);
+}return jQuery.curCSS(elem,name,force);
+},curCSS:function(elem,name,force){var ret,style=elem.style;
+function color(elem){if(!jQuery.browser.safari){return false;
+}var ret=defaultView.getComputedStyle(elem,null);
+return !ret||ret.getPropertyValue("color")=="";
+}if(name=="opacity"&&jQuery.browser.msie){ret=jQuery.attr(style,"opacity");
+return ret==""?"1":ret;
+}if(jQuery.browser.opera&&name=="display"){var save=style.outline;
+style.outline="0 solid black";
+style.outline=save;
+}if(name.match(/float/i)){name=styleFloat;
+}if(!force&&style&&style[name]){ret=style[name];
+}else{if(defaultView.getComputedStyle){if(name.match(/float/i)){name="float";
+}name=name.replace(/([A-Z])/g,"-$1").toLowerCase();
+var computedStyle=defaultView.getComputedStyle(elem,null);
+if(computedStyle&&!color(elem)){ret=computedStyle.getPropertyValue(name);
+}else{var swap=[],stack=[],a=elem,i=0;
+for(;
+a&&color(a);
+a=a.parentNode){stack.unshift(a);
+}for(;
+i<stack.length;
+i++){if(color(stack[i])){swap[i]=stack[i].style.display;
+stack[i].style.display="block";
+}}ret=name=="display"&&swap[stack.length-1]!=null?"none":(computedStyle&&computedStyle.getPropertyValue(name))||"";
+for(i=0;
+i<swap.length;
+i++){if(swap[i]!=null){stack[i].style.display=swap[i];
+}}}if(name=="opacity"&&ret==""){ret="1";
+}}else{if(elem.currentStyle){var camelCase=name.replace(/\-(\w)/g,function(all,letter){return letter.toUpperCase();
+});
+ret=elem.currentStyle[name]||elem.currentStyle[camelCase];
+if(!/^\d+(px)?$/i.test(ret)&&/^\d/.test(ret)){var left=style.left,rsLeft=elem.runtimeStyle.left;
+elem.runtimeStyle.left=elem.currentStyle.left;
+style.left=ret||0;
+ret=style.pixelLeft+"px";
+style.left=left;
+elem.runtimeStyle.left=rsLeft;
+}}}}return ret;
+},clean:function(elems,context){var ret=[];
+context=context||document;
+if(typeof context.createElement=="undefined"){context=context.ownerDocument||context[0]&&context[0].ownerDocument||document;
+}jQuery.each(elems,function(i,elem){if(!elem){return ;
+}if(elem.constructor==Number){elem+="";
+}if(typeof elem=="string"){elem=elem.replace(/(<(\w+)[^>]*?)\/>/g,function(all,front,tag){return tag.match(/^(abbr|br|col|img|input|link|meta|param|hr|area|embed)$/i)?all:front+"></"+tag+">";
+});
+var tags=jQuery.trim(elem).toLowerCase(),div=context.createElement("div");
+var wrap=!tags.indexOf("<opt")&&[1,"<select multiple='multiple'>","</select>"]||!tags.indexOf("<leg")&&[1,"<fieldset>","</fieldset>"]||tags.match(/^<(thead|tbody|tfoot|colg|cap)/)&&[1,"<table>","</table>"]||!tags.indexOf("<tr")&&[2,"<table><tbody>","</tbody></table>"]||(!tags.indexOf("<td")||!tags.indexOf("<th"))&&[3,"<table><tbody><tr>","</tr></tbody></table>"]||!tags.indexOf("<col")&&[2,"<table><tbody></tbody><colgroup>","</colgroup></table>"]||jQuery.browser.msie&&[1,"div<div>","</div>"]||[0,"",""];
+div.innerHTML=wrap[1]+elem+wrap[2];
+while(wrap[0]--){div=div.lastChild;
+}if(jQuery.browser.msie){var tbody=!tags.indexOf("<table")&&tags.indexOf("<tbody")<0?div.firstChild&&div.firstChild.childNodes:wrap[1]=="<table>"&&tags.indexOf("<tbody")<0?div.childNodes:[];
+for(var j=tbody.length-1;
+j>=0;
+--j){if(jQuery.nodeName(tbody[j],"tbody")&&!tbody[j].childNodes.length){tbody[j].parentNode.removeChild(tbody[j]);
+}}if(/^\s/.test(elem)){div.insertBefore(context.createTextNode(elem.match(/^\s*/)[0]),div.firstChild);
+}}elem=jQuery.makeArray(div.childNodes);
+}if(elem.length===0&&(!jQuery.nodeName(elem,"form")&&!jQuery.nodeName(elem,"select"))){return ;
+}if(elem[0]==undefined||jQuery.nodeName(elem,"form")||elem.options){ret.push(elem);
+}else{ret=jQuery.merge(ret,elem);
+}});
+return ret;
+},attr:function(elem,name,value){if(!elem||elem.nodeType==3||elem.nodeType==8){return undefined;
+}var notxml=!jQuery.isXMLDoc(elem),set=value!==undefined,msie=jQuery.browser.msie;
+name=notxml&&jQuery.props[name]||name;
+if(elem.tagName){var special=/href|src|style/.test(name);
+if(name=="selected"&&jQuery.browser.safari){elem.parentNode.selectedIndex;
+}if(name in elem&&notxml&&!special){if(set){if(name=="type"&&jQuery.nodeName(elem,"input")&&elem.parentNode){throw"type property can't be changed";
+}elem[name]=value;
+}if(jQuery.nodeName(elem,"form")&&elem.getAttributeNode(name)){return elem.getAttributeNode(name).nodeValue;
+}return elem[name];
+}if(msie&&notxml&&name=="style"){return jQuery.attr(elem.style,"cssText",value);
+}if(set){elem.setAttribute(name,""+value);
+}var attr=msie&&notxml&&special?elem.getAttribute(name,2):elem.getAttribute(name);
+return attr===null?undefined:attr;
+}if(msie&&name=="opacity"){if(set){elem.zoom=1;
+elem.filter=(elem.filter||"").replace(/alpha\([^)]*\)/,"")+(parseInt(value)+""=="NaN"?"":"alpha(opacity="+value*100+")");
+}return elem.filter&&elem.filter.indexOf("opacity=")>=0?(parseFloat(elem.filter.match(/opacity=([^)]*)/)[1])/100)+"":"";
+}name=name.replace(/-([a-z])/ig,function(all,letter){return letter.toUpperCase();
+});
+if(set){elem[name]=value;
+}return elem[name];
+},trim:function(text){return(text||"").replace(/^\s+|\s+$/g,"");
+},makeArray:function(array){var ret=[];
+if(array!=null){var i=array.length;
+if(i==null||array.split||array.setInterval||array.call){ret[0]=array;
+}else{while(i){ret[--i]=array[i];
+}}}return ret;
+},inArray:function(elem,array){for(var i=0,length=array.length;
+i<length;
+i++){if(array[i]===elem){return i;
+}}return -1;
+},merge:function(first,second){var i=0,elem,pos=first.length;
+if(jQuery.browser.msie){while(elem=second[i++]){if(elem.nodeType!=8){first[pos++]=elem;
+}}}else{while(elem=second[i++]){first[pos++]=elem;
+}}return first;
+},unique:function(array){var ret=[],done={};
+try{for(var i=0,length=array.length;
+i<length;
+i++){var id=jQuery.data(array[i]);
+if(!done[id]){done[id]=true;
+ret.push(array[i]);
+}}}catch(e){ret=array;
+}return ret;
+},grep:function(elems,callback,inv){var ret=[];
+for(var i=0,length=elems.length;
+i<length;
+i++){if(!inv!=!callback(elems[i],i)){ret.push(elems[i]);
+}}return ret;
+},map:function(elems,callback){var ret=[];
+for(var i=0,length=elems.length;
+i<length;
+i++){var value=callback(elems[i],i);
+if(value!=null){ret[ret.length]=value;
+}}return ret.concat.apply([],ret);
+}});
+var userAgent=navigator.userAgent.toLowerCase();
+jQuery.browser={version:(userAgent.match(/.+(?:rv|it|ra|ie)[\/: ]([\d.]+)/)||[])[1],safari:/webkit/.test(userAgent),opera:/opera/.test(userAgent),msie:/msie/.test(userAgent)&&!/opera/.test(userAgent),mozilla:/mozilla/.test(userAgent)&&!/(compatible|webkit)/.test(userAgent)};
+var styleFloat=jQuery.browser.msie?"styleFloat":"cssFloat";
+jQuery.extend({boxModel:!jQuery.browser.msie||document.compatMode=="CSS1Compat",props:{"for":"htmlFor","class":"className","float":styleFloat,cssFloat:styleFloat,styleFloat:styleFloat,readonly:"readOnly",maxlength:"maxLength",cellspacing:"cellSpacing"}});
+jQuery.each({parent:function(elem){return elem.parentNode;
+},parents:function(elem){return jQuery.dir(elem,"parentNode");
+},next:function(elem){return jQuery.nth(elem,2,"nextSibling");
+},prev:function(elem){return jQuery.nth(elem,2,"previousSibling");
+},nextAll:function(elem){return jQuery.dir(elem,"nextSibling");
+},prevAll:function(elem){return jQuery.dir(elem,"previousSibling");
+},siblings:function(elem){return jQuery.sibling(elem.parentNode.firstChild,elem);
+},children:function(elem){return jQuery.sibling(elem.firstChild);
+},contents:function(elem){return jQuery.nodeName(elem,"iframe")?elem.contentDocument||elem.contentWindow.document:jQuery.makeArray(elem.childNodes);
+}},function(name,fn){jQuery.fn[name]=function(selector){var ret=jQuery.map(this,fn);
+if(selector&&typeof selector=="string"){ret=jQuery.multiFilter(selector,ret);
+}return this.pushStack(jQuery.unique(ret));
+};
+});
+jQuery.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(name,original){jQuery.fn[name]=function(){var args=arguments;
+return this.each(function(){for(var i=0,length=args.length;
+i<length;
+i++){jQuery(args[i])[original](this);
+}});
+};
+});
+jQuery.each({removeAttr:function(name){jQuery.attr(this,name,"");
+if(this.nodeType==1){this.removeAttribute(name);
+}},addClass:function(classNames){jQuery.className.add(this,classNames);
+},removeClass:function(classNames){jQuery.className.remove(this,classNames);
+},toggleClass:function(classNames){jQuery.className[jQuery.className.has(this,classNames)?"remove":"add"](this,classNames);
+},remove:function(selector){if(!selector||jQuery.filter(selector,[this]).r.length){jQuery("*",this).add(this).each(function(){jQuery.event.remove(this);
+jQuery.removeData(this);
+});
+if(this.parentNode){this.parentNode.removeChild(this);
+}}},empty:function(){jQuery(">*",this).remove();
+while(this.firstChild){this.removeChild(this.firstChild);
+}}},function(name,fn){jQuery.fn[name]=function(){return this.each(fn,arguments);
+};
+});
+jQuery.each(["Height","Width"],function(i,name){var type=name.toLowerCase();
+jQuery.fn[type]=function(size){return this[0]==window?jQuery.browser.opera&&document.body["client"+name]||jQuery.browser.safari&&window["inner"+name]||document.compatMode=="CSS1Compat"&&document.documentElement["client"+name]||document.body["client"+name]:this[0]==document?Math.max(Math.max(document.body["scroll"+name],document.documentElement["scroll"+name]),Math.max(document.body["offset"+name],document.documentElement["offset"+name])):size==undefined?(this.length?jQuery.css(this[0],type):null):this.css(type,size.constructor==String?size:size+"px");
+};
+});
+function num(elem,prop){return elem[0]&&parseInt(jQuery.curCSS(elem[0],prop,true),10)||0;
+}var chars=jQuery.browser.safari&&parseInt(jQuery.browser.version)<417?"(?:[\\w*_-]|\\\\.)":"(?:[\\w\u0128-\uFFFF*_-]|\\\\.)",quickChild=new RegExp("^>\\s*("+chars+"+)"),quickID=new RegExp("^("+chars+"+)(#)("+chars+"+)"),quickClass=new RegExp("^([#.]?)("+chars+"*)");
+jQuery.extend({expr:{"":function(a,i,m){return m[2]=="*"||jQuery.nodeName(a,m[2]);
+},"#":function(a,i,m){return a.getAttribute("id")==m[2];
+},":":{lt:function(a,i,m){return i<m[3]-0;
+},gt:function(a,i,m){return i>m[3]-0;
+},nth:function(a,i,m){return m[3]-0==i;
+},eq:function(a,i,m){return m[3]-0==i;
+},first:function(a,i){return i==0;
+},last:function(a,i,m,r){return i==r.length-1;
+},even:function(a,i){return i%2==0;
+},odd:function(a,i){return i%2;
+},"first-child":function(a){return a.parentNode.getElementsByTagName("*")[0]==a;
+},"last-child":function(a){return jQuery.nth(a.parentNode.lastChild,1,"previousSibling")==a;
+},"only-child":function(a){return !jQuery.nth(a.parentNode.lastChild,2,"previousSibling");
+},parent:function(a){return a.firstChild;
+},empty:function(a){return !a.firstChild;
+},contains:function(a,i,m){return(a.textContent||a.innerText||jQuery(a).text()||"").indexOf(m[3])>=0;
+},visible:function(a){return"hidden"!=a.type&&jQuery.css(a,"display")!="none"&&jQuery.css(a,"visibility")!="hidden";
+},hidden:function(a){return"hidden"==a.type||jQuery.css(a,"display")=="none"||jQuery.css(a,"visibility")=="hidden";
+},enabled:function(a){return !a.disabled;
+},disabled:function(a){return a.disabled;
+},checked:function(a){return a.checked;
+},selected:function(a){return a.selected||jQuery.attr(a,"selected");
+},text:function(a){return"text"==a.type;
+},radio:function(a){return"radio"==a.type;
+},checkbox:function(a){return"checkbox"==a.type;
+},file:function(a){return"file"==a.type;
+},password:function(a){return"password"==a.type;
+},submit:function(a){return"submit"==a.type;
+},image:function(a){return"image"==a.type;
+},reset:function(a){return"reset"==a.type;
+},button:function(a){return"button"==a.type||jQuery.nodeName(a,"button");
+},input:function(a){return/input|select|textarea|button/i.test(a.nodeName);
+},has:function(a,i,m){return jQuery.find(m[3],a).length;
+},header:function(a){return/h\d/i.test(a.nodeName);
+},animated:function(a){return jQuery.grep(jQuery.timers,function(fn){return a==fn.elem;
+}).length;
+}}},parse:[/^(\[) *@?([\w-]+) *([!*$^~=]*) *('?"?)(.*?)\4 *\]/,/^(:)([\w-]+)\("?'?(.*?(\(.*?\))?[^(]*?)"?'?\)/,new RegExp("^([:.#]*)("+chars+"+)")],multiFilter:function(expr,elems,not){var old,cur=[];
+while(expr&&expr!=old){old=expr;
+var f=jQuery.filter(expr,elems,not);
+expr=f.t.replace(/^\s*,\s*/,"");
+cur=not?elems=f.r:jQuery.merge(cur,f.r);
+}return cur;
+},find:function(t,context){if(typeof t!="string"){return[t];
+}if(context&&context.nodeType!=1&&context.nodeType!=9){return[];
+}context=context||document;
+var ret=[context],done=[],last,nodeName;
+while(t&&last!=t){var r=[];
+last=t;
+t=jQuery.trim(t);
+var foundToken=false,re=quickChild,m=re.exec(t);
+if(m){nodeName=m[1].toUpperCase();
+for(var i=0;
+ret[i];
+i++){for(var c=ret[i].firstChild;
+c;
+c=c.nextSibling){if(c.nodeType==1&&(nodeName=="*"||c.nodeName.toUpperCase()==nodeName)){r.push(c);
+}}}ret=r;
+t=t.replace(re,"");
+if(t.indexOf(" ")==0){continue;
+}foundToken=true;
+}else{re=/^([>+~])\s*(\w*)/i;
+if((m=re.exec(t))!=null){r=[];
+var merge={};
+nodeName=m[2].toUpperCase();
+m=m[1];
+for(var j=0,rl=ret.length;
+j<rl;
+j++){var n=m=="~"||m=="+"?ret[j].nextSibling:ret[j].firstChild;
+for(;
+n;
+n=n.nextSibling){if(n.nodeType==1){var id=jQuery.data(n);
+if(m=="~"&&merge[id]){break;
+}if(!nodeName||n.nodeName.toUpperCase()==nodeName){if(m=="~"){merge[id]=true;
+}r.push(n);
+}if(m=="+"){break;
+}}}}ret=r;
+t=jQuery.trim(t.replace(re,""));
+foundToken=true;
+}}if(t&&!foundToken){if(!t.indexOf(",")){if(context==ret[0]){ret.shift();
+}done=jQuery.merge(done,ret);
+r=ret=[context];
+t=" "+t.substr(1,t.length);
+}else{var re2=quickID;
+var m=re2.exec(t);
+if(m){m=[0,m[2],m[3],m[1]];
+}else{re2=quickClass;
+m=re2.exec(t);
+}m[2]=m[2].replace(/\\/g,"");
+var elem=ret[ret.length-1];
+if(m[1]=="#"&&elem&&elem.getElementById&&!jQuery.isXMLDoc(elem)){var oid=elem.getElementById(m[2]);
+if((jQuery.browser.msie||jQuery.browser.opera)&&oid&&typeof oid.id=="string"&&oid.id!=m[2]){oid=jQuery('[@id="'+m[2]+'"]',elem)[0];
+}ret=r=oid&&(!m[3]||jQuery.nodeName(oid,m[3]))?[oid]:[];
+}else{for(var i=0;
+ret[i];
+i++){var tag=m[1]=="#"&&m[3]?m[3]:m[1]!=""||m[0]==""?"*":m[2];
+if(tag=="*"&&ret[i].nodeName.toLowerCase()=="object"){tag="param";
+}r=jQuery.merge(r,ret[i].getElementsByTagName(tag));
+}if(m[1]=="."){r=jQuery.classFilter(r,m[2]);
+}if(m[1]=="#"){var tmp=[];
+for(var i=0;
+r[i];
+i++){if(r[i].getAttribute("id")==m[2]){tmp=[r[i]];
+break;
+}}r=tmp;
+}ret=r;
+}t=t.replace(re2,"");
+}}if(t){var val=jQuery.filter(t,r);
+ret=r=val.r;
+t=jQuery.trim(val.t);
+}}if(t){ret=[];
+}if(ret&&context==ret[0]){ret.shift();
+}done=jQuery.merge(done,ret);
+return done;
+},classFilter:function(r,m,not){m=" "+m+" ";
+var tmp=[];
+for(var i=0;
+r[i];
+i++){var pass=(" "+r[i].className+" ").indexOf(m)>=0;
+if(!not&&pass||not&&!pass){tmp.push(r[i]);
+}}return tmp;
+},filter:function(t,r,not){var last;
+while(t&&t!=last){last=t;
+var p=jQuery.parse,m;
+for(var i=0;
+p[i];
+i++){m=p[i].exec(t);
+if(m){t=t.substring(m[0].length);
+m[2]=m[2].replace(/\\/g,"");
+break;
+}}if(!m){break;
+}if(m[1]==":"&&m[2]=="not"){r=isSimple.test(m[3])?jQuery.filter(m[3],r,true).r:jQuery(r).not(m[3]);
+}else{if(m[1]=="."){r=jQuery.classFilter(r,m[2],not);
+}else{if(m[1]=="["){var tmp=[],type=m[3];
+for(var i=0,rl=r.length;
+i<rl;
+i++){var a=r[i],z=a[jQuery.props[m[2]]||m[2]];
+if(z==null||/href|src|selected/.test(m[2])){z=jQuery.attr(a,m[2])||"";
+}if((type==""&&!!z||type=="="&&z==m[5]||type=="!="&&z!=m[5]||type=="^="&&z&&!z.indexOf(m[5])||type=="$="&&z.substr(z.length-m[5].length)==m[5]||(type=="*="||type=="~=")&&z.indexOf(m[5])>=0)^not){tmp.push(a);
+}}r=tmp;
+}else{if(m[1]==":"&&m[2]=="nth-child"){var merge={},tmp=[],test=/(-?)(\d*)n((?:\+|-)?\d*)/.exec(m[3]=="even"&&"2n"||m[3]=="odd"&&"2n+1"||!/\D/.test(m[3])&&"0n+"+m[3]||m[3]),first=(test[1]+(test[2]||1))-0,last=test[3]-0;
+for(var i=0,rl=r.length;
+i<rl;
+i++){var node=r[i],parentNode=node.parentNode,id=jQuery.data(parentNode);
+if(!merge[id]){var c=1;
+for(var n=parentNode.firstChild;
+n;
+n=n.nextSibling){if(n.nodeType==1){n.nodeIndex=c++;
+}}merge[id]=true;
+}var add=false;
+if(first==0){if(node.nodeIndex==last){add=true;
+}}else{if((node.nodeIndex-last)%first==0&&(node.nodeIndex-last)/first>=0){add=true;
+}}if(add^not){tmp.push(node);
+}}r=tmp;
+}else{var fn=jQuery.expr[m[1]];
+if(typeof fn=="object"){fn=fn[m[2]];
+}if(typeof fn=="string"){fn=eval("false||function(a,i){return "+fn+";}");
+}r=jQuery.grep(r,function(elem,i){return fn(elem,i,m,r);
+},not);
+}}}}}return{r:r,t:t};
+},dir:function(elem,dir){var matched=[],cur=elem[dir];
+while(cur&&cur!=document){if(cur.nodeType==1){matched.push(cur);
+}cur=cur[dir];
+}return matched;
+},nth:function(cur,result,dir,elem){result=result||1;
+var num=0;
+for(;
+cur;
+cur=cur[dir]){if(cur.nodeType==1&&++num==result){break;
+}}return cur;
+},sibling:function(n,elem){var r=[];
+for(;
+n;
+n=n.nextSibling){if(n.nodeType==1&&n!=elem){r.push(n);
+}}return r;
+}});
+jQuery.event={add:function(elem,types,handler,data){if(elem.nodeType==3||elem.nodeType==8){return ;
+}if(jQuery.browser.msie&&elem.setInterval){elem=window;
+}if(!handler.guid){handler.guid=this.guid++;
+}if(data!=undefined){var fn=handler;
+handler=this.proxy(fn,function(){return fn.apply(this,arguments);
+});
+handler.data=data;
+}var events=jQuery.data(elem,"events")||jQuery.data(elem,"events",{}),handle=jQuery.data(elem,"handle")||jQuery.data(elem,"handle",function(){if(typeof jQuery!="undefined"&&!jQuery.event.triggered){return jQuery.event.handle.apply(arguments.callee.elem,arguments);
+}});
+handle.elem=elem;
+jQuery.each(types.split(/\s+/),function(index,type){var parts=type.split(".");
+type=parts[0];
+handler.type=parts[1];
+var handlers=events[type];
+if(!handlers){handlers=events[type]={};
+if(!jQuery.event.special[type]||jQuery.event.special[type].setup.call(elem)===false){if(elem.addEventListener){elem.addEventListener(type,handle,false);
+}else{if(elem.attachEvent){elem.attachEvent("on"+type,handle);
+}}}}handlers[handler.guid]=handler;
+jQuery.event.global[type]=true;
+});
+elem=null;
+},guid:1,global:{},remove:function(elem,types,handler){if(elem.nodeType==3||elem.nodeType==8){return ;
+}var events=jQuery.data(elem,"events"),ret,index;
+if(events){if(types==undefined||(typeof types=="string"&&types.charAt(0)==".")){for(var type in events){this.remove(elem,type+(types||""));
+}}else{if(types.type){handler=types.handler;
+types=types.type;
+}jQuery.each(types.split(/\s+/),function(index,type){var parts=type.split(".");
+type=parts[0];
+if(events[type]){if(handler){delete events[type][handler.guid];
+}else{for(handler in events[type]){if(!parts[1]||events[type][handler].type==parts[1]){delete events[type][handler];
+}}}for(ret in events[type]){break;
+}if(!ret){if(!jQuery.event.special[type]||jQuery.event.special[type].teardown.call(elem)===false){if(elem.removeEventListener){elem.removeEventListener(type,jQuery.data(elem,"handle"),false);
+}else{if(elem.detachEvent){elem.detachEvent("on"+type,jQuery.data(elem,"handle"));
+}}}ret=null;
+delete events[type];
+}}});
+}for(ret in events){break;
+}if(!ret){var handle=jQuery.data(elem,"handle");
+if(handle){handle.elem=null;
+}jQuery.removeData(elem,"events");
+jQuery.removeData(elem,"handle");
+}}},trigger:function(type,data,elem,donative,extra){data=jQuery.makeArray(data);
+if(type.indexOf("!")>=0){type=type.slice(0,-1);
+var exclusive=true;
+}if(!elem){if(this.global[type]){jQuery("*").add([window,document]).trigger(type,data);
+}}else{if(elem.nodeType==3||elem.nodeType==8){return undefined;
+}var val,ret,fn=jQuery.isFunction(elem[type]||null),event=!data[0]||!data[0].preventDefault;
+if(event){data.unshift({type:type,target:elem,preventDefault:function(){},stopPropagation:function(){},timeStamp:now()});
+data[0][expando]=true;
+}data[0].type=type;
+if(exclusive){data[0].exclusive=true;
+}var handle=jQuery.data(elem,"handle");
+if(handle){val=handle.apply(elem,data);
+}if((!fn||(jQuery.nodeName(elem,"a")&&type=="click"))&&elem["on"+type]&&elem["on"+type].apply(elem,data)===false){val=false;
+}if(event){data.shift();
+}if(extra&&jQuery.isFunction(extra)){ret=extra.apply(elem,val==null?data:data.concat(val));
+if(ret!==undefined){val=ret;
+}}if(fn&&donative!==false&&val!==false&&!(jQuery.nodeName(elem,"a")&&type=="click")){this.triggered=true;
+try{elem[type]();
+}catch(e){}}this.triggered=false;
+}return val;
+},handle:function(event){var val,ret,namespace,all,handlers;
+event=arguments[0]=jQuery.event.fix(event||window.event);
+namespace=event.type.split(".");
+event.type=namespace[0];
+namespace=namespace[1];
+all=!namespace&&!event.exclusive;
+handlers=(jQuery.data(this,"events")||{})[event.type];
+for(var j in handlers){var handler=handlers[j];
+if(all||handler.type==namespace){event.handler=handler;
+event.data=handler.data;
+ret=handler.apply(this,arguments);
+if(val!==false){val=ret;
+}if(ret===false){event.preventDefault();
+event.stopPropagation();
+}}}return val;
+},fix:function(event){if(event[expando]==true){return event;
+}var originalEvent=event;
+event={originalEvent:originalEvent};
+var props="altKey attrChange attrName bubbles button cancelable charCode clientX clientY ctrlKey currentTarget data detail eventPhase fromElement handler keyCode metaKey newValue originalTarget pageX pageY prevValue relatedNode relatedTarget screenX screenY shiftKey srcElement target timeStamp toElement type view wheelDelta which".split(" ");
+for(var i=props.length;
+i;
+i--){event[props[i]]=originalEvent[props[i]];
+}event[expando]=true;
+event.preventDefault=function(){if(originalEvent.preventDefault){originalEvent.preventDefault();
+}originalEvent.returnValue=false;
+};
+event.stopPropagation=function(){if(originalEvent.stopPropagation){originalEvent.stopPropagation();
+}originalEvent.cancelBubble=true;
+};
+event.timeStamp=event.timeStamp||now();
+if(!event.target){event.target=event.srcElement||document;
+}if(event.target.nodeType==3){event.target=event.target.parentNode;
+}if(!event.relatedTarget&&event.fromElement){event.relatedTarget=event.fromElement==event.target?event.toElement:event.fromElement;
+}if(event.pageX==null&&event.clientX!=null){var doc=document.documentElement,body=document.body;
+event.pageX=event.clientX+(doc&&doc.scrollLeft||body&&body.scrollLeft||0)-(doc.clientLeft||0);
+event.pageY=event.clientY+(doc&&doc.scrollTop||body&&body.scrollTop||0)-(doc.clientTop||0);
+}if(!event.which&&((event.charCode||event.charCode===0)?event.charCode:event.keyCode)){event.which=event.charCode||event.keyCode;
+}if(!event.metaKey&&event.ctrlKey){event.metaKey=event.ctrlKey;
+}if(!event.which&&event.button){event.which=(event.button&1?1:(event.button&2?3:(event.button&4?2:0)));
+}return event;
+},proxy:function(fn,proxy){proxy.guid=fn.guid=fn.guid||proxy.guid||this.guid++;
+return proxy;
+},special:{ready:{setup:function(){bindReady();
+return ;
+},teardown:function(){return ;
+}},mouseenter:{setup:function(){if(jQuery.browser.msie){return false;
+}jQuery(this).bind("mouseover",jQuery.event.special.mouseenter.handler);
+return true;
+},teardown:function(){if(jQuery.browser.msie){return false;
+}jQuery(this).unbind("mouseover",jQuery.event.special.mouseenter.handler);
+return true;
+},handler:function(event){if(withinElement(event,this)){return true;
+}event.type="mouseenter";
+return jQuery.event.handle.apply(this,arguments);
+}},mouseleave:{setup:function(){if(jQuery.browser.msie){return false;
+}jQuery(this).bind("mouseout",jQuery.event.special.mouseleave.handler);
+return true;
+},teardown:function(){if(jQuery.browser.msie){return false;
+}jQuery(this).unbind("mouseout",jQuery.event.special.mouseleave.handler);
+return true;
+},handler:function(event){if(withinElement(event,this)){return true;
+}event.type="mouseleave";
+return jQuery.event.handle.apply(this,arguments);
+}}}};
+jQuery.fn.extend({bind:function(type,data,fn){return type=="unload"?this.one(type,data,fn):this.each(function(){jQuery.event.add(this,type,fn||data,fn&&data);
+});
+},one:function(type,data,fn){var one=jQuery.event.proxy(fn||data,function(event){jQuery(this).unbind(event,one);
+return(fn||data).apply(this,arguments);
+});
+return this.each(function(){jQuery.event.add(this,type,one,fn&&data);
+});
+},unbind:function(type,fn){return this.each(function(){jQuery.event.remove(this,type,fn);
+});
+},trigger:function(type,data,fn){return this.each(function(){jQuery.event.trigger(type,data,this,true,fn);
+});
+},triggerHandler:function(type,data,fn){return this[0]&&jQuery.event.trigger(type,data,this[0],false,fn);
+},toggle:function(fn){var args=arguments,i=1;
+while(i<args.length){jQuery.event.proxy(fn,args[i++]);
+}return this.click(jQuery.event.proxy(fn,function(event){this.lastToggle=(this.lastToggle||0)%i;
+event.preventDefault();
+return args[this.lastToggle++].apply(this,arguments)||false;
+}));
+},hover:function(fnOver,fnOut){return this.bind("mouseenter",fnOver).bind("mouseleave",fnOut);
+},ready:function(fn){bindReady();
+if(jQuery.isReady){fn.call(document,jQuery);
+}else{jQuery.readyList.push(function(){return fn.call(this,jQuery);
+});
+}return this;
+}});
+jQuery.extend({isReady:false,readyList:[],ready:function(){if(!jQuery.isReady){jQuery.isReady=true;
+if(jQuery.readyList){jQuery.each(jQuery.readyList,function(){this.call(document);
+});
+jQuery.readyList=null;
+}jQuery(document).triggerHandler("ready");
+}}});
+var readyBound=false;
+function bindReady(){if(readyBound){return ;
+}readyBound=true;
+if(document.addEventListener&&!jQuery.browser.opera){document.addEventListener("DOMContentLoaded",jQuery.ready,false);
+}if(jQuery.browser.msie&&window==top){(function(){if(jQuery.isReady){return ;
+}try{document.documentElement.doScroll("left");
+}catch(error){setTimeout(arguments.callee,0);
+return ;
+}jQuery.ready();
+})();
+}if(jQuery.browser.opera){document.addEventListener("DOMContentLoaded",function(){if(jQuery.isReady){return ;
+}for(var i=0;
+i<document.styleSheets.length;
+i++){if(document.styleSheets[i].disabled){setTimeout(arguments.callee,0);
+return ;
+}}jQuery.ready();
+},false);
+}if(jQuery.browser.safari){var numStyles;
+(function(){if(jQuery.isReady){return ;
+}if(document.readyState!="loaded"&&document.readyState!="complete"){setTimeout(arguments.callee,0);
+return ;
+}if(numStyles===undefined){numStyles=jQuery("style, link[rel=stylesheet]").length;
+}if(document.styleSheets.length!=numStyles){setTimeout(arguments.callee,0);
+return ;
+}jQuery.ready();
+})();
+}jQuery.event.add(window,"load",jQuery.ready);
+}jQuery.each(("blur,focus,load,resize,scroll,unload,click,dblclick,mousedown,mouseup,mousemove,mouseover,mouseout,change,select,submit,keydown,keypress,keyup,error").split(","),function(i,name){jQuery.fn[name]=function(fn){return fn?this.bind(name,fn):this.trigger(name);
+};
+});
+var withinElement=function(event,elem){var parent=event.relatedTarget;
+while(parent&&parent!=elem){try{parent=parent.parentNode;
+}catch(error){parent=elem;
+}}return parent==elem;
+};
+jQuery(window).bind("unload",function(){jQuery("*").add(document).unbind();
+});
+jQuery.fn.extend({_load:jQuery.fn.load,load:function(url,params,callback){if(typeof url!="string"){return this._load(url);
+}var off=url.indexOf(" ");
+if(off>=0){var selector=url.slice(off,url.length);
+url=url.slice(0,off);
+}callback=callback||function(){};
+var type="GET";
+if(params){if(jQuery.isFunction(params)){callback=params;
+params=null;
+}else{params=jQuery.param(params);
+type="POST";
+}}var self=this;
+jQuery.ajax({url:url,type:type,dataType:"html",data:params,complete:function(res,status){if(status=="success"||status=="notmodified"){self.html(selector?jQuery("<div/>").append(res.responseText.replace(/<script(.|\s)*?\/script>/g,"")).find(selector):res.responseText);
+}self.each(callback,[res.responseText,status,res]);
+}});
+return this;
+},serialize:function(){return jQuery.param(this.serializeArray());
+},serializeArray:function(){return this.map(function(){return jQuery.nodeName(this,"form")?jQuery.makeArray(this.elements):this;
+}).filter(function(){return this.name&&!this.disabled&&(this.checked||/select|textarea/i.test(this.nodeName)||/text|hidden|password/i.test(this.type));
+}).map(function(i,elem){var val=jQuery(this).val();
+return val==null?null:val.constructor==Array?jQuery.map(val,function(val,i){return{name:elem.name,value:val};
+}):{name:elem.name,value:val};
+}).get();
+}});
+jQuery.each("ajaxStart,ajaxStop,ajaxComplete,ajaxError,ajaxSuccess,ajaxSend".split(","),function(i,o){jQuery.fn[o]=function(f){return this.bind(o,f);
+};
+});
+var jsc=now();
+jQuery.extend({get:function(url,data,callback,type){if(jQuery.isFunction(data)){callback=data;
+data=null;
+}return jQuery.ajax({type:"GET",url:url,data:data,success:callback,dataType:type});
+},getScript:function(url,callback){return jQuery.get(url,null,callback,"script");
+},getJSON:function(url,data,callback){return jQuery.get(url,data,callback,"json");
+},post:function(url,data,callback,type){if(jQuery.isFunction(data)){callback=data;
+data={};
+}return jQuery.ajax({type:"POST",url:url,data:data,success:callback,dataType:type});
+},ajaxSetup:function(settings){jQuery.extend(jQuery.ajaxSettings,settings);
+},ajaxSettings:{url:location.href,global:true,type:"GET",timeout:0,contentType:"application/x-www-form-urlencoded",processData:true,async:true,data:null,username:null,password:null,accepts:{xml:"application/xml, text/xml",html:"text/html",script:"text/javascript, application/javascript",json:"application/json, text/javascript",text:"text/plain",_default:"*/*"}},lastModified:{},ajax:function(s){s=jQuery.extend(true,s,jQuery.extend(true,{},jQuery.ajaxSettings,s));
+var jsonp,jsre=/=\?(&|$)/g,status,data,type=s.type.toUpperCase();
+if(s.data&&s.processData&&typeof s.data!="string"){s.data=jQuery.param(s.data);
+}if(s.dataType=="jsonp"){if(type=="GET"){if(!s.url.match(jsre)){s.url+=(s.url.match(/\?/)?"&":"?")+(s.jsonp||"callback")+"=?";
+}}else{if(!s.data||!s.data.match(jsre)){s.data=(s.data?s.data+"&":"")+(s.jsonp||"callback")+"=?";
+}}s.dataType="json";
+}if(s.dataType=="json"&&(s.data&&s.data.match(jsre)||s.url.match(jsre))){jsonp="jsonp"+jsc++;
+if(s.data){s.data=(s.data+"").replace(jsre,"="+jsonp+"$1");
+}s.url=s.url.replace(jsre,"="+jsonp+"$1");
+s.dataType="script";
+window[jsonp]=function(tmp){data=tmp;
+success();
+complete();
+window[jsonp]=undefined;
+try{delete window[jsonp];
+}catch(e){}if(head){head.removeChild(script);
+}};
+}if(s.dataType=="script"&&s.cache==null){s.cache=false;
+}if(s.cache===false&&type=="GET"){var ts=now();
+var ret=s.url.replace(/(\?|&)_=.*?(&|$)/,"$1_="+ts+"$2");
+s.url=ret+((ret==s.url)?(s.url.match(/\?/)?"&":"?")+"_="+ts:"");
+}if(s.data&&type=="GET"){s.url+=(s.url.match(/\?/)?"&":"?")+s.data;
+s.data=null;
+}if(s.global&&!jQuery.active++){jQuery.event.trigger("ajaxStart");
+}var remote=/^(?:\w+:)?\/\/([^\/?#]+)/;
+if(s.dataType=="script"&&type=="GET"&&remote.test(s.url)&&remote.exec(s.url)[1]!=location.host){var head=document.getElementsByTagName("head")[0];
+var script=document.createElement("script");
+script.src=s.url;
+if(s.scriptCharset){script.charset=s.scriptCharset;
+}if(!jsonp){var done=false;
+script.onload=script.onreadystatechange=function(){if(!done&&(!this.readyState||this.readyState=="loaded"||this.readyState=="complete")){done=true;
+success();
+complete();
+head.removeChild(script);
+}};
+}head.appendChild(script);
+return undefined;
+}var requestDone=false;
+var xhr=window.ActiveXObject?new ActiveXObject("Microsoft.XMLHTTP"):new XMLHttpRequest();
+if(s.username){xhr.open(type,s.url,s.async,s.username,s.password);
+}else{xhr.open(type,s.url,s.async);
+}try{if(s.data){xhr.setRequestHeader("Content-Type",s.contentType);
+}if(s.ifModified){xhr.setRequestHeader("If-Modified-Since",jQuery.lastModified[s.url]||"Thu, 01 Jan 1970 00:00:00 GMT");
+}xhr.setRequestHeader("X-Requested-With","XMLHttpRequest");
+xhr.setRequestHeader("Accept",s.dataType&&s.accepts[s.dataType]?s.accepts[s.dataType]+", */*":s.accepts._default);
+}catch(e){}if(s.beforeSend&&s.beforeSend(xhr,s)===false){s.global&&jQuery.active--;
+xhr.abort();
+return false;
+}if(s.global){jQuery.event.trigger("ajaxSend",[xhr,s]);
+}var onreadystatechange=function(isTimeout){if(!requestDone&&xhr&&(xhr.readyState==4||isTimeout=="timeout")){requestDone=true;
+if(ival){clearInterval(ival);
+ival=null;
+}status=isTimeout=="timeout"&&"timeout"||!jQuery.httpSuccess(xhr)&&"error"||s.ifModified&&jQuery.httpNotModified(xhr,s.url)&&"notmodified"||"success";
+if(status=="success"){try{data=jQuery.httpData(xhr,s.dataType,s.dataFilter);
+}catch(e){status="parsererror";
+}}if(status=="success"){var modRes;
+try{modRes=xhr.getResponseHeader("Last-Modified");
+}catch(e){}if(s.ifModified&&modRes){jQuery.lastModified[s.url]=modRes;
+}if(!jsonp){success();
+}}else{jQuery.handleError(s,xhr,status);
+}complete();
+if(s.async){xhr=null;
+}}};
+if(s.async){var ival=setInterval(onreadystatechange,13);
+if(s.timeout>0){setTimeout(function(){if(xhr){xhr.abort();
+if(!requestDone){onreadystatechange("timeout");
+}}},s.timeout);
+}}try{xhr.send(s.data);
+}catch(e){jQuery.handleError(s,xhr,null,e);
+}if(!s.async){onreadystatechange();
+}function success(){if(s.success){s.success(data,status);
+}if(s.global){jQuery.event.trigger("ajaxSuccess",[xhr,s]);
+}}function complete(){if(s.complete){s.complete(xhr,status);
+}if(s.global){jQuery.event.trigger("ajaxComplete",[xhr,s]);
+}if(s.global&&!--jQuery.active){jQuery.event.trigger("ajaxStop");
+}}return xhr;
+},handleError:function(s,xhr,status,e){if(s.error){s.error(xhr,status,e);
+}if(s.global){jQuery.event.trigger("ajaxError",[xhr,s,e]);
+}},active:0,httpSuccess:function(xhr){try{return !xhr.status&&location.protocol=="file:"||(xhr.status>=200&&xhr.status<300)||xhr.status==304||xhr.status==1223||jQuery.browser.safari&&xhr.status==undefined;
+}catch(e){}return false;
+},httpNotModified:function(xhr,url){try{var xhrRes=xhr.getResponseHeader("Last-Modified");
+return xhr.status==304||xhrRes==jQuery.lastModified[url]||jQuery.browser.safari&&xhr.status==undefined;
+}catch(e){}return false;
+},httpData:function(xhr,type,filter){var ct=xhr.getResponseHeader("content-type"),xml=type=="xml"||!type&&ct&&ct.indexOf("xml")>=0,data=xml?xhr.responseXML:xhr.responseText;
+if(xml&&data.documentElement.tagName=="parsererror"){throw"parsererror";
+}if(filter){data=filter(data,type);
+}if(type=="script"){jQuery.globalEval(data);
+}if(type=="json"){data=eval("("+data+")");
+}return data;
+},param:function(a){var s=[];
+if(a.constructor==Array||a.jquery){jQuery.each(a,function(){s.push(encodeURIComponent(this.name)+"="+encodeURIComponent(this.value));
+});
+}else{for(var j in a){if(a[j]&&a[j].constructor==Array){jQuery.each(a[j],function(){s.push(encodeURIComponent(j)+"="+encodeURIComponent(this));
+});
+}else{s.push(encodeURIComponent(j)+"="+encodeURIComponent(jQuery.isFunction(a[j])?a[j]():a[j]));
+}}}return s.join("&").replace(/%20/g,"+");
+}});
+jQuery.fn.extend({show:function(speed,callback){return speed?this.animate({height:"show",width:"show",opacity:"show"},speed,callback):this.filter(":hidden").each(function(){this.style.display=this.oldblock||"";
+if(jQuery.css(this,"display")=="none"){var elem=jQuery("<"+this.tagName+" />").appendTo("body");
+this.style.display=elem.css("display");
+if(this.style.display=="none"){this.style.display="block";
+}elem.remove();
+}}).end();
+},hide:function(speed,callback){return speed?this.animate({height:"hide",width:"hide",opacity:"hide"},speed,callback):this.filter(":visible").each(function(){this.oldblock=this.oldblock||jQuery.css(this,"display");
+this.style.display="none";
+}).end();
+},_toggle:jQuery.fn.toggle,toggle:function(fn,fn2){return jQuery.isFunction(fn)&&jQuery.isFunction(fn2)?this._toggle.apply(this,arguments):fn?this.animate({height:"toggle",width:"toggle",opacity:"toggle"},fn,fn2):this.each(function(){jQuery(this)[jQuery(this).is(":hidden")?"show":"hide"]();
+});
+},slideDown:function(speed,callback){return this.animate({height:"show"},speed,callback);
+},slideUp:function(speed,callback){return this.animate({height:"hide"},speed,callback);
+},slideToggle:function(speed,callback){return this.animate({height:"toggle"},speed,callback);
+},fadeIn:function(speed,callback){return this.animate({opacity:"show"},speed,callback);
+},fadeOut:function(speed,callback){return this.animate({opacity:"hide"},speed,callback);
+},fadeTo:function(speed,to,callback){return this.animate({opacity:to},speed,callback);
+},animate:function(prop,speed,easing,callback){var optall=jQuery.speed(speed,easing,callback);
+return this[optall.queue===false?"each":"queue"](function(){if(this.nodeType!=1){return false;
+}var opt=jQuery.extend({},optall),p,hidden=jQuery(this).is(":hidden"),self=this;
+for(p in prop){if(prop[p]=="hide"&&hidden||prop[p]=="show"&&!hidden){return opt.complete.call(this);
+}if(p=="height"||p=="width"){opt.display=jQuery.css(this,"display");
+opt.overflow=this.style.overflow;
+}}if(opt.overflow!=null){this.style.overflow="hidden";
+}opt.curAnim=jQuery.extend({},prop);
+jQuery.each(prop,function(name,val){var e=new jQuery.fx(self,opt,name);
+if(/toggle|show|hide/.test(val)){e[val=="toggle"?hidden?"show":"hide":val](prop);
+}else{var parts=val.toString().match(/^([+-]=)?([\d+-.]+)(.*)$/),start=e.cur(true)||0;
+if(parts){var end=parseFloat(parts[2]),unit=parts[3]||"px";
+if(unit!="px"){self.style[name]=(end||1)+unit;
+start=((end||1)/e.cur(true))*start;
+self.style[name]=start+unit;
+}if(parts[1]){end=((parts[1]=="-="?-1:1)*end)+start;
+}e.custom(start,end,unit);
+}else{e.custom(start,val,"");
+}}});
+return true;
+});
+},queue:function(type,fn){if(jQuery.isFunction(type)||(type&&type.constructor==Array)){fn=type;
+type="fx";
+}if(!type||(typeof type=="string"&&!fn)){return queue(this[0],type);
+}return this.each(function(){if(fn.constructor==Array){queue(this,type,fn);
+}else{queue(this,type).push(fn);
+if(queue(this,type).length==1){fn.call(this);
+}}});
+},stop:function(clearQueue,gotoEnd){var timers=jQuery.timers;
+if(clearQueue){this.queue([]);
+}this.each(function(){for(var i=timers.length-1;
+i>=0;
+i--){if(timers[i].elem==this){if(gotoEnd){timers[i](true);
+}timers.splice(i,1);
+}}});
+if(!gotoEnd){this.dequeue();
+}return this;
+}});
+var queue=function(elem,type,array){if(elem){type=type||"fx";
+var q=jQuery.data(elem,type+"queue");
+if(!q||array){q=jQuery.data(elem,type+"queue",jQuery.makeArray(array));
+}}return q;
+};
+jQuery.fn.dequeue=function(type){type=type||"fx";
+return this.each(function(){var q=queue(this,type);
+q.shift();
+if(q.length){q[0].call(this);
+}});
+};
+jQuery.extend({speed:function(speed,easing,fn){var opt=speed&&speed.constructor==Object?speed:{complete:fn||!fn&&easing||jQuery.isFunction(speed)&&speed,duration:speed,easing:fn&&easing||easing&&easing.constructor!=Function&&easing};
+opt.duration=(opt.duration&&opt.duration.constructor==Number?opt.duration:jQuery.fx.speeds[opt.duration])||jQuery.fx.speeds.def;
+opt.old=opt.complete;
+opt.complete=function(){if(opt.queue!==false){jQuery(this).dequeue();
+}if(jQuery.isFunction(opt.old)){opt.old.call(this);
+}};
+return opt;
+},easing:{linear:function(p,n,firstNum,diff){return firstNum+diff*p;
+},swing:function(p,n,firstNum,diff){return((-Math.cos(p*Math.PI)/2)+0.5)*diff+firstNum;
+}},timers:[],timerId:null,fx:function(elem,options,prop){this.options=options;
+this.elem=elem;
+this.prop=prop;
+if(!options.orig){options.orig={};
+}}});
+jQuery.fx.prototype={update:function(){if(this.options.step){this.options.step.call(this.elem,this.now,this);
+}(jQuery.fx.step[this.prop]||jQuery.fx.step._default)(this);
+if(this.prop=="height"||this.prop=="width"){this.elem.style.display="block";
+}},cur:function(force){if(this.elem[this.prop]!=null&&this.elem.style[this.prop]==null){return this.elem[this.prop];
+}var r=parseFloat(jQuery.css(this.elem,this.prop,force));
+return r&&r>-10000?r:parseFloat(jQuery.curCSS(this.elem,this.prop))||0;
+},custom:function(from,to,unit){this.startTime=now();
+this.start=from;
+this.end=to;
+this.unit=unit||this.unit||"px";
+this.now=this.start;
+this.pos=this.state=0;
+this.update();
+var self=this;
+function t(gotoEnd){return self.step(gotoEnd);
+}t.elem=this.elem;
+jQuery.timers.push(t);
+if(jQuery.timerId==null){jQuery.timerId=setInterval(function(){var timers=jQuery.timers;
+for(var i=0;
+i<timers.length;
+i++){if(!timers[i]()){timers.splice(i--,1);
+}}if(!timers.length){clearInterval(jQuery.timerId);
+jQuery.timerId=null;
+}},13);
+}},show:function(){this.options.orig[this.prop]=jQuery.attr(this.elem.style,this.prop);
+this.options.show=true;
+this.custom(0,this.cur());
+if(this.prop=="width"||this.prop=="height"){this.elem.style[this.prop]="1px";
+}jQuery(this.elem).show();
+},hide:function(){this.options.orig[this.prop]=jQuery.attr(this.elem.style,this.prop);
+this.options.hide=true;
+this.custom(this.cur(),0);
+},step:function(gotoEnd){var t=now();
+if(gotoEnd||t>this.options.duration+this.startTime){this.now=this.end;
+this.pos=this.state=1;
+this.update();
+this.options.curAnim[this.prop]=true;
+var done=true;
+for(var i in this.options.curAnim){if(this.options.curAnim[i]!==true){done=false;
+}}if(done){if(this.options.display!=null){this.elem.style.overflow=this.options.overflow;
+this.elem.style.display=this.options.display;
+if(jQuery.css(this.elem,"display")=="none"){this.elem.style.display="block";
+}}if(this.options.hide){this.elem.style.display="none";
+}if(this.options.hide||this.options.show){for(var p in this.options.curAnim){jQuery.attr(this.elem.style,p,this.options.orig[p]);
+}}}if(done){this.options.complete.call(this.elem);
+}return false;
+}else{var n=t-this.startTime;
+this.state=n/this.options.duration;
+this.pos=jQuery.easing[this.options.easing||(jQuery.easing.swing?"swing":"linear")](this.state,n,0,1,this.options.duration);
+this.now=this.start+((this.end-this.start)*this.pos);
+this.update();
+}return true;
+}};
+jQuery.extend(jQuery.fx,{speeds:{slow:600,fast:200,def:400},step:{scrollLeft:function(fx){fx.elem.scrollLeft=fx.now;
+},scrollTop:function(fx){fx.elem.scrollTop=fx.now;
+},opacity:function(fx){jQuery.attr(fx.elem.style,"opacity",fx.now);
+},_default:function(fx){fx.elem.style[fx.prop]=fx.now+fx.unit;
+}}});
+jQuery.fn.offset=function(){var left=0,top=0,elem=this[0],results;
+if(elem){with(jQuery.browser){var parent=elem.parentNode,offsetChild=elem,offsetParent=elem.offsetParent,doc=elem.ownerDocument,safari2=safari&&parseInt(version)<522&&!/adobeair/i.test(userAgent),css=jQuery.curCSS,fixed=css(elem,"position")=="fixed";
+if(elem.getBoundingClientRect){var box=elem.getBoundingClientRect();
+add(box.left+Math.max(doc.documentElement.scrollLeft,doc.body.scrollLeft),box.top+Math.max(doc.documentElement.scrollTop,doc.body.scrollTop));
+add(-doc.documentElement.clientLeft,-doc.documentElement.clientTop);
+}else{add(elem.offsetLeft,elem.offsetTop);
+while(offsetParent){add(offsetParent.offsetLeft,offsetParent.offsetTop);
+if(mozilla&&!/^t(able|d|h)$/i.test(offsetParent.tagName)||safari&&!safari2){border(offsetParent);
+}if(!fixed&&css(offsetParent,"position")=="fixed"){fixed=true;
+}offsetChild=/^body$/i.test(offsetParent.tagName)?offsetChild:offsetParent;
+offsetParent=offsetParent.offsetParent;
+}while(parent&&parent.tagName&&!/^body|html$/i.test(parent.tagName)){if(!/^inline|table.*$/i.test(css(parent,"display"))){add(-parent.scrollLeft,-parent.scrollTop);
+}if(mozilla&&css(parent,"overflow")!="visible"){border(parent);
+}parent=parent.parentNode;
+}if((safari2&&(fixed||css(offsetChild,"position")=="absolute"))||(mozilla&&css(offsetChild,"position")!="absolute")){add(-doc.body.offsetLeft,-doc.body.offsetTop);
+}if(fixed){add(Math.max(doc.documentElement.scrollLeft,doc.body.scrollLeft),Math.max(doc.documentElement.scrollTop,doc.body.scrollTop));
+}}results={top:top,left:left};
+}}function border(elem){add(jQuery.curCSS(elem,"borderLeftWidth",true),jQuery.curCSS(elem,"borderTopWidth",true));
+}function add(l,t){left+=parseInt(l,10)||0;
+top+=parseInt(t,10)||0;
+}return results;
+};
+jQuery.fn.extend({position:function(){var left=0,top=0,results;
+if(this[0]){var offsetParent=this.offsetParent(),offset=this.offset(),parentOffset=/^body|html$/i.test(offsetParent[0].tagName)?{top:0,left:0}:offsetParent.offset();
+offset.top-=num(this,"marginTop");
+offset.left-=num(this,"marginLeft");
+parentOffset.top+=num(offsetParent,"borderTopWidth");
+parentOffset.left+=num(offsetParent,"borderLeftWidth");
+results={top:offset.top-parentOffset.top,left:offset.left-parentOffset.left};
+}return results;
+},offsetParent:function(){var offsetParent=this[0].offsetParent;
+while(offsetParent&&(!/^body|html$/i.test(offsetParent.tagName)&&jQuery.css(offsetParent,"position")=="static")){offsetParent=offsetParent.offsetParent;
+}return jQuery(offsetParent);
+}});
+jQuery.each(["Left","Top"],function(i,name){var method="scroll"+name;
+jQuery.fn[method]=function(val){if(!this[0]){return ;
+}return val!=undefined?this.each(function(){this==window||this==document?window.scrollTo(!i?val:jQuery(window).scrollLeft(),i?val:jQuery(window).scrollTop()):this[method]=val;
+}):this[0]==window||this[0]==document?self[i?"pageYOffset":"pageXOffset"]||jQuery.boxModel&&document.documentElement[method]||document.body[method]:this[0][method];
+};
+});
+jQuery.each(["Height","Width"],function(i,name){var tl=i?"Left":"Top",br=i?"Right":"Bottom";
+jQuery.fn["inner"+name]=function(){return this[name.toLowerCase()]()+num(this,"padding"+tl)+num(this,"padding"+br);
+};
+jQuery.fn["outer"+name]=function(margin){return this["inner"+name]()+num(this,"border"+tl+"Width")+num(this,"border"+br+"Width")+(margin?num(this,"margin"+tl)+num(this,"margin"+br):0);
+};
+});
+})();
+
+
+/* platform.js */
+SimileAjax.jQuery=jQuery.noConflict(true);
+if(typeof window["$"]=="undefined"){window.$=SimileAjax.jQuery;
+}SimileAjax.Platform.os={isMac:false,isWin:false,isWin32:false,isUnix:false};
+SimileAjax.Platform.browser={isIE:false,isNetscape:false,isMozilla:false,isFirefox:false,isOpera:false,isSafari:false,majorVersion:0,minorVersion:0};
+(function(){var C=navigator.appName.toLowerCase();
+var A=navigator.userAgent.toLowerCase();
+SimileAjax.Platform.os.isMac=(A.indexOf("mac")!=-1);
+SimileAjax.Platform.os.isWin=(A.indexOf("win")!=-1);
+SimileAjax.Platform.os.isWin32=SimileAjax.Platform.isWin&&(A.indexOf("95")!=-1||A.indexOf("98")!=-1||A.indexOf("nt")!=-1||A.indexOf("win32")!=-1||A.indexOf("32bit")!=-1);
+SimileAjax.Platform.os.isUnix=(A.indexOf("x11")!=-1);
+SimileAjax.Platform.browser.isIE=(C.indexOf("microsoft")!=-1);
+SimileAjax.Platform.browser.isNetscape=(C.indexOf("netscape")!=-1);
+SimileAjax.Platform.browser.isMozilla=(A.indexOf("mozilla")!=-1);
+SimileAjax.Platform.browser.isFirefox=(A.indexOf("firefox")!=-1);
+SimileAjax.Platform.browser.isOpera=(C.indexOf("opera")!=-1);
+SimileAjax.Platform.browser.isSafari=(C.indexOf("safari")!=-1);
+var E=function(G){var F=G.split(".");
+SimileAjax.Platform.browser.majorVersion=parseInt(F[0]);
+SimileAjax.Platform.browser.minorVersion=parseInt(F[1]);
+};
+var B=function(H,G,I){var F=H.indexOf(G,I);
+return F>=0?F:H.length;
+};
+if(SimileAjax.Platform.browser.isMozilla){var D=A.indexOf("mozilla/");
+if(D>=0){E(A.substring(D+8,B(A," ",D)));
+}}if(SimileAjax.Platform.browser.isIE){var D=A.indexOf("msie ");
+if(D>=0){E(A.substring(D+5,B(A,";",D)));
+}}if(SimileAjax.Platform.browser.isNetscape){var D=A.indexOf("rv:");
+if(D>=0){E(A.substring(D+3,B(A,")",D)));
+}}if(SimileAjax.Platform.browser.isFirefox){var D=A.indexOf("firefox/");
+if(D>=0){E(A.substring(D+8,B(A," ",D)));
+}}if(!("localeCompare" in String.prototype)){String.prototype.localeCompare=function(F){if(this<F){return -1;
+}else{if(this>F){return 1;
+}else{return 0;
+}}};
+}})();
+SimileAjax.Platform.getDefaultLocale=function(){return SimileAjax.Platform.clientLocale;
+};
+
+
+/* ajax.js */
+SimileAjax.ListenerQueue=function(A){this._listeners=[];
+this._wildcardHandlerName=A;
+};
+SimileAjax.ListenerQueue.prototype.add=function(A){this._listeners.push(A);
+};
+SimileAjax.ListenerQueue.prototype.remove=function(C){var B=this._listeners;
+for(var A=0;
+A<B.length;
+A++){if(B[A]==C){B.splice(A,1);
+break;
+}}};
+SimileAjax.ListenerQueue.prototype.fire=function(B,A){var D=[].concat(this._listeners);
+for(var C=0;
+C<D.length;
+C++){var E=D[C];
+if(B in E){try{E[B].apply(E,A);
+}catch(F){SimileAjax.Debug.exception("Error firing event of name "+B,F);
+}}else{if(this._wildcardHandlerName!=null&&this._wildcardHandlerName in E){try{E[this._wildcardHandlerName].apply(E,[B]);
+}catch(F){SimileAjax.Debug.exception("Error firing event of name "+B+" to wildcard handler",F);
+}}}}};
+
+
+/* data-structure.js */
+SimileAjax.Set=function(A){this._hash={};
+this._count=0;
+if(A instanceof Array){for(var B=0;
+B<A.length;
+B++){this.add(A[B]);
+}}else{if(A instanceof SimileAjax.Set){this.addSet(A);
+}}};
+SimileAjax.Set.prototype.add=function(A){if(!(A in this._hash)){this._hash[A]=true;
+this._count++;
+return true;
+}return false;
+};
+SimileAjax.Set.prototype.addSet=function(B){for(var A in B._hash){this.add(A);
+}};
+SimileAjax.Set.prototype.remove=function(A){if(A in this._hash){delete this._hash[A];
+this._count--;
+return true;
+}return false;
+};
+SimileAjax.Set.prototype.removeSet=function(B){for(var A in B._hash){this.remove(A);
+}};
+SimileAjax.Set.prototype.retainSet=function(B){for(var A in this._hash){if(!B.contains(A)){delete this._hash[A];
+this._count--;
+}}};
+SimileAjax.Set.prototype.contains=function(A){return(A in this._hash);
+};
+SimileAjax.Set.prototype.size=function(){return this._count;
+};
+SimileAjax.Set.prototype.toArray=function(){var A=[];
+for(var B in this._hash){A.push(B);
+}return A;
+};
+SimileAjax.Set.prototype.visit=function(A){for(var B in this._hash){if(A(B)==true){break;
+}}};
+SimileAjax.SortedArray=function(B,A){this._a=(A instanceof Array)?A:[];
+this._compare=B;
+};
+SimileAjax.SortedArray.prototype.add=function(C){var A=this;
+var B=this.find(function(D){return A._compare(D,C);
+});
+if(B<this._a.length){this._a.splice(B,0,C);
+}else{this._a.push(C);
+}};
+SimileAjax.SortedArray.prototype.remove=function(C){var A=this;
+var B=this.find(function(D){return A._compare(D,C);
+});
+while(B<this._a.length&&this._compare(this._a[B],C)==0){if(this._a[B]==C){this._a.splice(B,1);
+return true;
+}else{B++;
+}}return false;
+};
+SimileAjax.SortedArray.prototype.removeAll=function(){this._a=[];
+};
+SimileAjax.SortedArray.prototype.elementAt=function(A){return this._a[A];
+};
+SimileAjax.SortedArray.prototype.length=function(){return this._a.length;
+};
+SimileAjax.SortedArray.prototype.find=function(D){var B=0;
+var A=this._a.length;
+while(B<A){var C=Math.floor((B+A)/2);
+var E=D(this._a[C]);
+if(C==B){return E<0?B+1:B;
+}else{if(E<0){B=C;
+}else{A=C;
+}}}return B;
+};
+SimileAjax.SortedArray.prototype.getFirst=function(){return(this._a.length>0)?this._a[0]:null;
+};
+SimileAjax.SortedArray.prototype.getLast=function(){return(this._a.length>0)?this._a[this._a.length-1]:null;
+};
+SimileAjax.EventIndex=function(B){var A=this;
+this._unit=(B!=null)?B:SimileAjax.NativeDateUnit;
+this._events=new SimileAjax.SortedArray(function(D,C){return A._unit.compare(D.getStart(),C.getStart());
+});
+this._idToEvent={};
+this._indexed=true;
+};
+SimileAjax.EventIndex.prototype.getUnit=function(){return this._unit;
+};
+SimileAjax.EventIndex.prototype.getEvent=function(A){return this._idToEvent[A];
+};
+SimileAjax.EventIndex.prototype.add=function(A){this._events.add(A);
+this._idToEvent[A.getID()]=A;
+this._indexed=false;
+};
+SimileAjax.EventIndex.prototype.removeAll=function(){this._events.removeAll();
+this._idToEvent={};
+this._indexed=false;
+};
+SimileAjax.EventIndex.prototype.getCount=function(){return this._events.length();
+};
+SimileAjax.EventIndex.prototype.getIterator=function(A,B){if(!this._indexed){this._index();
+}return new SimileAjax.EventIndex._Iterator(this._events,A,B,this._unit);
+};
+SimileAjax.EventIndex.prototype.getReverseIterator=function(A,B){if(!this._indexed){this._index();
+}return new SimileAjax.EventIndex._ReverseIterator(this._events,A,B,this._unit);
+};
+SimileAjax.EventIndex.prototype.getAllIterator=function(){return new SimileAjax.EventIndex._AllIterator(this._events);
+};
+SimileAjax.EventIndex.prototype.getEarliestDate=function(){var A=this._events.getFirst();
+return(A==null)?null:A.getStart();
+};
+SimileAjax.EventIndex.prototype.getLatestDate=function(){var A=this._events.getLast();
+if(A==null){return null;
+}if(!this._indexed){this._index();
+}var C=A._earliestOverlapIndex;
+var B=this._events.elementAt(C).getEnd();
+for(var D=C+1;
+D<this._events.length();
+D++){B=this._unit.later(B,this._events.elementAt(D).getEnd());
+}return B;
+};
+SimileAjax.EventIndex.prototype._index=function(){var D=this._events.length();
+for(var E=0;
+E<D;
+E++){var C=this._events.elementAt(E);
+C._earliestOverlapIndex=E;
+}var G=1;
+for(var E=0;
+E<D;
+E++){var C=this._events.elementAt(E);
+var B=C.getEnd();
+G=Math.max(G,E+1);
+while(G<D){var A=this._events.elementAt(G);
+var F=A.getStart();
+if(this._unit.compare(F,B)<0){A._earliestOverlapIndex=E;
+G++;
+}else{break;
+}}}this._indexed=true;
+};
+SimileAjax.EventIndex._Iterator=function(B,A,D,C){this._events=B;
+this._startDate=A;
+this._endDate=D;
+this._unit=C;
+this._currentIndex=B.find(function(E){return C.compare(E.getStart(),A);
+});
+if(this._currentIndex-1>=0){this._currentIndex=this._events.elementAt(this._currentIndex-1)._earliestOverlapIndex;
+}this._currentIndex--;
+this._maxIndex=B.find(function(E){return C.compare(E.getStart(),D);
+});
+this._hasNext=false;
+this._next=null;
+this._findNext();
+};
+SimileAjax.EventIndex._Iterator.prototype={hasNext:function(){return this._hasNext;
+},next:function(){if(this._hasNext){var A=this._next;
+this._findNext();
+return A;
+}else{return null;
+}},_findNext:function(){var B=this._unit;
+while((++this._currentIndex)<this._maxIndex){var A=this._events.elementAt(this._currentIndex);
+if(B.compare(A.getStart(),this._endDate)<0&&B.compare(A.getEnd(),this._startDate)>0){this._next=A;
+this._hasNext=true;
+return ;
+}}this._next=null;
+this._hasNext=false;
+}};
+SimileAjax.EventIndex._ReverseIterator=function(B,A,D,C){this._events=B;
+this._startDate=A;
+this._endDate=D;
+this._unit=C;
+this._minIndex=B.find(function(E){return C.compare(E.getStart(),A);
+});
+if(this._minIndex-1>=0){this._minIndex=this._events.elementAt(this._minIndex-1)._earliestOverlapIndex;
+}this._maxIndex=B.find(function(E){return C.compare(E.getStart(),D);
+});
+this._currentIndex=this._maxIndex;
+this._hasNext=false;
+this._next=null;
+this._findNext();
+};
+SimileAjax.EventIndex._ReverseIterator.prototype={hasNext:function(){return this._hasNext;
+},next:function(){if(this._hasNext){var A=this._next;
+this._findNext();
+return A;
+}else{return null;
+}},_findNext:function(){var B=this._unit;
+while((--this._currentIndex)>=this._minIndex){var A=this._events.elementAt(this._currentIndex);
+if(B.compare(A.getStart(),this._endDate)<0&&B.compare(A.getEnd(),this._startDate)>0){this._next=A;
+this._hasNext=true;
+return ;
+}}this._next=null;
+this._hasNext=false;
+}};
+SimileAjax.EventIndex._AllIterator=function(A){this._events=A;
+this._index=0;
+};
+SimileAjax.EventIndex._AllIterator.prototype={hasNext:function(){return this._index<this._events.length();
+},next:function(){return this._index<this._events.length()?this._events.elementAt(this._index++):null;
+}};
+
+
+/* date-time.js */
+SimileAjax.DateTime=new Object();
+SimileAjax.DateTime.MILLISECOND=0;
+SimileAjax.DateTime.SECOND=1;
+SimileAjax.DateTime.MINUTE=2;
+SimileAjax.DateTime.HOUR=3;
+SimileAjax.DateTime.DAY=4;
+SimileAjax.DateTime.WEEK=5;
+SimileAjax.DateTime.MONTH=6;
+SimileAjax.DateTime.YEAR=7;
+SimileAjax.DateTime.DECADE=8;
+SimileAjax.DateTime.CENTURY=9;
+SimileAjax.DateTime.MILLENNIUM=10;
+SimileAjax.DateTime.EPOCH=-1;
+SimileAjax.DateTime.ERA=-2;
+SimileAjax.DateTime.gregorianUnitLengths=[];
+(function(){var B=SimileAjax.DateTime;
+var A=B.gregorianUnitLengths;
+A[B.MILLISECOND]=1;
+A[B.SECOND]=1000;
+A[B.MINUTE]=A[B.SECOND]*60;
+A[B.HOUR]=A[B.MINUTE]*60;
+A[B.DAY]=A[B.HOUR]*24;
+A[B.WEEK]=A[B.DAY]*7;
+A[B.MONTH]=A[B.DAY]*31;
+A[B.YEAR]=A[B.DAY]*365;
+A[B.DECADE]=A[B.YEAR]*10;
+A[B.CENTURY]=A[B.YEAR]*100;
+A[B.MILLENNIUM]=A[B.YEAR]*1000;
+})();
+SimileAjax.DateTime._dateRegexp=new RegExp("^(-?)([0-9]{4})("+["(-?([0-9]{2})(-?([0-9]{2}))?)","(-?([0-9]{3}))","(-?W([0-9]{2})(-?([1-7]))?)"].join("|")+")?$");
+SimileAjax.DateTime._timezoneRegexp=new RegExp("Z|(([-+])([0-9]{2})(:?([0-9]{2}))?)$");
+SimileAjax.DateTime._timeRegexp=new RegExp("^([0-9]{2})(:?([0-9]{2})(:?([0-9]{2})(.([0-9]+))?)?)?$");
+SimileAjax.DateTime.setIso8601Date=function(H,F){var I=F.match(SimileAjax.DateTime._dateRegexp);
+if(!I){throw new Error("Invalid date string: "+F);
+}var B=(I[1]=="-")?-1:1;
+var J=B*I[2];
+var G=I[5];
+var C=I[7];
+var E=I[9];
+var A=I[11];
+var M=(I[13])?I[13]:1;
+H.setUTCFullYear(J);
+if(E){H.setUTCMonth(0);
+H.setUTCDate(Number(E));
+}else{if(A){H.setUTCMonth(0);
+H.setUTCDate(1);
+var L=H.getUTCDay();
+var K=(L)?L:7;
+var D=Number(M)+(7*Number(A));
+if(K<=4){H.setUTCDate(D+1-K);
+}else{H.setUTCDate(D+8-K);
+}}else{if(G){H.setUTCDate(1);
+H.setUTCMonth(G-1);
+}if(C){H.setUTCDate(C);
+}}}return H;
+};
+SimileAjax.DateTime.setIso8601Time=function(F,C){var G=C.match(SimileAjax.DateTime._timeRegexp);
+if(!G){SimileAjax.Debug.warn("Invalid time string: "+C);
+return false;
+}var A=G[1];
+var E=Number((G[3])?G[3]:0);
+var D=(G[5])?G[5]:0;
+var B=G[7]?(Number("0."+G[7])*1000):0;
+F.setUTCHours(A);
+F.setUTCMinutes(E);
+F.setUTCSeconds(D);
+F.setUTCMilliseconds(B);
+return F;
+};
+SimileAjax.DateTime.timezoneOffset=new Date().getTimezoneOffset();
+SimileAjax.DateTime.setIso8601=function(B,A){var D=null;
+var E=(A.indexOf("T")==-1)?A.split(" "):A.split("T");
+SimileAjax.DateTime.setIso8601Date(B,E[0]);
+if(E.length==2){var C=E[1].match(SimileAjax.DateTime._timezoneRegexp);
+if(C){if(C[0]=="Z"){D=0;
+}else{D=(Number(C[3])*60)+Number(C[5]);
+D*=((C[2]=="-")?1:-1);
+}E[1]=E[1].substr(0,E[1].length-C[0].length);
+}SimileAjax.DateTime.setIso8601Time(B,E[1]);
+}if(D==null){D=B.getTimezoneOffset();
+}B.setTime(B.getTime()+D*60000);
+return B;
+};
+SimileAjax.DateTime.parseIso8601DateTime=function(A){try{return SimileAjax.DateTime.setIso8601(new Date(0),A);
+}catch(B){return null;
+}};
+SimileAjax.DateTime.parseGregorianDateTime=function(G){if(G==null){return null;
+}else{if(G instanceof Date){return G;
+}}var B=G.toString();
+if(B.length>0&&B.length<8){var C=B.indexOf(" ");
+if(C>0){var A=parseInt(B.substr(0,C));
+var E=B.substr(C+1);
+if(E.toLowerCase()=="bc"){A=1-A;
+}}else{var A=parseInt(B);
+}var F=new Date(0);
+F.setUTCFullYear(A);
+return F;
+}try{return new Date(Date.parse(B));
+}catch(D){return null;
+}};
+SimileAjax.DateTime.roundDownToInterval=function(B,G,J,K,A){var D=J*SimileAjax.DateTime.gregorianUnitLengths[SimileAjax.DateTime.HOUR];
+var I=new Date(B.getTime()+D);
+var E=function(L){L.setUTCMilliseconds(0);
+L.setUTCSeconds(0);
+L.setUTCMinutes(0);
+L.setUTCHours(0);
+};
+var C=function(L){E(L);
+L.setUTCDate(1);
+L.setUTCMonth(0);
+};
+switch(G){case SimileAjax.DateTime.MILLISECOND:var H=I.getUTCMilliseconds();
+I.setUTCMilliseconds(H-(H%K));
+break;
+case SimileAjax.DateTime.SECOND:I.setUTCMilliseconds(0);
+var H=I.getUTCSeconds();
+I.setUTCSeconds(H-(H%K));
+break;
+case SimileAjax.DateTime.MINUTE:I.setUTCMilliseconds(0);
+I.setUTCSeconds(0);
+var H=I.getUTCMinutes();
+I.setTime(I.getTime()-(H%K)*SimileAjax.DateTime.gregorianUnitLengths[SimileAjax.DateTime.MINUTE]);
+break;
+case SimileAjax.DateTime.HOUR:I.setUTCMilliseconds(0);
+I.setUTCSeconds(0);
+I.setUTCMinutes(0);
+var H=I.getUTCHours();
+I.setUTCHours(H-(H%K));
+break;
+case SimileAjax.DateTime.DAY:E(I);
+break;
+case SimileAjax.DateTime.WEEK:E(I);
+var F=(I.getUTCDay()+7-A)%7;
+I.setTime(I.getTime()-F*SimileAjax.DateTime.gregorianUnitLengths[SimileAjax.DateTime.DAY]);
+break;
+case SimileAjax.DateTime.MONTH:E(I);
+I.setUTCDate(1);
+var H=I.getUTCMonth();
+I.setUTCMonth(H-(H%K));
+break;
+case SimileAjax.DateTime.YEAR:C(I);
+var H=I.getUTCFullYear();
+I.setUTCFullYear(H-(H%K));
+break;
+case SimileAjax.DateTime.DECADE:C(I);
+I.setUTCFullYear(Math.floor(I.getUTCFullYear()/10)*10);
+break;
+case SimileAjax.DateTime.CENTURY:C(I);
+I.setUTCFullYear(Math.floor(I.getUTCFullYear()/100)*100);
+break;
+case SimileAjax.DateTime.MILLENNIUM:C(I);
+I.setUTCFullYear(Math.floor(I.getUTCFullYear()/1000)*1000);
+break;
+}B.setTime(I.getTime()-D);
+};
+SimileAjax.DateTime.roundUpToInterval=function(D,F,C,A,B){var E=D.getTime();
+SimileAjax.DateTime.roundDownToInterval(D,F,C,A,B);
+if(D.getTime()<E){D.setTime(D.getTime()+SimileAjax.DateTime.gregorianUnitLengths[F]*A);
+}};
+SimileAjax.DateTime.incrementByInterval=function(B,E,A){A=(typeof A=="undefined")?0:A;
+var D=A*SimileAjax.DateTime.gregorianUnitLengths[SimileAjax.DateTime.HOUR];
+var C=new Date(B.getTime()+D);
+switch(E){case SimileAjax.DateTime.MILLISECOND:C.setTime(C.getTime()+1);
+break;
+case SimileAjax.DateTime.SECOND:C.setTime(C.getTime()+1000);
+break;
+case SimileAjax.DateTime.MINUTE:C.setTime(C.getTime()+SimileAjax.DateTime.gregorianUnitLengths[SimileAjax.DateTime.MINUTE]);
+break;
+case SimileAjax.DateTime.HOUR:C.setTime(C.getTime()+SimileAjax.DateTime.gregorianUnitLengths[SimileAjax.DateTime.HOUR]);
+break;
+case SimileAjax.DateTime.DAY:C.setUTCDate(C.getUTCDate()+1);
+break;
+case SimileAjax.DateTime.WEEK:C.setUTCDate(C.getUTCDate()+7);
+break;
+case SimileAjax.DateTime.MONTH:C.setUTCMonth(C.getUTCMonth()+1);
+break;
+case SimileAjax.DateTime.YEAR:C.setUTCFullYear(C.getUTCFullYear()+1);
+break;
+case SimileAjax.DateTime.DECADE:C.setUTCFullYear(C.getUTCFullYear()+10);
+break;
+case SimileAjax.DateTime.CENTURY:C.setUTCFullYear(C.getUTCFullYear()+100);
+break;
+case SimileAjax.DateTime.MILLENNIUM:C.setUTCFullYear(C.getUTCFullYear()+1000);
+break;
+}B.setTime(C.getTime()-D);
+};
+SimileAjax.DateTime.removeTimeZoneOffset=function(B,A){return new Date(B.getTime()+A*SimileAjax.DateTime.gregorianUnitLengths[SimileAjax.DateTime.HOUR]);
+};
+SimileAjax.DateTime.getTimezone=function(){var A=new Date().getTimezoneOffset();
+return A/-60;
+};
+
+
+/* debug.js */
+SimileAjax.Debug={silent:false};
+SimileAjax.Debug.log=function(B){var A;
+if("console" in window&&"log" in window.console){A=function(C){console.log(C);
+};
+}else{A=function(C){if(!SimileAjax.Debug.silent){alert(C);
+}};
+}SimileAjax.Debug.log=A;
+A(B);
+};
+SimileAjax.Debug.warn=function(B){var A;
+if("console" in window&&"warn" in window.console){A=function(C){console.warn(C);
+};
+}else{A=function(C){if(!SimileAjax.Debug.silent){alert(C);
+}};
+}SimileAjax.Debug.warn=A;
+A(B);
+};
+SimileAjax.Debug.exception=function(B,D){var A,C=SimileAjax.parseURLParameters();
+if(C.errors=="throw"||SimileAjax.params.errors=="throw"){A=function(F,E){throw (F);
+};
+}else{if("console" in window&&"error" in window.console){A=function(F,E){if(E!=null){console.error(E+" %o",F);
+}else{console.error(F);
+}throw (F);
+};
+}else{A=function(F,E){if(!SimileAjax.Debug.silent){alert("Caught exception: "+E+"\n\nDetails: "+("description" in F?F.description:F));
+}throw (F);
+};
+}}SimileAjax.Debug.exception=A;
+A(B,D);
+};
+SimileAjax.Debug.objectToString=function(A){return SimileAjax.Debug._objectToString(A,"");
+};
+SimileAjax.Debug._objectToString=function(D,A){var C=A+" ";
+if(typeof D=="object"){var B="{";
+for(E in D){B+=C+E+": "+SimileAjax.Debug._objectToString(D[E],C)+"\n";
+}B+=A+"}";
+return B;
+}else{if(typeof D=="array"){var B="[";
+for(var E=0;
+E<D.length;
+E++){B+=SimileAjax.Debug._objectToString(D[E],C)+"\n";
+}B+=A+"]";
+return B;
+}else{return D;
+}}};
+
+
+/* dom.js */
+SimileAjax.DOM=new Object();
+SimileAjax.DOM.registerEventWithObject=function(C,A,D,B){SimileAjax.DOM.registerEvent(C,A,function(F,E,G){return D[B].call(D,F,E,G);
+});
+};
+SimileAjax.DOM.registerEvent=function(C,B,D){var A=function(E){E=(E)?E:((event)?event:null);
+if(E){var F=(E.target)?E.target:((E.srcElement)?E.srcElement:null);
+if(F){F=(F.nodeType==1||F.nodeType==9)?F:F.parentNode;
+}return D(C,E,F);
+}return true;
+};
+if(SimileAjax.Platform.browser.isIE){C.attachEvent("on"+B,A);
+}else{C.addEventListener(B,A,false);
+}};
+SimileAjax.DOM.getPageCoordinates=function(B){var E=0;
+var D=0;
+if(B.nodeType!=1){B=B.parentNode;
+}var C=B;
+while(C!=null){E+=C.offsetLeft;
+D+=C.offsetTop;
+C=C.offsetParent;
+}var A=document.body;
+while(B!=null&&B!=A){if("scrollLeft" in B){E-=B.scrollLeft;
+D-=B.scrollTop;
+}B=B.parentNode;
+}return{left:E,top:D};
+};
+SimileAjax.DOM.getSize=function(B){var A=this.getStyle(B,"width");
+var C=this.getStyle(B,"height");
+if(A.indexOf("px")>-1){A=A.replace("px","");
+}if(C.indexOf("px")>-1){C=C.replace("px","");
+}return{w:A,h:C};
+};
+SimileAjax.DOM.getStyle=function(B,A){if(B.currentStyle){var C=B.currentStyle[A];
+}else{if(window.getComputedStyle){var C=document.defaultView.getComputedStyle(B,null).getPropertyValue(A);
+}else{var C="";
+}}return C;
+};
+SimileAjax.DOM.getEventRelativeCoordinates=function(A,B){if(SimileAjax.Platform.browser.isIE){if(A.type=="mousewheel"){var C=SimileAjax.DOM.getPageCoordinates(B);
+return{x:A.clientX-C.left,y:A.clientY-C.top};
+}else{return{x:A.offsetX,y:A.offsetY};
+}}else{var C=SimileAjax.DOM.getPageCoordinates(B);
+if((A.type=="DOMMouseScroll")&&SimileAjax.Platform.browser.isFirefox&&(SimileAjax.Platform.browser.majorVersion==2)){return{x:A.screenX-C.left,y:A.screenY-C.top};
+}else{return{x:A.pageX-C.left,y:A.pageY-C.top};
+}}};
+SimileAjax.DOM.getEventPageCoordinates=function(A){if(SimileAjax.Platform.browser.isIE){return{x:A.clientX+document.body.scrollLeft,y:A.clientY+document.body.scrollTop};
+}else{return{x:A.pageX,y:A.pageY};
+}};
+SimileAjax.DOM.hittest=function(A,C,B){return SimileAjax.DOM._hittest(document.body,A,C,B);
+};
+SimileAjax.DOM._hittest=function(C,L,K,H){var M=C.childNodes;
+outer:for(var G=0;
+G<M.length;
+G++){var A=M[G];
+for(var F=0;
+F<H.length;
+F++){if(A==H[F]){continue outer;
+}}if(A.offsetWidth==0&&A.offsetHeight==0){var B=SimileAjax.DOM._hittest(A,L,K,H);
+if(B!=A){return B;
+}}else{var J=0;
+var E=0;
+var D=A;
+while(D){J+=D.offsetTop;
+E+=D.offsetLeft;
+D=D.offsetParent;
+}if(E<=L&&J<=K&&(L-E)<A.offsetWidth&&(K-J)<A.offsetHeight){return SimileAjax.DOM._hittest(A,L,K,H);
+}else{if(A.nodeType==1&&A.tagName=="TR"){var I=SimileAjax.DOM._hittest(A,L,K,H);
+if(I!=A){return I;
+}}}}}return C;
+};
+SimileAjax.DOM.cancelEvent=function(A){A.returnValue=false;
+A.cancelBubble=true;
+if("preventDefault" in A){A.preventDefault();
+}};
+SimileAjax.DOM.appendClassName=function(C,D){var B=C.className.split(" ");
+for(var A=0;
+A<B.length;
+A++){if(B[A]==D){return ;
+}}B.push(D);
+C.className=B.join(" ");
+};
+SimileAjax.DOM.createInputElement=function(A){var B=document.createElement("div");
+B.innerHTML="<input type='"+A+"' />";
+return B.firstChild;
+};
+SimileAjax.DOM.createDOMFromTemplate=function(B){var A={};
+A.elmt=SimileAjax.DOM._createDOMFromTemplate(B,A,null);
+return A;
+};
+SimileAjax.DOM._createDOMFromTemplate=function(A,I,E){if(A==null){return null;
+}else{if(typeof A!="object"){var D=document.createTextNode(A);
+if(E!=null){E.appendChild(D);
+}return D;
+}else{var C=null;
+if("tag" in A){var J=A.tag;
+if(E!=null){if(J=="tr"){C=E.insertRow(E.rows.length);
+}else{if(J=="td"){C=E.insertCell(E.cells.length);
+}}}if(C==null){C=J=="input"?SimileAjax.DOM.createInputElement(A.type):document.createElement(J);
+if(E!=null){E.appendChild(C);
+}}}else{C=A.elmt;
+if(E!=null){E.appendChild(C);
+}}for(var B in A){var G=A[B];
+if(B=="field"){I[G]=C;
+}else{if(B=="className"){C.className=G;
+}else{if(B=="id"){C.id=G;
+}else{if(B=="title"){C.title=G;
+}else{if(B=="type"&&C.tagName=="input"){}else{if(B=="style"){for(n in G){var H=G[n];
+if(n=="float"){n=SimileAjax.Platform.browser.isIE?"styleFloat":"cssFloat";
+}C.style[n]=H;
+}}else{if(B=="children"){for(var F=0;
+F<G.length;
+F++){SimileAjax.DOM._createDOMFromTemplate(G[F],I,C);
+}}else{if(B!="tag"&&B!="elmt"){C.setAttribute(B,G);
+}}}}}}}}}return C;
+}}};
+SimileAjax.DOM._cachedParent=null;
+SimileAjax.DOM.createElementFromString=function(A){if(SimileAjax.DOM._cachedParent==null){SimileAjax.DOM._cachedParent=document.createElement("div");
+}SimileAjax.DOM._cachedParent.innerHTML=A;
+return SimileAjax.DOM._cachedParent.firstChild;
+};
+SimileAjax.DOM.createDOMFromString=function(A,C,D){var B=typeof A=="string"?document.createElement(A):A;
+B.innerHTML=C;
+var E={elmt:B};
+SimileAjax.DOM._processDOMChildrenConstructedFromString(E,B,D!=null?D:{});
+return E;
+};
+SimileAjax.DOM._processDOMConstructedFromString=function(D,A,B){var E=A.id;
+if(E!=null&&E.length>0){A.removeAttribute("id");
+if(E in B){var C=A.parentNode;
+C.insertBefore(B[E],A);
+C.removeChild(A);
+D[E]=B[E];
+return ;
+}else{D[E]=A;
+}}if(A.hasChildNodes()){SimileAjax.DOM._processDOMChildrenConstructedFromString(D,A,B);
+}};
+SimileAjax.DOM._processDOMChildrenConstructedFromString=function(E,B,D){var C=B.firstChild;
+while(C!=null){var A=C.nextSibling;
+if(C.nodeType==1){SimileAjax.DOM._processDOMConstructedFromString(E,C,D);
+}C=A;
+}};
+
+
+/* graphics.js */
+SimileAjax.Graphics=new Object();
+SimileAjax.Graphics.pngIsTranslucent=(!SimileAjax.Platform.browser.isIE)||(SimileAjax.Platform.browser.majorVersion>6);
+SimileAjax.Graphics._createTranslucentImage1=function(A,C){var B=document.createElement("img");
+B.setAttribute("src",A);
+if(C!=null){B.style.verticalAlign=C;
+}return B;
+};
+SimileAjax.Graphics._createTranslucentImage2=function(A,C){var B=document.createElement("img");
+B.style.width="1px";
+B.style.height="1px";
+B.style.filter="progid:DXImageTransform.Microsoft.AlphaImageLoader(src='"+A+"', sizingMethod='image')";
+B.style.verticalAlign=(C!=null)?C:"middle";
+return B;
+};
+SimileAjax.Graphics.createTranslucentImage=SimileAjax.Graphics.pngIsTranslucent?SimileAjax.Graphics._createTranslucentImage1:SimileAjax.Graphics._createTranslucentImage2;
+SimileAjax.Graphics._createTranslucentImageHTML1=function(A,B){return'<img src="'+A+'"'+(B!=null?' style="vertical-align: '+B+';"':"")+" />";
+};
+SimileAjax.Graphics._createTranslucentImageHTML2=function(A,C){var B="width: 1px; height: 1px; filter:progid:DXImageTransform.Microsoft.AlphaImageLoader(src='"+A+"', sizingMethod='image');"+(C!=null?" vertical-align: "+C+";":"");
+return"<img src='"+A+"' style=\""+B+'" />';
+};
+SimileAjax.Graphics.createTranslucentImageHTML=SimileAjax.Graphics.pngIsTranslucent?SimileAjax.Graphics._createTranslucentImageHTML1:SimileAjax.Graphics._createTranslucentImageHTML2;
+SimileAjax.Graphics.setOpacity=function(B,A){if(SimileAjax.Platform.browser.isIE){B.style.filter="progid:DXImageTransform.Microsoft.Alpha(Style=0,Opacity="+A+")";
+}else{var C=(A/100).toString();
+B.style.opacity=C;
+B.style.MozOpacity=C;
+}};
+SimileAjax.Graphics._bubbleMargins={top:33,bottom:42,left:33,right:40};
+SimileAjax.Graphics._arrowOffsets={top:0,bottom:9,left:1,right:8};
+SimileAjax.Graphics._bubblePadding=15;
+SimileAjax.Graphics._bubblePointOffset=6;
+SimileAjax.Graphics._halfArrowWidth=18;
+SimileAjax.Graphics.createBubbleForContentAndPoint=function(E,D,C,A,B){if(typeof A!="number"){A=300;
+}E.style.position="absolute";
+E.style.left="-5000px";
+E.style.top="0px";
+E.style.width=A+"px";
+document.body.appendChild(E);
+window.setTimeout(function(){var H=E.scrollWidth+10;
+var F=E.scrollHeight+10;
+var G=SimileAjax.Graphics.createBubbleForPoint(D,C,H,F,B);
+document.body.removeChild(E);
+E.style.position="static";
+E.style.left="";
+E.style.top="";
+E.style.width=H+"px";
+G.content.appendChild(E);
+},200);
+};
+SimileAjax.Graphics.createBubbleForPoint=function(C,B,N,R,F){function T(){if(typeof window.innerHeight=="number"){return{w:window.innerWidth,h:window.innerHeight};
+}else{if(document.documentElement&&document.documentElement.clientHeight){return{w:document.documentElement.clientWidth,h:document.documentElement.clientHeight};
+}else{if(document.body&&document.body.clientHeight){return{w:document.body.clientWidth,h:document.body.clientHeight};
+}}}}var L=function(){if(!M._closed){document.body.removeChild(M._div);
+M._doc=null;
+M._div=null;
+M._content=null;
+M._closed=true;
+}};
+var M={_closed:false};
+var O=T();
+var H=O.w;
+var G=O.h;
+var D=SimileAjax.Graphics._bubbleMargins;
+N=parseInt(N,10);
+R=parseInt(R,10);
+var P=D.left+N+D.right;
+var U=D.top+R+D.bottom;
+var Q=SimileAjax.Graphics.pngIsTranslucent;
+var J=SimileAjax.urlPrefix;
+var A=function(Z,Y,a,X){Z.style.position="absolute";
+Z.style.width=a+"px";
+Z.style.height=X+"px";
+if(Q){Z.style.background="url("+Y+")";
+}else{Z.style.filter="progid:DXImageTransform.Microsoft.AlphaImageLoader(src='"+Y+"', sizingMethod='crop')";
+}};
+var K=document.createElement("div");
+K.style.width=P+"px";
+K.style.height=U+"px";
+K.style.position="absolute";
+K.style.zIndex=1000;
+var W=SimileAjax.WindowManager.pushLayer(L,true,K);
+M._div=K;
+M.close=function(){SimileAjax.WindowManager.popLayer(W);
+};
+var I=document.createElement("div");
+I.style.width="100%";
+I.style.height="100%";
+I.style.position="relative";
+K.appendChild(I);
+var S=function(Z,c,b,a,Y){var X=document.createElement("div");
+X.style.left=c+"px";
+X.style.top=b+"px";
+A(X,Z,a,Y);
+I.appendChild(X);
+};
+S(J+"data/timeline/bubble-top-left.png",0,0,D.left,D.top);
+S(J+"data/timeline/bubble-top.png",D.left,0,N,D.top);
+S(J+"data/timeline/bubble-top-right.png",D.left+N,0,D.right,D.top);
+S(J+"data/timeline/bubble-left.png",0,D.top,D.left,R);
+S(J+"data/timeline/bubble-right.png",D.left+N,D.top,D.right,R);
+S(J+"data/timeline/bubble-bottom-left.png",0,D.top+R,D.left,D.bottom);
+S(J+"data/timeline/bubble-bottom.png",D.left,D.top+R,N,D.bottom);
+S(J+"data/timeline/bubble-bottom-right.png",D.left+N,D.top+R,D.right,D.bottom);
+var V=document.createElement("div");
+V.style.left=(P-D.right+SimileAjax.Graphics._bubblePadding-16-2)+"px";
+V.style.top=(D.top-SimileAjax.Graphics._bubblePadding+1)+"px";
+V.style.cursor="pointer";
+A(V,J+"data/timeline/close-button.png",16,16);
+SimileAjax.WindowManager.registerEventWithObject(V,"click",M,"close");
+I.appendChild(V);
+var E=document.createElement("div");
+E.style.position="absolute";
+E.style.left=D.left+"px";
+E.style.top=D.top+"px";
+E.style.width=N+"px";
+E.style.height=R+"px";
+E.style.overflow="auto";
+E.style.background="white";
+I.appendChild(E);
+M.content=E;
+(function(){if(C-SimileAjax.Graphics._halfArrowWidth-SimileAjax.Graphics._bubblePadding>0&&C+SimileAjax.Graphics._halfArrowWidth+SimileAjax.Graphics._bubblePadding<H){var Z=C-Math.round(N/2)-D.left;
+Z=C<(H/2)?Math.max(Z,-(D.left-SimileAjax.Graphics._bubblePadding)):Math.min(Z,H+(D.right-SimileAjax.Graphics._bubblePadding)-P);
+if((F&&F=="top")||(!F&&(B-SimileAjax.Graphics._bubblePointOffset-U>0))){var X=document.createElement("div");
+X.style.left=(C-SimileAjax.Graphics._halfArrowWidth-Z)+"px";
+X.style.top=(D.top+R)+"px";
+A(X,J+"data/timeline/bubble-bottom-arrow.png",37,D.bottom);
+I.appendChild(X);
+K.style.left=Z+"px";
+K.style.top=(B-SimileAjax.Graphics._bubblePointOffset-U+SimileAjax.Graphics._arrowOffsets.bottom)+"px";
+return ;
+}else{if((F&&F=="bottom")||(!F&&(B+SimileAjax.Graphics._bubblePointOffset+U<G))){var X=document.createElement("div");
+X.style.left=(C-SimileAjax.Graphics._halfArrowWidth-Z)+"px";
+X.style.top="0px";
+A(X,J+"data/timeline/bubble-top-arrow.png",37,D.top);
+I.appendChild(X);
+K.style.left=Z+"px";
+K.style.top=(B+SimileAjax.Graphics._bubblePointOffset-SimileAjax.Graphics._arrowOffsets.top)+"px";
+return ;
+}}}var Y=B-Math.round(R/2)-D.top;
+Y=B<(G/2)?Math.max(Y,-(D.top-SimileAjax.Graphics._bubblePadding)):Math.min(Y,G+(D.bottom-SimileAjax.Graphics._bubblePadding)-U);
+if((F&&F=="left")||(!F&&(C-SimileAjax.Graphics._bubblePointOffset-P>0))){var X=document.createElement("div");
+X.style.left=(D.left+N)+"px";
+X.style.top=(B-SimileAjax.Graphics._halfArrowWidth-Y)+"px";
+A(X,J+"data/timeline/bubble-right-arrow.png",D.right,37);
+I.appendChild(X);
+K.style.left=(C-SimileAjax.Graphics._bubblePointOffset-P+SimileAjax.Graphics._arrowOffsets.right)+"px";
+K.style.top=Y+"px";
+}else{if((F&&F=="right")||(!F&&(C-SimileAjax.Graphics._bubblePointOffset-P<H))){var X=document.createElement("div");
+X.style.left="0px";
+X.style.top=(B-SimileAjax.Graphics._halfArrowWidth-Y)+"px";
+A(X,J+"data/timeline/bubble-left-arrow.png",D.left,37);
+I.appendChild(X);
+K.style.left=(C+SimileAjax.Graphics._bubblePointOffset-SimileAjax.Graphics._arrowOffsets.left)+"px";
+K.style.top=Y+"px";
+}}})();
+document.body.appendChild(K);
+return M;
+};
+SimileAjax.Graphics.createMessageBubble=function(H){var G=H.createElement("div");
+if(SimileAjax.Graphics.pngIsTranslucent){var I=H.createElement("div");
+I.style.height="33px";
+I.style.background="url("+SimileAjax.urlPrefix+"data/timeline/message-top-left.png) top left no-repeat";
+I.style.paddingLeft="44px";
+G.appendChild(I);
+var C=H.createElement("div");
+C.style.height="33px";
+C.style.background="url("+SimileAjax.urlPrefix+"data/timeline/message-top-right.png) top right no-repeat";
+I.appendChild(C);
+var F=H.createElement("div");
+F.style.background="url("+SimileAjax.urlPrefix+"data/timeline/message-left.png) top left repeat-y";
+F.style.paddingLeft="44px";
+G.appendChild(F);
+var A=H.createElement("div");
+A.style.background="url("+SimileAjax.urlPrefix+"data/timeline/message-right.png) top right repeat-y";
+A.style.paddingRight="44px";
+F.appendChild(A);
+var D=H.createElement("div");
+A.appendChild(D);
+var B=H.createElement("div");
+B.style.height="55px";
+B.style.background="url("+SimileAjax.urlPrefix+"data/timeline/message-bottom-left.png) bottom left no-repeat";
+B.style.paddingLeft="44px";
+G.appendChild(B);
+var E=H.createElement("div");
+E.style.height="55px";
+E.style.background="url("+SimileAjax.urlPrefix+"data/timeline/message-bottom-right.png) bottom right no-repeat";
+B.appendChild(E);
+}else{G.style.border="2px solid #7777AA";
+G.style.padding="20px";
+G.style.background="white";
+SimileAjax.Graphics.setOpacity(G,90);
+var D=H.createElement("div");
+G.appendChild(D);
+}return{containerDiv:G,contentDiv:D};
+};
+SimileAjax.Graphics.createAnimation=function(B,E,D,C,A){return new SimileAjax.Graphics._Animation(B,E,D,C,A);
+};
+SimileAjax.Graphics._Animation=function(B,E,D,C,A){this.f=B;
+this.cont=(typeof A=="function")?A:function(){};
+this.from=E;
+this.to=D;
+this.current=E;
+this.duration=C;
+this.start=new Date().getTime();
+this.timePassed=0;
+};
+SimileAjax.Graphics._Animation.prototype.run=function(){var A=this;
+window.setTimeout(function(){A.step();
+},50);
+};
+SimileAjax.Graphics._Animation.prototype.step=function(){this.timePassed+=50;
+var B=this.timePassed/this.duration;
+var A=-Math.cos(B*Math.PI)/2+0.5;
+var D=A*(this.to-this.from)+this.from;
+try{this.f(D,D-this.current);
+}catch(C){}this.current=D;
+if(this.timePassed<this.duration){this.run();
+}else{this.f(this.to,0);
+this["cont"]();
+}};
+SimileAjax.Graphics.createStructuredDataCopyButton=function(F,D,A,E){var G=document.createElement("div");
+G.style.position="relative";
+G.style.display="inline";
+G.style.width=D+"px";
+G.style.height=A+"px";
+G.style.overflow="hidden";
+G.style.margin="2px";
+if(SimileAjax.Graphics.pngIsTranslucent){G.style.background="url("+F+") no-repeat";
+}else{G.style.filter="progid:DXImageTransform.Microsoft.AlphaImageLoader(src='"+F+"', sizingMethod='image')";
+}var C;
+if(SimileAjax.Platform.browser.isIE){C="filter:alpha(opacity=0)";
+}else{C="opacity: 0";
+}G.innerHTML="<textarea rows='1' autocomplete='off' value='none' style='"+C+"' />";
+var B=G.firstChild;
+B.style.width=D+"px";
+B.style.height=A+"px";
+B.onmousedown=function(H){H=(H)?H:((event)?event:null);
+if(H.button==2){B.value=E();
+B.select();
+}};
+return G;
+};
+SimileAjax.Graphics.getFontRenderingContext=function(A,B){return new SimileAjax.Graphics._FontRenderingContext(A,B);
+};
+SimileAjax.Graphics._FontRenderingContext=function(A,B){this._elmt=A;
+this._elmt.style.visibility="hidden";
+if(typeof B=="string"){this._elmt.style.width=B;
+}else{if(typeof B=="number"){this._elmt.style.width=B+"px";
+}}};
+SimileAjax.Graphics._FontRenderingContext.prototype.dispose=function(){this._elmt=null;
+};
+SimileAjax.Graphics._FontRenderingContext.prototype.update=function(){this._elmt.innerHTML="A";
+this._lineHeight=this._elmt.offsetHeight;
+};
+SimileAjax.Graphics._FontRenderingContext.prototype.computeSize=function(A){this._elmt.innerHTML=A;
+return{width:this._elmt.offsetWidth,height:this._elmt.offsetHeight};
+};
+SimileAjax.Graphics._FontRenderingContext.prototype.getLineHeight=function(){return this._lineHeight;
+};
+
+
+/* history.js */
+SimileAjax.History={maxHistoryLength:10,historyFile:"__history__.html",enabled:true,_initialized:false,_listeners:new SimileAjax.ListenerQueue(),_actions:[],_baseIndex:0,_currentIndex:0,_plainDocumentTitle:document.title};
+SimileAjax.History.formatHistoryEntryTitle=function(A){return SimileAjax.History._plainDocumentTitle+" {"+A+"}";
+};
+SimileAjax.History.initialize=function(){if(SimileAjax.History._initialized){return ;
+}if(SimileAjax.History.enabled){var A=document.createElement("iframe");
+A.id="simile-ajax-history";
+A.style.position="absolute";
+A.style.width="10px";
+A.style.height="10px";
+A.style.top="0px";
+A.style.left="0px";
+A.style.visibility="hidden";
+A.src=SimileAjax.History.historyFile+"?0";
+document.body.appendChild(A);
+SimileAjax.DOM.registerEvent(A,"load",SimileAjax.History._handleIFrameOnLoad);
+SimileAjax.History._iframe=A;
+}SimileAjax.History._initialized=true;
+};
+SimileAjax.History.addListener=function(A){SimileAjax.History.initialize();
+SimileAjax.History._listeners.add(A);
+};
+SimileAjax.History.removeListener=function(A){SimileAjax.History.initialize();
+SimileAjax.History._listeners.remove(A);
+};
+SimileAjax.History.addAction=function(A){SimileAjax.History.initialize();
+SimileAjax.History._listeners.fire("onBeforePerform",[A]);
+window.setTimeout(function(){try{A.perform();
+SimileAjax.History._listeners.fire("onAfterPerform",[A]);
+if(SimileAjax.History.enabled){SimileAjax.History._actions=SimileAjax.History._actions.slice(0,SimileAjax.History._currentIndex-SimileAjax.History._baseIndex);
+SimileAjax.History._actions.push(A);
+SimileAjax.History._currentIndex++;
+var C=SimileAjax.History._actions.length-SimileAjax.History.maxHistoryLength;
+if(C>0){SimileAjax.History._actions=SimileAjax.History._actions.slice(C);
+SimileAjax.History._baseIndex+=C;
+}try{SimileAjax.History._iframe.contentWindow.location.search="?"+SimileAjax.History._currentIndex;
+}catch(B){var D=SimileAjax.History.formatHistoryEntryTitle(A.label);
+document.title=D;
+}}}catch(B){SimileAjax.Debug.exception(B,"Error adding action {"+A.label+"} to history");
+}},0);
+};
+SimileAjax.History.addLengthyAction=function(C,A,B){SimileAjax.History.addAction({perform:C,undo:A,label:B,uiLayer:SimileAjax.WindowManager.getBaseLayer(),lengthy:true});
+};
+SimileAjax.History._handleIFrameOnLoad=function(){try{var B=SimileAjax.History._iframe.contentWindow.location.search;
+var F=(B.length==0)?0:Math.max(0,parseInt(B.substr(1)));
+var E=function(){var G=F-SimileAjax.History._currentIndex;
+SimileAjax.History._currentIndex+=G;
+SimileAjax.History._baseIndex+=G;
+SimileAjax.History._iframe.contentWindow.location.search="?"+F;
+};
+if(F<SimileAjax.History._currentIndex){SimileAjax.History._listeners.fire("onBeforeUndoSeveral",[]);
+window.setTimeout(function(){while(SimileAjax.History._currentIndex>F&&SimileAjax.History._currentIndex>SimileAjax.History._baseIndex){SimileAjax.History._currentIndex--;
+var G=SimileAjax.History._actions[SimileAjax.History._currentIndex-SimileAjax.History._baseIndex];
+try{G.undo();
+}catch(H){SimileAjax.Debug.exception(H,"History: Failed to undo action {"+G.label+"}");
+}}SimileAjax.History._listeners.fire("onAfterUndoSeveral",[]);
+E();
+},0);
+}else{if(F>SimileAjax.History._currentIndex){SimileAjax.History._listeners.fire("onBeforeRedoSeveral",[]);
+window.setTimeout(function(){while(SimileAjax.History._currentIndex<F&&SimileAjax.History._currentIndex-SimileAjax.History._baseIndex<SimileAjax.History._actions.length){var G=SimileAjax.History._actions[SimileAjax.History._currentIndex-SimileAjax.History._baseIndex];
+try{G.perform();
+}catch(H){SimileAjax.Debug.exception(H,"History: Failed to redo action {"+G.label+"}");
+}SimileAjax.History._currentIndex++;
+}SimileAjax.History._listeners.fire("onAfterRedoSeveral",[]);
+E();
+},0);
+}else{var A=SimileAjax.History._currentIndex-SimileAjax.History._baseIndex-1;
+var D=(A>=0&&A<SimileAjax.History._actions.length)?SimileAjax.History.formatHistoryEntryTitle(SimileAjax.History._actions[A].label):SimileAjax.History._plainDocumentTitle;
+SimileAjax.History._iframe.contentWindow.document.title=D;
+document.title=D;
+}}}catch(C){}};
+SimileAjax.History.getNextUndoAction=function(){try{var A=SimileAjax.History._currentIndex-SimileAjax.History._baseIndex-1;
+return SimileAjax.History._actions[A];
+}catch(B){return null;
+}};
+SimileAjax.History.getNextRedoAction=function(){try{var A=SimileAjax.History._currentIndex-SimileAjax.History._baseIndex;
+return SimileAjax.History._actions[A];
+}catch(B){return null;
+}};
+
+
+/* html.js */
+SimileAjax.HTML=new Object();
+SimileAjax.HTML._e2uHash={};
+(function(){var A=SimileAjax.HTML._e2uHash;
+A["nbsp"]="\u00A0[space]";
+A["iexcl"]="\u00A1";
+A["cent"]="\u00A2";
+A["pound"]="\u00A3";
+A["curren"]="\u00A4";
+A["yen"]="\u00A5";
+A["brvbar"]="\u00A6";
+A["sect"]="\u00A7";
+A["uml"]="\u00A8";
+A["copy"]="\u00A9";
+A["ordf"]="\u00AA";
+A["laquo"]="\u00AB";
+A["not"]="\u00AC";
+A["shy"]="\u00AD";
+A["reg"]="\u00AE";
+A["macr"]="\u00AF";
+A["deg"]="\u00B0";
+A["plusmn"]="\u00B1";
+A["sup2"]="\u00B2";
+A["sup3"]="\u00B3";
+A["acute"]="\u00B4";
+A["micro"]="\u00B5";
+A["para"]="\u00B6";
+A["middot"]="\u00B7";
+A["cedil"]="\u00B8";
+A["sup1"]="\u00B9";
+A["ordm"]="\u00BA";
+A["raquo"]="\u00BB";
+A["frac14"]="\u00BC";
+A["frac12"]="\u00BD";
+A["frac34"]="\u00BE";
+A["iquest"]="\u00BF";
+A["Agrave"]="\u00C0";
+A["Aacute"]="\u00C1";
+A["Acirc"]="\u00C2";
+A["Atilde"]="\u00C3";
+A["Auml"]="\u00C4";
+A["Aring"]="\u00C5";
+A["AElig"]="\u00C6";
+A["Ccedil"]="\u00C7";
+A["Egrave"]="\u00C8";
+A["Eacute"]="\u00C9";
+A["Ecirc"]="\u00CA";
+A["Euml"]="\u00CB";
+A["Igrave"]="\u00CC";
+A["Iacute"]="\u00CD";
+A["Icirc"]="\u00CE";
+A["Iuml"]="\u00CF";
+A["ETH"]="\u00D0";
+A["Ntilde"]="\u00D1";
+A["Ograve"]="\u00D2";
+A["Oacute"]="\u00D3";
+A["Ocirc"]="\u00D4";
+A["Otilde"]="\u00D5";
+A["Ouml"]="\u00D6";
+A["times"]="\u00D7";
+A["Oslash"]="\u00D8";
+A["Ugrave"]="\u00D9";
+A["Uacute"]="\u00DA";
+A["Ucirc"]="\u00DB";
+A["Uuml"]="\u00DC";
+A["Yacute"]="\u00DD";
+A["THORN"]="\u00DE";
+A["szlig"]="\u00DF";
+A["agrave"]="\u00E0";
+A["aacute"]="\u00E1";
+A["acirc"]="\u00E2";
+A["atilde"]="\u00E3";
+A["auml"]="\u00E4";
+A["aring"]="\u00E5";
+A["aelig"]="\u00E6";
+A["ccedil"]="\u00E7";
+A["egrave"]="\u00E8";
+A["eacute"]="\u00E9";
+A["ecirc"]="\u00EA";
+A["euml"]="\u00EB";
+A["igrave"]="\u00EC";
+A["iacute"]="\u00ED";
+A["icirc"]="\u00EE";
+A["iuml"]="\u00EF";
+A["eth"]="\u00F0";
+A["ntilde"]="\u00F1";
+A["ograve"]="\u00F2";
+A["oacute"]="\u00F3";
+A["ocirc"]="\u00F4";
+A["otilde"]="\u00F5";
+A["ouml"]="\u00F6";
+A["divide"]="\u00F7";
+A["oslash"]="\u00F8";
+A["ugrave"]="\u00F9";
+A["uacute"]="\u00FA";
+A["ucirc"]="\u00FB";
+A["uuml"]="\u00FC";
+A["yacute"]="\u00FD";
+A["thorn"]="\u00FE";
+A["yuml"]="\u00FF";
+A["quot"]="\u0022";
+A["amp"]="\u0026";
+A["lt"]="\u003C";
+A["gt"]="\u003E";
+A["OElig"]="";
+A["oelig"]="\u0153";
+A["Scaron"]="\u0160";
+A["scaron"]="\u0161";
+A["Yuml"]="\u0178";
+A["circ"]="\u02C6";
+A["tilde"]="\u02DC";
+A["ensp"]="\u2002";
+A["emsp"]="\u2003";
+A["thinsp"]="\u2009";
+A["zwnj"]="\u200C";
+A["zwj"]="\u200D";
+A["lrm"]="\u200E";
+A["rlm"]="\u200F";
+A["ndash"]="\u2013";
+A["mdash"]="\u2014";
+A["lsquo"]="\u2018";
+A["rsquo"]="\u2019";
+A["sbquo"]="\u201A";
+A["ldquo"]="\u201C";
+A["rdquo"]="\u201D";
+A["bdquo"]="\u201E";
+A["dagger"]="\u2020";
+A["Dagger"]="\u2021";
+A["permil"]="\u2030";
+A["lsaquo"]="\u2039";
+A["rsaquo"]="\u203A";
+A["euro"]="\u20AC";
+A["fnof"]="\u0192";
+A["Alpha"]="\u0391";
+A["Beta"]="\u0392";
+A["Gamma"]="\u0393";
+A["Delta"]="\u0394";
+A["Epsilon"]="\u0395";
+A["Zeta"]="\u0396";
+A["Eta"]="\u0397";
+A["Theta"]="\u0398";
+A["Iota"]="\u0399";
+A["Kappa"]="\u039A";
+A["Lambda"]="\u039B";
+A["Mu"]="\u039C";
+A["Nu"]="\u039D";
+A["Xi"]="\u039E";
+A["Omicron"]="\u039F";
+A["Pi"]="\u03A0";
+A["Rho"]="\u03A1";
+A["Sigma"]="\u03A3";
+A["Tau"]="\u03A4";
+A["Upsilon"]="\u03A5";
+A["Phi"]="\u03A6";
+A["Chi"]="\u03A7";
+A["Psi"]="\u03A8";
+A["Omega"]="\u03A9";
+A["alpha"]="\u03B1";
+A["beta"]="\u03B2";
+A["gamma"]="\u03B3";
+A["delta"]="\u03B4";
+A["epsilon"]="\u03B5";
+A["zeta"]="\u03B6";
+A["eta"]="\u03B7";
+A["theta"]="\u03B8";
+A["iota"]="\u03B9";
+A["kappa"]="\u03BA";
+A["lambda"]="\u03BB";
+A["mu"]="\u03BC";
+A["nu"]="\u03BD";
+A["xi"]="\u03BE";
+A["omicron"]="\u03BF";
+A["pi"]="\u03C0";
+A["rho"]="\u03C1";
+A["sigmaf"]="\u03C2";
+A["sigma"]="\u03C3";
+A["tau"]="\u03C4";
+A["upsilon"]="\u03C5";
+A["phi"]="\u03C6";
+A["chi"]="\u03C7";
+A["psi"]="\u03C8";
+A["omega"]="\u03C9";
+A["thetasym"]="\u03D1";
+A["upsih"]="\u03D2";
+A["piv"]="\u03D6";
+A["bull"]="\u2022";
+A["hellip"]="\u2026";
+A["prime"]="\u2032";
+A["Prime"]="\u2033";
+A["oline"]="\u203E";
+A["frasl"]="\u2044";
+A["weierp"]="\u2118";
+A["image"]="\u2111";
+A["real"]="\u211C";
+A["trade"]="\u2122";
+A["alefsym"]="\u2135";
+A["larr"]="\u2190";
+A["uarr"]="\u2191";
+A["rarr"]="\u2192";
+A["darr"]="\u2193";
+A["harr"]="\u2194";
+A["crarr"]="\u21B5";
+A["lArr"]="\u21D0";
+A["uArr"]="\u21D1";
+A["rArr"]="\u21D2";
+A["dArr"]="\u21D3";
+A["hArr"]="\u21D4";
+A["forall"]="\u2200";
+A["part"]="\u2202";
+A["exist"]="\u2203";
+A["empty"]="\u2205";
+A["nabla"]="\u2207";
+A["isin"]="\u2208";
+A["notin"]="\u2209";
+A["ni"]="\u220B";
+A["prod"]="\u220F";
+A["sum"]="\u2211";
+A["minus"]="\u2212";
+A["lowast"]="\u2217";
+A["radic"]="\u221A";
+A["prop"]="\u221D";
+A["infin"]="\u221E";
+A["ang"]="\u2220";
+A["and"]="\u2227";
+A["or"]="\u2228";
+A["cap"]="\u2229";
+A["cup"]="\u222A";
+A["int"]="\u222B";
+A["there4"]="\u2234";
+A["sim"]="\u223C";
+A["cong"]="\u2245";
+A["asymp"]="\u2248";
+A["ne"]="\u2260";
+A["equiv"]="\u2261";
+A["le"]="\u2264";
+A["ge"]="\u2265";
+A["sub"]="\u2282";
+A["sup"]="\u2283";
+A["nsub"]="\u2284";
+A["sube"]="\u2286";
+A["supe"]="\u2287";
+A["oplus"]="\u2295";
+A["otimes"]="\u2297";
+A["perp"]="\u22A5";
+A["sdot"]="\u22C5";
+A["lceil"]="\u2308";
+A["rceil"]="\u2309";
+A["lfloor"]="\u230A";
+A["rfloor"]="\u230B";
+A["lang"]="\u2329";
+A["rang"]="\u232A";
+A["loz"]="\u25CA";
+A["spades"]="\u2660";
+A["clubs"]="\u2663";
+A["hearts"]="\u2665";
+A["diams"]="\u2666";
+})();
+SimileAjax.HTML.deEntify=function(C){var D=SimileAjax.HTML._e2uHash;
+var B=/&(\w+?);/;
+while(B.test(C)){var A=C.match(B);
+C=C.replace(B,D[A[1]]);
+}return C;
+};
+
+
+/* json.js */
// Minified copy of the classic pre-ES5 json.js:
// SimileAjax.JSON.toJSONString serializes a value; parseJSON "parses" one.
+SimileAjax.JSON=new Object();
// m: single-character escape map used when quoting strings.
+(function(){var m={"\b":"\\b","\t":"\\t","\n":"\\n","\f":"\\f","\r":"\\r",'"':'\\"',"\\":"\\\\"};
// s: serializer dispatch table keyed by typeof (plus "array"/"null"
// helpers).  Values whose type has no entry (functions, undefined) are
// silently skipped.
+var s={array:function(x){var a=["["],b,f,i,l=x.length,v;
+for(i=0;
+i<l;
+i+=1){v=x[i];
+f=s[typeof v];
+if(f){v=f(v);
+if(typeof v=="string"){if(b){a[a.length]=",";
+}a[a.length]=v;
+b=true;
+}}}a[a.length]="]";
+return a.join("");
+},"boolean":function(x){return String(x);
+},"null":function(x){return"null";
// NaN/Infinity are not representable in JSON and serialize as null.
+},number:function(x){return isFinite(x)?String(x):"null";
// Objects: arrays delegate to s.array; otherwise for-in walks the
// enumerable properties (including inherited ones).  null falls through
// to "null".
+},object:function(x){if(x){if(x instanceof Array){return s.array(x);
+}var a=["{"],b,f,i,v;
+for(i in x){v=x[i];
+f=s[typeof v];
+if(f){v=f(v);
+if(typeof v=="string"){if(b){a[a.length]=",";
+}a.push(s.string(i),":",v);
+b=true;
+}}}a[a.length]="}";
+return a.join("");
+}return"null";
// Strings: quote/backslash escape via m; other control characters
// (all < 0x20, so two hex digits suffice) become \u00XX.
+},string:function(x){if(/["\\\x00-\x1f]/.test(x)){x=x.replace(/([\x00-\x1f\\"])/g,function(a,b){var c=m[b];
+if(c){return c;
+}c=b.charCodeAt();
+return"\\u00"+Math.floor(c/16).toString(16)+(c%16).toString(16);
+});
+}return'"'+x+'"';
+}};
// NOTE: arrays are instanceof Object, so the Array branch below is
// effectively dead -- arrays still serialize correctly via s.object,
// which delegates to s.array.
+SimileAjax.JSON.toJSONString=function(o){if(o instanceof Object){return s.object(o);
+}else{if(o instanceof Array){return s.array(o);
+}else{return o.toString();
+}}};
// NOTE(review): this body was written as a String.prototype method (it
// uses `this` as the text to parse) but is attached to SimileAjax.JSON,
// so SimileAjax.JSON.parseJSON() would operate on the namespace object,
// not a string -- confirm intended call pattern.  It is also eval()-based
// after a regex sanity filter: never feed it untrusted input.
+SimileAjax.JSON.parseJSON=function(){try{return !(/[^,:{}\[\]0-9.\-+Eaeflnr-u \n\r\t]/.test(this.replace(/"(\\.|[^"\\])*"/g,"")))&&eval("("+this+")");
+}catch(e){return false;
+}};
+})();
+
+
+/* string.js */
+String.prototype.trim=function(){return this.replace(/^\s+|\s+$/g,"");
+};
+String.prototype.startsWith=function(A){return this.length>=A.length&&this.substr(0,A.length)==A;
+};
+String.prototype.endsWith=function(A){return this.length>=A.length&&this.substr(this.length-A.length)==A;
+};
+String.substitute=function(B,D){var A="";
+var F=0;
+while(F<B.length-1){var C=B.indexOf("%",F);
+if(C<0||C==B.length-1){break;
+}else{if(C>F&&B.charAt(C-1)=="\\"){A+=B.substring(F,C-1)+"%";
+F=C+1;
+}else{var E=parseInt(B.charAt(C+1));
+if(isNaN(E)||E>=D.length){A+=B.substring(F,C+2);
+}else{A+=B.substring(F,C)+D[E].toString();
+}F=C+2;
+}}}if(F<B.length){A+=B.substring(F);
+}return A;
+};
+
+
/* units.js -- the default time "unit": plain JavaScript Date values. */
SimileAjax.NativeDateUnit = new Object();

SimileAjax.NativeDateUnit.makeDefaultValue = function() {
    return new Date();
};
SimileAjax.NativeDateUnit.cloneValue = function(value) {
    return new Date(value.getTime());
};
// Choose a parser: ISO-8601 for "iso8601"/"iso 8601", else the permissive
// Gregorian parser.
SimileAjax.NativeDateUnit.getParser = function(format) {
    if (typeof format == "string") {
        format = format.toLowerCase();
    }
    return (format == "iso8601" || format == "iso 8601") ?
        SimileAjax.DateTime.parseIso8601DateTime :
        SimileAjax.DateTime.parseGregorianDateTime;
};
SimileAjax.NativeDateUnit.parseFromObject = function(o) {
    return SimileAjax.DateTime.parseGregorianDateTime(o);
};
SimileAjax.NativeDateUnit.toNumber = function(value) {
    return value.getTime();
};
SimileAjax.NativeDateUnit.fromNumber = function(n) {
    return new Date(n);
};
// Negative when v1 is earlier than v2, positive when later, zero when
// equal; accepts Dates or numeric timestamps.
SimileAjax.NativeDateUnit.compare = function(v1, v2) {
    var n1 = (typeof v1 == "object") ? v1.getTime() : Number(v1);
    var n2 = (typeof v2 == "object") ? v2.getTime() : Number(v2);
    return n1 - n2;
};
SimileAjax.NativeDateUnit.earlier = function(v1, v2) {
    return SimileAjax.NativeDateUnit.compare(v1, v2) < 0 ? v1 : v2;
};
SimileAjax.NativeDateUnit.later = function(v1, v2) {
    return SimileAjax.NativeDateUnit.compare(v1, v2) > 0 ? v1 : v2;
};
// Shift a Date by `deltaMs` milliseconds, returning a new Date.
SimileAjax.NativeDateUnit.change = function(value, deltaMs) {
    return new Date(value.getTime() + deltaMs);
};
+
+
+/* window-manager.js */
+SimileAjax.WindowManager={_initialized:false,_listeners:[],_draggedElement:null,_draggedElementCallback:null,_dropTargetHighlightElement:null,_lastCoords:null,_ghostCoords:null,_draggingMode:"",_dragging:false,_layers:[]};
+SimileAjax.WindowManager.initialize=function(){if(SimileAjax.WindowManager._initialized){return ;
+}SimileAjax.DOM.registerEvent(document.body,"mousedown",SimileAjax.WindowManager._onBodyMouseDown);
+SimileAjax.DOM.registerEvent(document.body,"mousemove",SimileAjax.WindowManager._onBodyMouseMove);
+SimileAjax.DOM.registerEvent(document.body,"mouseup",SimileAjax.WindowManager._onBodyMouseUp);
+SimileAjax.DOM.registerEvent(document,"keydown",SimileAjax.WindowManager._onBodyKeyDown);
+SimileAjax.DOM.registerEvent(document,"keyup",SimileAjax.WindowManager._onBodyKeyUp);
+SimileAjax.WindowManager._layers.push({index:0});
+SimileAjax.WindowManager._historyListener={onBeforeUndoSeveral:function(){},onAfterUndoSeveral:function(){},onBeforeUndo:function(){},onAfterUndo:function(){},onBeforeRedoSeveral:function(){},onAfterRedoSeveral:function(){},onBeforeRedo:function(){},onAfterRedo:function(){}};
+SimileAjax.History.addListener(SimileAjax.WindowManager._historyListener);
+SimileAjax.WindowManager._initialized=true;
+};
// The bottom-most, always-present layer.
SimileAjax.WindowManager.getBaseLayer = function() {
    SimileAjax.WindowManager.initialize();
    return SimileAjax.WindowManager._layers[0];
};
// The layer currently on top of the stack.
SimileAjax.WindowManager.getHighestLayer = function() {
    SimileAjax.WindowManager.initialize();
    var layers = SimileAjax.WindowManager._layers;
    return layers[layers.length - 1];
};
// Register `methodName` of `obj` as the handler for `eventName` on `elmt`.
SimileAjax.WindowManager.registerEventWithObject = function(elmt, eventName, obj, methodName, layer) {
    SimileAjax.WindowManager.registerEvent(
        elmt,
        eventName,
        function(elmt2, evt, target) {
            return obj[methodName].call(obj, elmt2, evt, target);
        },
        layer
    );
};
// Register a handler that only runs while `layer` is reachable (top of the
// stack or covered only by ephemeral layers); the DOM event is always
// cancelled, whether or not the handler ran.
SimileAjax.WindowManager.registerEvent = function(elmt, eventName, handler, layer) {
    if (layer == null) {
        layer = SimileAjax.WindowManager.getHighestLayer();
    }
    var wrappedHandler = function(elmt2, evt, target) {
        if (SimileAjax.WindowManager._canProcessEventAtLayer(layer)) {
            // Dismiss any ephemeral layers stacked above this one first.
            SimileAjax.WindowManager._popToLayer(layer.index);
            try {
                handler(elmt2, evt, target);
            } catch (e) {
                SimileAjax.Debug.exception(e);
            }
        }
        SimileAjax.DOM.cancelEvent(evt);
        return false;
    };
    SimileAjax.DOM.registerEvent(elmt, eventName, wrappedHandler);
};
// Push a new layer onto the stack; `onPop` runs when it is dismissed.
SimileAjax.WindowManager.pushLayer = function(onPop, ephemeral, elmt) {
    var layer = {
        onPop: onPop,
        index: SimileAjax.WindowManager._layers.length,
        ephemeral: (ephemeral),
        elmt: elmt
    };
    SimileAjax.WindowManager._layers.push(layer);
    return layer;
};
// Pop `layer` and everything above it; the base layer (index 0) is never
// popped, and an unknown layer is a no-op.
SimileAjax.WindowManager.popLayer = function(layer) {
    for (var i = 1; i < SimileAjax.WindowManager._layers.length; i++) {
        if (SimileAjax.WindowManager._layers[i] == layer) {
            SimileAjax.WindowManager._popToLayer(i - 1);
            break;
        }
    }
};
// Dismiss every layer except the base one.
SimileAjax.WindowManager.popAllLayers = function() {
    SimileAjax.WindowManager._popToLayer(0);
};
// Make `elmt` draggable: its mousedown begins drag tracking with `callback`.
SimileAjax.WindowManager.registerForDragging = function(elmt, callback, layer) {
    SimileAjax.WindowManager.registerEvent(
        elmt,
        "mousedown",
        function(elmt2, evt, target) {
            SimileAjax.WindowManager._handleMouseDown(elmt2, evt, callback);
        },
        layer
    );
};
// Pop layers until `index` is the top one; failures in onPop are swallowed
// so one bad layer cannot wedge the stack.
SimileAjax.WindowManager._popToLayer = function(index) {
    while (index + 1 < SimileAjax.WindowManager._layers.length) {
        try {
            var layer = SimileAjax.WindowManager._layers.pop();
            if (layer.onPop != null) {
                layer.onPop();
            }
        } catch (e) {
        }
    }
};
// A layer may process events when it is on top, or when every layer above
// it is ephemeral (and can therefore be dismissed).
SimileAjax.WindowManager._canProcessEventAtLayer = function(layer) {
    if (layer.index == (SimileAjax.WindowManager._layers.length - 1)) {
        return true;
    }
    for (var i = layer.index + 1; i < SimileAjax.WindowManager._layers.length; i++) {
        if (!SimileAjax.WindowManager._layers[i].ephemeral) {
            return false;
        }
    }
    return true;
};
+SimileAjax.WindowManager.cancelPopups=function(A){var F=(A)?SimileAjax.DOM.getEventPageCoordinates(A):{x:-1,y:-1};
+var E=SimileAjax.WindowManager._layers.length-1;
+while(E>0&&SimileAjax.WindowManager._layers[E].ephemeral){var D=SimileAjax.WindowManager._layers[E];
+if(D.elmt!=null){var C=D.elmt;
+var B=SimileAjax.DOM.getPageCoordinates(C);
+if(F.x>=B.left&&F.x<(B.left+C.offsetWidth)&&F.y>=B.top&&F.y<(B.top+C.offsetHeight)){break;
+}}E--;
+}SimileAjax.WindowManager._popToLayer(E);
+};
+SimileAjax.WindowManager._onBodyMouseDown=function(B,A,C){if(!("eventPhase" in A)||A.eventPhase==A.BUBBLING_PHASE){SimileAjax.WindowManager.cancelPopups(A);
+}};
+SimileAjax.WindowManager._handleMouseDown=function(B,A,C){SimileAjax.WindowManager._draggedElement=B;
+SimileAjax.WindowManager._draggedElementCallback=C;
+SimileAjax.WindowManager._lastCoords={x:A.clientX,y:A.clientY};
+SimileAjax.DOM.cancelEvent(A);
+return false;
+};
+SimileAjax.WindowManager._onBodyKeyDown=function(C,A,D){if(SimileAjax.WindowManager._dragging){if(A.keyCode==27){SimileAjax.WindowManager._cancelDragging();
+}else{if((A.keyCode==17||A.keyCode==16)&&SimileAjax.WindowManager._draggingMode!="copy"){SimileAjax.WindowManager._draggingMode="copy";
+var B=SimileAjax.Graphics.createTranslucentImage(SimileAjax.urlPrefix+"data/timeline/copy.png");
+B.style.position="absolute";
+B.style.left=(SimileAjax.WindowManager._ghostCoords.left-16)+"px";
+B.style.top=(SimileAjax.WindowManager._ghostCoords.top)+"px";
+document.body.appendChild(B);
+SimileAjax.WindowManager._draggingModeIndicatorElmt=B;
+}}}};
+SimileAjax.WindowManager._onBodyKeyUp=function(B,A,C){if(SimileAjax.WindowManager._dragging){if(A.keyCode==17||A.keyCode==16){SimileAjax.WindowManager._draggingMode="";
+if(SimileAjax.WindowManager._draggingModeIndicatorElmt!=null){document.body.removeChild(SimileAjax.WindowManager._draggingModeIndicatorElmt);
+SimileAjax.WindowManager._draggingModeIndicatorElmt=null;
+}}}};
+SimileAjax.WindowManager._onBodyMouseMove=function(A,N,H){if(SimileAjax.WindowManager._draggedElement!=null){var P=SimileAjax.WindowManager._draggedElementCallback;
+var E=SimileAjax.WindowManager._lastCoords;
+var M=N.clientX-E.x;
+var J=N.clientY-E.y;
+if(!SimileAjax.WindowManager._dragging){if(Math.abs(M)>5||Math.abs(J)>5){try{if("onDragStart" in P){P.onDragStart();
+}if("ghost" in P&&P.ghost){var K=SimileAjax.WindowManager._draggedElement;
+SimileAjax.WindowManager._ghostCoords=SimileAjax.DOM.getPageCoordinates(K);
+SimileAjax.WindowManager._ghostCoords.left+=M;
+SimileAjax.WindowManager._ghostCoords.top+=J;
+var O=K.cloneNode(true);
+O.style.position="absolute";
+O.style.left=SimileAjax.WindowManager._ghostCoords.left+"px";
+O.style.top=SimileAjax.WindowManager._ghostCoords.top+"px";
+O.style.zIndex=1000;
+SimileAjax.Graphics.setOpacity(O,50);
+document.body.appendChild(O);
+P._ghostElmt=O;
+}SimileAjax.WindowManager._dragging=true;
+SimileAjax.WindowManager._lastCoords={x:N.clientX,y:N.clientY};
+document.body.focus();
+}catch(G){SimileAjax.Debug.exception("WindowManager: Error handling mouse down",G);
+SimileAjax.WindowManager._cancelDragging();
+}}}else{try{SimileAjax.WindowManager._lastCoords={x:N.clientX,y:N.clientY};
+if("onDragBy" in P){P.onDragBy(M,J);
+}if("_ghostElmt" in P){var O=P._ghostElmt;
+SimileAjax.WindowManager._ghostCoords.left+=M;
+SimileAjax.WindowManager._ghostCoords.top+=J;
+O.style.left=SimileAjax.WindowManager._ghostCoords.left+"px";
+O.style.top=SimileAjax.WindowManager._ghostCoords.top+"px";
+if(SimileAjax.WindowManager._draggingModeIndicatorElmt!=null){var I=SimileAjax.WindowManager._draggingModeIndicatorElmt;
+I.style.left=(SimileAjax.WindowManager._ghostCoords.left-16)+"px";
+I.style.top=SimileAjax.WindowManager._ghostCoords.top+"px";
+}if("droppable" in P&&P.droppable){var L=SimileAjax.DOM.getEventPageCoordinates(N);
+var H=SimileAjax.DOM.hittest(L.x,L.y,[SimileAjax.WindowManager._ghostElmt,SimileAjax.WindowManager._dropTargetHighlightElement]);
+H=SimileAjax.WindowManager._findDropTarget(H);
+if(H!=SimileAjax.WindowManager._potentialDropTarget){if(SimileAjax.WindowManager._dropTargetHighlightElement!=null){document.body.removeChild(SimileAjax.WindowManager._dropTargetHighlightElement);
+SimileAjax.WindowManager._dropTargetHighlightElement=null;
+SimileAjax.WindowManager._potentialDropTarget=null;
+}var F=false;
+if(H!=null){if((!("canDropOn" in P)||P.canDropOn(H))&&(!("canDrop" in H)||H.canDrop(SimileAjax.WindowManager._draggedElement))){F=true;
+}}if(F){var C=4;
+var D=SimileAjax.DOM.getPageCoordinates(H);
+var B=document.createElement("div");
+B.style.border=C+"px solid yellow";
+B.style.backgroundColor="yellow";
+B.style.position="absolute";
+B.style.left=D.left+"px";
+B.style.top=D.top+"px";
+B.style.width=(H.offsetWidth-C*2)+"px";
+B.style.height=(H.offsetHeight-C*2)+"px";
+SimileAjax.Graphics.setOpacity(B,30);
+document.body.appendChild(B);
+SimileAjax.WindowManager._potentialDropTarget=H;
+SimileAjax.WindowManager._dropTargetHighlightElement=B;
+}}}}}catch(G){SimileAjax.Debug.exception("WindowManager: Error handling mouse move",G);
+SimileAjax.WindowManager._cancelDragging();
+}}SimileAjax.DOM.cancelEvent(N);
+return false;
+}};
+SimileAjax.WindowManager._onBodyMouseUp=function(B,A,C){if(SimileAjax.WindowManager._draggedElement!=null){try{if(SimileAjax.WindowManager._dragging){var E=SimileAjax.WindowManager._draggedElementCallback;
+if("onDragEnd" in E){E.onDragEnd();
+}if("droppable" in E&&E.droppable){var D=false;
+var C=SimileAjax.WindowManager._potentialDropTarget;
+if(C!=null){if((!("canDropOn" in E)||E.canDropOn(C))&&(!("canDrop" in C)||C.canDrop(SimileAjax.WindowManager._draggedElement))){if("onDropOn" in E){E.onDropOn(C);
+}C.ondrop(SimileAjax.WindowManager._draggedElement,SimileAjax.WindowManager._draggingMode);
+D=true;
+}}if(!D){}}}}finally{SimileAjax.WindowManager._cancelDragging();
+}SimileAjax.DOM.cancelEvent(A);
+return false;
+}};
+SimileAjax.WindowManager._cancelDragging=function(){var B=SimileAjax.WindowManager._draggedElementCallback;
+if("_ghostElmt" in B){var A=B._ghostElmt;
+document.body.removeChild(A);
+delete B._ghostElmt;
+}if(SimileAjax.WindowManager._dropTargetHighlightElement!=null){document.body.removeChild(SimileAjax.WindowManager._dropTargetHighlightElement);
+SimileAjax.WindowManager._dropTargetHighlightElement=null;
+}if(SimileAjax.WindowManager._draggingModeIndicatorElmt!=null){document.body.removeChild(SimileAjax.WindowManager._draggingModeIndicatorElmt);
+SimileAjax.WindowManager._draggingModeIndicatorElmt=null;
+}SimileAjax.WindowManager._draggedElement=null;
+SimileAjax.WindowManager._draggedElementCallback=null;
+SimileAjax.WindowManager._potentialDropTarget=null;
+SimileAjax.WindowManager._dropTargetHighlightElement=null;
+SimileAjax.WindowManager._lastCoords=null;
+SimileAjax.WindowManager._ghostCoords=null;
+SimileAjax.WindowManager._draggingMode="";
+SimileAjax.WindowManager._dragging=false;
+};
+SimileAjax.WindowManager._findDropTarget=function(A){while(A!=null){if("ondrop" in A&&(typeof A.ondrop)=="function"){break;
+}A=A.parentNode;
+}return A;
+};
+
+
/* xmlhttp.js -- thin cross-browser XMLHttpRequest helpers. */
SimileAjax.XmlHttp = new Object();

// Shared readystatechange dispatcher: on completion (readyState 4) call
// fDone on success or fError otherwise.  Status 0 is treated as success
// as well (commonly seen for local file:// responses).
SimileAjax.XmlHttp._onReadyStateChange = function(xmlhttp, fError, fDone) {
    switch (xmlhttp.readyState) {
    case 4:
        try {
            if (xmlhttp.status == 0 || xmlhttp.status == 200) {
                if (fDone) {
                    fDone(xmlhttp);
                }
            } else {
                if (fError) {
                    fError(xmlhttp.statusText, xmlhttp.status, xmlhttp);
                }
            }
        } catch (e) {
            SimileAjax.Debug.exception("XmlHttp: Error handling onReadyStateChange", e);
        }
        break;
    }
};
// Create an XMLHttpRequest.  On IE, probe the ActiveX ProgIDs in order;
// elsewhere (or when every probe fails) use the native XMLHttpRequest.
// The first factory that works overwrites _createRequest itself, so
// subsequent calls skip the probing entirely.
+SimileAjax.XmlHttp._createRequest=function(){if(SimileAjax.Platform.browser.isIE){var A=["Msxml2.XMLHTTP","Microsoft.XMLHTTP","Msxml2.XMLHTTP.4.0"];
+for(var B=0;
+B<A.length;
+B++){try{var C=A[B];
+var D=function(){return new ActiveXObject(C);
+};
// Instantiate once to prove this ProgID works before memoizing it.
+var F=D();
+SimileAjax.XmlHttp._createRequest=D;
+return F;
// Probe failed -- fall through to the next ProgID / the native object.
+}catch(E){}}}try{var D=function(){return new XMLHttpRequest();
+};
+var F=D();
+SimileAjax.XmlHttp._createRequest=D;
+return F;
+}catch(E){throw new Error("Failed to create an XMLHttpRequest object");
+}};
// Fire an asynchronous GET of `url`; fError/fDone run on completion.
SimileAjax.XmlHttp.get = function(url, fError, fDone) {
    var xmlhttp = SimileAjax.XmlHttp._createRequest();
    xmlhttp.open("GET", url, true);
    xmlhttp.onreadystatechange = function() {
        SimileAjax.XmlHttp._onReadyStateChange(xmlhttp, fError, fDone);
    };
    xmlhttp.send(null);
};
// Fire an asynchronous POST of `body` to `url`; fError/fDone run on
// completion.
SimileAjax.XmlHttp.post = function(url, body, fError, fDone) {
    var xmlhttp = SimileAjax.XmlHttp._createRequest();
    xmlhttp.open("POST", url, true);
    xmlhttp.onreadystatechange = function() {
        SimileAjax.XmlHttp._onReadyStateChange(xmlhttp, fError, fDone);
    };
    xmlhttp.send(body);
};
// Force the response to be handled as XML: prefer overrideMimeType, and
// where it is unavailable fall back to setting the header.
SimileAjax.XmlHttp._forceXML = function(xmlhttp) {
    try {
        xmlhttp.overrideMimeType("text/xml");
    } catch (e) {
        // BUG FIX: was "setrequestheader" -- JavaScript method names are
        // case-sensitive and the XMLHttpRequest method is spelled
        // setRequestHeader, so the fallback itself always threw a
        // TypeError instead of setting the header.
        xmlhttp.setRequestHeader("Content-Type", "text/xml");
    }
};
+
+
// Timeline bootstrap: create the global namespace, anchor relative
// resource URLs at baseuri() (expected to be provided by the embedding
// page -- TODO confirm), and alias DateTime for older callers.
+window.Timeline = new Object();
+Timeline.urlPrefix = baseuri();
+window.Timeline.DateTime = window.SimileAjax.DateTime; // for backward compatibility
+
/* decorators.js -- band decorators that highlight spans/points in time. */

// Highlight the region of a band between params.startDate and
// params.endDate, optionally labelling both edges.
Timeline.SpanHighlightDecorator = function(params) {
    this._unit = ("unit" in params) ? params.unit : SimileAjax.NativeDateUnit;
    // String dates go through the unit's parser; Date objects pass through.
    this._startDate = (typeof params.startDate == "string") ?
        this._unit.parseFromObject(params.startDate) : params.startDate;
    this._endDate = (typeof params.endDate == "string") ?
        this._unit.parseFromObject(params.endDate) : params.endDate;
    this._startLabel = params.startLabel;
    this._endLabel = params.endLabel;
    this._color = params.color;
    this._cssClass = ("cssClass" in params) ? params.cssClass : null;
    this._opacity = ("opacity" in params) ? params.opacity : 100;
};
// Bind the decorator to its band and timeline; painting happens later.
Timeline.SpanHighlightDecorator.prototype.initialize = function(band, timeline) {
    this._band = band;
    this._timeline = timeline;
    this._layerDiv = null;
};
+Timeline.SpanHighlightDecorator.prototype.paint=function(){if(this._layerDiv!=null){this._band.removeLayerDiv(this._layerDiv);
+}this._layerDiv=this._band.createLayerDiv(10);
+this._layerDiv.setAttribute("name","span-highlight-decorator");
+this._layerDiv.style.display="none";
+var F=this._band.getMinDate();
+var C=this._band.getMaxDate();
+if(this._unit.compare(this._startDate,C)<0&&this._unit.compare(this._endDate,F)>0){F=this._unit.later(F,this._startDate);
+C=this._unit.earlier(C,this._endDate);
+var D=this._band.dateToPixelOffset(F);
+var K=this._band.dateToPixelOffset(C);
+var I=this._timeline.getDocument();
+var H=function(){var L=I.createElement("table");
+L.insertRow(0).insertCell(0);
+return L;
+};
+var B=I.createElement("div");
+B.className="timeline-highlight-decorator";
+if(this._cssClass){B.className+=" "+this._cssClass;
+}if(this._opacity<100){SimileAjax.Graphics.setOpacity(B,this._opacity);
+}this._layerDiv.appendChild(B);
+var J=H();
+J.className="timeline-highlight-label timeline-highlight-label-start";
+var G=J.rows[0].cells[0];
+G.innerHTML=this._startLabel;
+if(this._cssClass){G.className="label_"+this._cssClass;
+}this._layerDiv.appendChild(J);
+var A=H();
+A.className="timeline-highlight-label timeline-highlight-label-end";
+var E=A.rows[0].cells[0];
+E.innerHTML=this._endLabel;
+if(this._cssClass){E.className="label_"+this._cssClass;
+}this._layerDiv.appendChild(A);
+if(this._timeline.isHorizontal()){B.style.left=D+"px";
+B.style.width=(K-D)+"px";
+J.style.right=(this._band.getTotalViewLength()-D)+"px";
+J.style.width=(this._startLabel.length)+"em";
+A.style.left=K+"px";
+A.style.width=(this._endLabel.length)+"em";
+}else{B.style.top=D+"px";
+B.style.height=(K-D)+"px";
+J.style.bottom=D+"px";
+J.style.height="1.5px";
+A.style.top=K+"px";
+A.style.height="1.5px";
+}}this._layerDiv.style.display="block";
+};
+Timeline.SpanHighlightDecorator.prototype.softPaint=function(){};
// Highlight a single instant as a stripe of params.width pixels (default
// 10) centered on params.date.
Timeline.PointHighlightDecorator = function(params) {
    this._unit = ("unit" in params) ? params.unit : SimileAjax.NativeDateUnit;
    this._date = (typeof params.date == "string") ?
        this._unit.parseFromObject(params.date) : params.date;
    this._width = ("width" in params) ? params.width : 10;
    this._color = params.color;
    this._cssClass = ("cssClass" in params) ? params.cssClass : "";
    this._opacity = ("opacity" in params) ? params.opacity : 100;
};
// Bind the decorator to its band and timeline; painting happens later.
Timeline.PointHighlightDecorator.prototype.initialize = function(band, timeline) {
    this._band = band;
    this._timeline = timeline;
    this._layerDiv = null;
};
// Repaint from scratch: drop any previous layer div, create a new one,
// and draw a stripe of _width pixels centered on _date -- but only when
// _date lies strictly inside the band's currently visible range.
+Timeline.PointHighlightDecorator.prototype.paint=function(){if(this._layerDiv!=null){this._band.removeLayerDiv(this._layerDiv);
+}this._layerDiv=this._band.createLayerDiv(10);
// NOTE(review): the layer is named "span-highlight-decorator" here too --
// looks copy-pasted from SpanHighlightDecorator; confirm before relying
// on the name attribute.
+this._layerDiv.setAttribute("name","span-highlight-decorator");
+this._layerDiv.style.display="none";
+var C=this._band.getMinDate();
+var E=this._band.getMaxDate();
+if(this._unit.compare(this._date,E)<0&&this._unit.compare(this._date,C)>0){var B=this._band.dateToPixelOffset(this._date);
+var A=B-Math.round(this._width/2);
+var D=this._timeline.getDocument();
+var F=D.createElement("div");
+F.className="timeline-highlight-point-decorator";
+F.className+=" "+this._cssClass;
+if(this._opacity<100){SimileAjax.Graphics.setOpacity(F,this._opacity);
+}this._layerDiv.appendChild(F);
// Horizontal bands offset along "left", vertical bands along "top".
+if(this._timeline.isHorizontal()){F.style.left=A+"px";
+}else{F.style.top=A+"px";
+}}this._layerDiv.style.display="block";
+};
// Nothing incremental to do on a soft repaint.
+Timeline.PointHighlightDecorator.prototype.softPaint=function(){};
+
+
/* detailed-painter.js -- an event painter that lays every event out on
   its own track. */

Timeline.DetailedEventPainter = function(params) {
    this._params = params;
    this._onSelectListeners = [];
    this._filterMatcher = null;
    this._highlightMatcher = null;
    this._frc = null;             // font-rendering context, created lazily
    this._eventIdToElmt = {};     // event id -> clickable DOM element
};
// Bind the painter to its band and timeline, resetting all layers.
Timeline.DetailedEventPainter.prototype.initialize = function(band, timeline) {
    this._band = band;
    this._timeline = timeline;
    this._backLayer = null;
    this._eventLayer = null;
    this._lineLayer = null;
    this._highlightLayer = null;
    this._eventIdToElmt = null;
};
// Selection listeners receive the selected event's id.
Timeline.DetailedEventPainter.prototype.addOnSelectListener = function(listener) {
    this._onSelectListeners.push(listener);
};
Timeline.DetailedEventPainter.prototype.removeOnSelectListener = function(listener) {
    for (var i = 0; i < this._onSelectListeners.length; i++) {
        if (this._onSelectListeners[i] == listener) {
            this._onSelectListeners.splice(i, 1);
            break;
        }
    }
};
Timeline.DetailedEventPainter.prototype.getFilterMatcher = function() {
    return this._filterMatcher;
};
Timeline.DetailedEventPainter.prototype.setFilterMatcher = function(filterMatcher) {
    this._filterMatcher = filterMatcher;
};
Timeline.DetailedEventPainter.prototype.getHighlightMatcher = function() {
    return this._highlightMatcher;
};
Timeline.DetailedEventPainter.prototype.setHighlightMatcher = function(highlightMatcher) {
    this._highlightMatcher = highlightMatcher;
};
+Timeline.DetailedEventPainter.prototype.paint=function(){var B=this._band.getEventSource();
+if(B==null){return ;
+}this._eventIdToElmt={};
+this._prepareForPainting();
+var I=this._params.theme.event;
+var G=Math.max(I.track.height,this._frc.getLineHeight());
+var F={trackOffset:Math.round(this._band.getViewWidth()/2-G/2),trackHeight:G,trackGap:I.track.gap,trackIncrement:G+I.track.gap,icon:I.instant.icon,iconWidth:I.instant.iconWidth,iconHeight:I.instant.iconHeight,labelWidth:I.label.width};
+var C=this._band.getMinDate();
+var A=this._band.getMaxDate();
+var J=(this._filterMatcher!=null)?this._filterMatcher:function(K){return true;
+};
+var E=(this._highlightMatcher!=null)?this._highlightMatcher:function(K){return -1;
+};
+var D=B.getEventReverseIterator(C,A);
+while(D.hasNext()){var H=D.next();
+if(J(H)){this.paintEvent(H,F,this._params.theme,E(H));
+}}this._highlightLayer.style.display="block";
+this._lineLayer.style.display="block";
+this._eventLayer.style.display="block";
+};
+Timeline.DetailedEventPainter.prototype.softPaint=function(){};
+Timeline.DetailedEventPainter.prototype._prepareForPainting=function(){var B=this._band;
+if(this._backLayer==null){this._backLayer=this._band.createLayerDiv(0,"timeline-band-events");
+this._backLayer.style.visibility="hidden";
+var A=document.createElement("span");
+A.className="timeline-event-label";
+this._backLayer.appendChild(A);
+this._frc=SimileAjax.Graphics.getFontRenderingContext(A);
+}this._frc.update();
+this._lowerTracks=[];
+this._upperTracks=[];
+if(this._highlightLayer!=null){B.removeLayerDiv(this._highlightLayer);
+}this._highlightLayer=B.createLayerDiv(105,"timeline-band-highlights");
+this._highlightLayer.style.display="none";
+if(this._lineLayer!=null){B.removeLayerDiv(this._lineLayer);
+}this._lineLayer=B.createLayerDiv(110,"timeline-band-lines");
+this._lineLayer.style.display="none";
+if(this._eventLayer!=null){B.removeLayerDiv(this._eventLayer);
+}this._eventLayer=B.createLayerDiv(110,"timeline-band-events");
+this._eventLayer.style.display="none";
+};
// Route an event to the right painter along the instant/duration and
// precise/imprecise axes.
Timeline.DetailedEventPainter.prototype.paintEvent = function(evt, metrics, theme, highlightIndex) {
    if (evt.isInstant()) {
        this.paintInstantEvent(evt, metrics, theme, highlightIndex);
    } else {
        this.paintDurationEvent(evt, metrics, theme, highlightIndex);
    }
};
Timeline.DetailedEventPainter.prototype.paintInstantEvent = function(evt, metrics, theme, highlightIndex) {
    if (evt.isImprecise()) {
        this.paintImpreciseInstantEvent(evt, metrics, theme, highlightIndex);
    } else {
        this.paintPreciseInstantEvent(evt, metrics, theme, highlightIndex);
    }
};
Timeline.DetailedEventPainter.prototype.paintDurationEvent = function(evt, metrics, theme, highlightIndex) {
    if (evt.isImprecise()) {
        this.paintImpreciseDurationEvent(evt, metrics, theme, highlightIndex);
    } else {
        this.paintPreciseDurationEvent(evt, metrics, theme, highlightIndex);
    }
};
+Timeline.DetailedEventPainter.prototype.paintPreciseInstantEvent=function(K,N,Q,O){var S=this._timeline.getDocument();
+var J=K.getText();
+var E=K.getStart();
+var C=Math.round(this._band.dateToPixelOffset(E));
+var A=Math.round(C+N.iconWidth/2);
+var I=Math.round(C-N.iconWidth/2);
+var G=this._frc.computeSize(J);
+var D=this._findFreeTrackForSolid(A,C);
+var B=this._paintEventIcon(K,D,I,N,Q);
+var T=A+Q.event.label.offsetFromLine;
+var P=D;
+var F=this._getTrackData(D);
+if(Math.min(F.solid,F.text)>=T+G.width){F.solid=I;
+F.text=T;
+}else{F.solid=I;
+T=C+Q.event.label.offsetFromLine;
+P=this._findFreeTrackForText(D,T+G.width,function(U){U.line=C-2;
+});
+this._getTrackData(P).text=I;
+this._paintEventLine(K,C,D,P,N,Q);
+}var R=Math.round(N.trackOffset+P*N.trackIncrement+N.trackHeight/2-G.height/2);
+var M=this._paintEventLabel(K,J,T,R,G.width,G.height,Q);
+var L=this;
+var H=function(U,V,W){return L._onClickInstantEvent(B.elmt,V,K);
+};
+SimileAjax.DOM.registerEvent(B.elmt,"mousedown",H);
+SimileAjax.DOM.registerEvent(M.elmt,"mousedown",H);
+this._createHighlightDiv(O,B,Q);
+this._eventIdToElmt[K.getID()]=B.elmt;
+};
+Timeline.DetailedEventPainter.prototype.paintImpreciseInstantEvent=function(N,Q,V,R){var X=this._timeline.getDocument();
+var M=N.getText();
+var H=N.getStart();
+var S=N.getEnd();
+var E=Math.round(this._band.dateToPixelOffset(H));
+var B=Math.round(this._band.dateToPixelOffset(S));
+var A=Math.round(E+Q.iconWidth/2);
+var L=Math.round(E-Q.iconWidth/2);
+var J=this._frc.computeSize(M);
+var F=this._findFreeTrackForSolid(B,E);
+var G=this._paintEventTape(N,F,E,B,V.event.instant.impreciseColor,V.event.instant.impreciseOpacity,Q,V);
+var C=this._paintEventIcon(N,F,L,Q,V);
+var I=this._getTrackData(F);
+I.solid=L;
+var W=A+V.event.label.offsetFromLine;
+var D=W+J.width;
+var T;
+if(D<B){T=F;
+}else{W=E+V.event.label.offsetFromLine;
+D=W+J.width;
+T=this._findFreeTrackForText(F,D,function(Y){Y.line=E-2;
+});
+this._getTrackData(T).text=L;
+this._paintEventLine(N,E,F,T,Q,V);
+}var U=Math.round(Q.trackOffset+T*Q.trackIncrement+Q.trackHeight/2-J.height/2);
+var P=this._paintEventLabel(N,M,W,U,J.width,J.height,V);
+var O=this;
+var K=function(Y,Z,a){return O._onClickInstantEvent(C.elmt,Z,N);
+};
+SimileAjax.DOM.registerEvent(C.elmt,"mousedown",K);
+SimileAjax.DOM.registerEvent(G.elmt,"mousedown",K);
+SimileAjax.DOM.registerEvent(P.elmt,"mousedown",K);
+this._createHighlightDiv(R,C,V);
+this._eventIdToElmt[N.getID()]=C.elmt;
+};
+Timeline.DetailedEventPainter.prototype.paintPreciseDurationEvent=function(J,M,S,O){var T=this._timeline.getDocument();
+var I=J.getText();
+var D=J.getStart();
+var P=J.getEnd();
+var B=Math.round(this._band.dateToPixelOffset(D));
+var A=Math.round(this._band.dateToPixelOffset(P));
+var F=this._frc.computeSize(I);
+var E=this._findFreeTrackForSolid(A);
+var N=J.getColor();
+N=N!=null?N:S.event.duration.color;
+var C=this._paintEventTape(J,E,B,A,N,100,M,S);
+var H=this._getTrackData(E);
+H.solid=B;
+var U=B+S.event.label.offsetFromLine;
+var Q=this._findFreeTrackForText(E,U+F.width,function(V){V.line=B-2;
+});
+this._getTrackData(Q).text=B-2;
+this._paintEventLine(J,B,E,Q,M,S);
+var R=Math.round(M.trackOffset+Q*M.trackIncrement+M.trackHeight/2-F.height/2);
+var L=this._paintEventLabel(J,I,U,R,F.width,F.height,S);
+var K=this;
+var G=function(V,W,X){return K._onClickDurationEvent(C.elmt,W,J);
+};
+SimileAjax.DOM.registerEvent(C.elmt,"mousedown",G);
+SimileAjax.DOM.registerEvent(L.elmt,"mousedown",G);
+this._createHighlightDiv(O,C,S);
+this._eventIdToElmt[J.getID()]=C.elmt;
+};
+Timeline.DetailedEventPainter.prototype.paintImpreciseDurationEvent=function(L,P,W,S){var Z=this._timeline.getDocument();
+var K=L.getText();
+var D=L.getStart();
+var Q=L.getLatestStart();
+var T=L.getEnd();
+var X=L.getEarliestEnd();
+var B=Math.round(this._band.dateToPixelOffset(D));
+var F=Math.round(this._band.dateToPixelOffset(Q));
+var A=Math.round(this._band.dateToPixelOffset(T));
+var G=Math.round(this._band.dateToPixelOffset(X));
+var H=this._frc.computeSize(K);
+var E=this._findFreeTrackForSolid(A);
+var R=L.getColor();
+R=R!=null?R:W.event.duration.color;
+var O=this._paintEventTape(L,E,B,A,W.event.duration.impreciseColor,W.event.duration.impreciseOpacity,P,W);
+var C=this._paintEventTape(L,E,F,G,R,100,P,W);
+var J=this._getTrackData(E);
+J.solid=B;
+var Y=F+W.event.label.offsetFromLine;
+var U=this._findFreeTrackForText(E,Y+H.width,function(a){a.line=F-2;
+});
+this._getTrackData(U).text=F-2;
+this._paintEventLine(L,F,E,U,P,W);
+var V=Math.round(P.trackOffset+U*P.trackIncrement+P.trackHeight/2-H.height/2);
+var N=this._paintEventLabel(L,K,Y,V,H.width,H.height,W);
+var M=this;
+var I=function(a,b,c){return M._onClickDurationEvent(C.elmt,b,L);
+};
+SimileAjax.DOM.registerEvent(C.elmt,"mousedown",I);
+SimileAjax.DOM.registerEvent(N.elmt,"mousedown",I);
+this._createHighlightDiv(S,C,W);
+this._eventIdToElmt[L.getID()]=C.elmt;
+};
// Find a free track for a solid mark whose left pixel edge is B.  paint()
// feeds events through a reverse (right-to-left) iterator, so a track is
// free when everything already placed on it (solid marks and text) begins
// to the right of B.  Lower tracks are addressed 0,1,2,... and upper
// tracks -1,-2,...; the scan alternates sides and grows whichever side
// runs out.  When A is given, the track's recorded connector line must
// also lie to the right of A -- presumably so label lines do not cross
// the new mark (TODO confirm).
+Timeline.DetailedEventPainter.prototype._findFreeTrackForSolid=function(B,A){for(var D=0;
+true;
+D++){if(D<this._lowerTracks.length){var C=this._lowerTracks[D];
+if(Math.min(C.solid,C.text)>B&&(!(A)||C.line>A)){return D;
// Newly created tracks start fully free: every edge at +Infinity.
+}}else{this._lowerTracks.push({solid:Number.POSITIVE_INFINITY,text:Number.POSITIVE_INFINITY,line:Number.POSITIVE_INFINITY});
+return D;
+}if(D<this._upperTracks.length){var C=this._upperTracks[D];
+if(Math.min(C.solid,C.text)>B&&(!(A)||C.line>A)){return -1-D;
+}}else{this._upperTracks.push({solid:Number.POSITIVE_INFINITY,text:Number.POSITIVE_INFINITY,line:Number.POSITIVE_INFINITY});
+return -1-D;
+}}};
+Timeline.DetailedEventPainter.prototype._findFreeTrackForText=function(D,C,H){var F;
+var G;
+var B;
+var J;
+if(D<0){F=true;
+B=-D;
+G=this._findFreeUpperTrackForText(B,C);
+J=-1-G;
+}else{if(D>0){F=false;
+B=D+1;
+G=this._findFreeLowerTrackForText(B,C);
+J=G;
+}else{var A=this._findFreeUpperTrackForText(0,C);
+var I=this._findFreeLowerTrackForText(1,C);
+if(I-1<=A){F=false;
+B=1;
+G=I;
+J=G;
+}else{F=true;
+B=0;
+G=A;
+J=-1-G;
+}}}if(F){if(G==this._upperTracks.length){this._upperTracks.push({solid:Number.POSITIVE_INFINITY,text:Number.POSITIVE_INFINITY,line:Number.POSITIVE_INFINITY});
+}for(var E=B;
+E<G;
+E++){H(this._upperTracks[E]);
+}}else{if(G==this._lowerTracks.length){this._lowerTracks.push({solid:Number.POSITIVE_INFINITY,text:Number.POSITIVE_INFINITY,line:Number.POSITIVE_INFINITY});
+}for(var E=B;
+E<G;
+E++){H(this._lowerTracks[E]);
+}}return J;
+};
// Scan the lower tracks from `track` downward for the first one whose
// contents start at or beyond pixel `right`; may return the index one past
// the end (the caller creates the new track).
Timeline.DetailedEventPainter.prototype._findFreeLowerTrackForText = function(track, right) {
    while (track < this._lowerTracks.length) {
        var data = this._lowerTracks[track];
        if (Math.min(data.solid, data.text) >= right) {
            break;
        }
        track++;
    }
    return track;
};
// Same scan over the upper tracks.
Timeline.DetailedEventPainter.prototype._findFreeUpperTrackForText = function(track, right) {
    while (track < this._upperTracks.length) {
        var data = this._upperTracks[track];
        if (Math.min(data.solid, data.text) >= right) {
            break;
        }
        track++;
    }
    return track;
};
// Indices >= 0 address the lower tracks; -1, -2, ... address the upper
// tracks (mapped to 0, 1, ...).
Timeline.DetailedEventPainter.prototype._getTrackData = function(track) {
    return (track < 0) ? this._upperTracks[-track - 1] : this._lowerTracks[track];
};
+Timeline.DetailedEventPainter.prototype._paintEventLine=function(I,C,F,A,G,D){var H=Math.round(G.trackOffset+F*G.trackIncrement+G.trackHeight/2);
+var J=Math.round(Math.abs(A-F)*G.trackIncrement);
+var E="1px solid "+D.event.label.lineColor;
+var B=this._timeline.getDocument().createElement("div");
+B.style.position="absolute";
+B.style.left=C+"px";
+B.style.width=D.event.label.offsetFromLine+"px";
+B.style.height=J+"px";
+if(F>A){B.style.top=(H-J)+"px";
+B.style.borderTop=E;
+}else{B.style.top=H+"px";
+B.style.borderBottom=E;
+}B.style.borderLeft=E;
+this._lineLayer.appendChild(B);
+};
+Timeline.DetailedEventPainter.prototype._paintEventIcon=function(I,E,B,F,D){var H=I.getIcon();
+H=H!=null?H:F.icon;
+var J=F.trackOffset+E*F.trackIncrement+F.trackHeight/2;
+var G=Math.round(J-F.iconHeight/2);
+var C=SimileAjax.Graphics.createTranslucentImage(H);
+var A=this._timeline.getDocument().createElement("div");
+A.style.position="absolute";
+A.style.left=B+"px";
+A.style.top=G+"px";
+A.appendChild(C);
+A.style.cursor="pointer";
+if(I._title!=null){A.title=I._title;
+}this._eventLayer.appendChild(A);
+return{left:B,top:G,width:F.iconWidth,height:F.iconHeight,elmt:A};
+};
+Timeline.DetailedEventPainter.prototype._paintEventLabel=function(H,I,B,F,A,J,D){var G=this._timeline.getDocument();
+var K=G.createElement("div");
+K.style.position="absolute";
+K.style.left=B+"px";
+K.style.width=A+"px";
+K.style.top=F+"px";
+K.style.height=J+"px";
+K.style.backgroundColor=D.event.label.backgroundColor;
+SimileAjax.Graphics.setOpacity(K,D.event.label.backgroundOpacity);
+this._eventLayer.appendChild(K);
+var E=G.createElement("div");
+E.style.position="absolute";
+E.style.left=B+"px";
+E.style.width=A+"px";
+E.style.top=F+"px";
+E.innerHTML=I;
+E.style.cursor="pointer";
+if(H._title!=null){E.title=H._title;
+}var C=H.getTextColor();
+if(C==null){C=H.getColor();
+}if(C!=null){E.style.color=C;
+}this._eventLayer.appendChild(E);
+return{left:B,top:F,width:A,height:J,elmt:E};
+};
+Timeline.DetailedEventPainter.prototype._paintEventTape=function(L,H,E,A,C,G,I,F){var B=A-E;
+var D=F.event.tape.height;
+var M=I.trackOffset+H*I.trackIncrement+I.trackHeight/2;
+var J=Math.round(M-D/2);
+var K=this._timeline.getDocument().createElement("div");
+K.style.position="absolute";
+K.style.left=E+"px";
+K.style.width=B+"px";
+K.style.top=J+"px";
+K.style.height=D+"px";
+K.style.backgroundColor=C;
+K.style.overflow="hidden";
+K.style.cursor="pointer";
+if(L._title!=null){K.title=L._title;
+}SimileAjax.Graphics.setOpacity(K,G);
+this._eventLayer.appendChild(K);
+return{left:E,top:J,width:B,height:D,elmt:K};
+};
+Timeline.DetailedEventPainter.prototype._createHighlightDiv=function(A,C,E){if(A>=0){var D=this._timeline.getDocument();
+var G=E.event;
+var B=G.highlightColors[Math.min(A,G.highlightColors.length-1)];
+var F=D.createElement("div");
+F.style.position="absolute";
+F.style.overflow="hidden";
+F.style.left=(C.left-2)+"px";
+F.style.width=(C.width+4)+"px";
+F.style.top=(C.top-2)+"px";
+F.style.height=(C.height+4)+"px";
+F.style.background=B;
+this._highlightLayer.appendChild(F);
+}};
+Timeline.DetailedEventPainter.prototype._onClickInstantEvent=function(B,C,A){var D=SimileAjax.DOM.getPageCoordinates(B);
+this._showBubble(D.left+Math.ceil(B.offsetWidth/2),D.top+Math.ceil(B.offsetHeight/2),A);
+this._fireOnSelect(A.getID());
+C.cancelBubble=true;
+SimileAjax.DOM.cancelEvent(C);
+return false;
+};
+Timeline.DetailedEventPainter.prototype._onClickDurationEvent=function(D,C,B){if("pageX" in C){var A=C.pageX;
+var F=C.pageY;
+}else{var E=SimileAjax.DOM.getPageCoordinates(D);
+var A=C.offsetX+E.left;
+var F=C.offsetY+E.top;
+}this._showBubble(A,F,B);
+this._fireOnSelect(B.getID());
+C.cancelBubble=true;
+SimileAjax.DOM.cancelEvent(C);
+return false;
+};
+Timeline.DetailedEventPainter.prototype.showBubble=function(A){var B=this._eventIdToElmt[A.getID()];
+if(B){var C=SimileAjax.DOM.getPageCoordinates(B);
+this._showBubble(C.left+B.offsetWidth/2,C.top+B.offsetHeight/2,A);
+}};
+Timeline.DetailedEventPainter.prototype._showBubble=function(A,D,B){var C=document.createElement("div");
+B.fillInfoBubble(C,this._params.theme,this._band.getLabeller());
+SimileAjax.WindowManager.cancelPopups();
+SimileAjax.Graphics.createBubbleForContentAndPoint(C,A,D,this._params.theme.event.bubble.width);
+};
+Timeline.DetailedEventPainter.prototype._fireOnSelect=function(B){for(var A=0;
+A<this._onSelectListeners.length;
+A++){this._onSelectListeners[A](B);
+}};
+
+
+/* ether-painters.js */
+Timeline.GregorianEtherPainter=function(A){this._params=A;
+this._theme=A.theme;
+this._unit=A.unit;
+this._multiple=("multiple" in A)?A.multiple:1;
+};
+Timeline.GregorianEtherPainter.prototype.initialize=function(C,B){this._band=C;
+this._timeline=B;
+this._backgroundLayer=C.createLayerDiv(0);
+this._backgroundLayer.setAttribute("name","ether-background");
+this._backgroundLayer.className="timeline-ether-bg";
+this._markerLayer=null;
+this._lineLayer=null;
+var D=("align" in this._params&&this._params.align!=undefined)?this._params.align:this._theme.ether.interval.marker[B.isHorizontal()?"hAlign":"vAlign"];
+var A=("showLine" in this._params)?this._params.showLine:this._theme.ether.interval.line.show;
+this._intervalMarkerLayout=new Timeline.EtherIntervalMarkerLayout(this._timeline,this._band,this._theme,D,A);
+this._highlight=new Timeline.EtherHighlight(this._timeline,this._band,this._theme,this._backgroundLayer);
+};
+Timeline.GregorianEtherPainter.prototype.setHighlight=function(A,B){this._highlight.position(A,B);
+};
+Timeline.GregorianEtherPainter.prototype.paint=function(){if(this._markerLayer){this._band.removeLayerDiv(this._markerLayer);
+}this._markerLayer=this._band.createLayerDiv(100);
+this._markerLayer.setAttribute("name","ether-markers");
+this._markerLayer.style.display="none";
+if(this._lineLayer){this._band.removeLayerDiv(this._lineLayer);
+}this._lineLayer=this._band.createLayerDiv(1);
+this._lineLayer.setAttribute("name","ether-lines");
+this._lineLayer.style.display="none";
+var C=this._band.getMinDate();
+var F=this._band.getMaxDate();
+var B=this._band.getTimeZone();
+var E=this._band.getLabeller();
+SimileAjax.DateTime.roundDownToInterval(C,this._unit,B,this._multiple,this._theme.firstDayOfWeek);
+var D=this;
+var A=function(G){for(var H=0;
+H<D._multiple;
+H++){SimileAjax.DateTime.incrementByInterval(G,D._unit);
+}};
+while(C.getTime()<F.getTime()){this._intervalMarkerLayout.createIntervalMarker(C,E,this._unit,this._markerLayer,this._lineLayer);
+A(C);
+}this._markerLayer.style.display="block";
+this._lineLayer.style.display="block";
+};
+Timeline.GregorianEtherPainter.prototype.softPaint=function(){};
+Timeline.GregorianEtherPainter.prototype.zoom=function(A){if(A!=0){this._unit+=A;
+}};
+Timeline.HotZoneGregorianEtherPainter=function(G){this._params=G;
+this._theme=G.theme;
+this._zones=[{startTime:Number.NEGATIVE_INFINITY,endTime:Number.POSITIVE_INFINITY,unit:G.unit,multiple:1}];
+for(var E=0;
+E<G.zones.length;
+E++){var B=G.zones[E];
+var D=SimileAjax.DateTime.parseGregorianDateTime(B.start).getTime();
+var F=SimileAjax.DateTime.parseGregorianDateTime(B.end).getTime();
+for(var C=0;
+C<this._zones.length&&F>D;
+C++){var A=this._zones[C];
+if(D<A.endTime){if(D>A.startTime){this._zones.splice(C,0,{startTime:A.startTime,endTime:D,unit:A.unit,multiple:A.multiple});
+C++;
+A.startTime=D;
+}if(F<A.endTime){this._zones.splice(C,0,{startTime:D,endTime:F,unit:B.unit,multiple:(B.multiple)?B.multiple:1});
+C++;
+A.startTime=F;
+D=F;
+}else{A.multiple=B.multiple;
+A.unit=B.unit;
+D=A.endTime;
+}}}}};
+Timeline.HotZoneGregorianEtherPainter.prototype.initialize=function(C,B){this._band=C;
+this._timeline=B;
+this._backgroundLayer=C.createLayerDiv(0);
+this._backgroundLayer.setAttribute("name","ether-background");
+this._backgroundLayer.className="timeline-ether-bg";
+this._markerLayer=null;
+this._lineLayer=null;
+var D=("align" in this._params&&this._params.align!=undefined)?this._params.align:this._theme.ether.interval.marker[B.isHorizontal()?"hAlign":"vAlign"];
+var A=("showLine" in this._params)?this._params.showLine:this._theme.ether.interval.line.show;
+this._intervalMarkerLayout=new Timeline.EtherIntervalMarkerLayout(this._timeline,this._band,this._theme,D,A);
+this._highlight=new Timeline.EtherHighlight(this._timeline,this._band,this._theme,this._backgroundLayer);
+};
+Timeline.HotZoneGregorianEtherPainter.prototype.setHighlight=function(A,B){this._highlight.position(A,B);
+};
+Timeline.HotZoneGregorianEtherPainter.prototype.paint=function(){if(this._markerLayer){this._band.removeLayerDiv(this._markerLayer);
+}this._markerLayer=this._band.createLayerDiv(100);
+this._markerLayer.setAttribute("name","ether-markers");
+this._markerLayer.style.display="none";
+if(this._lineLayer){this._band.removeLayerDiv(this._lineLayer);
+}this._lineLayer=this._band.createLayerDiv(1);
+this._lineLayer.setAttribute("name","ether-lines");
+this._lineLayer.style.display="none";
+var D=this._band.getMinDate();
+var A=this._band.getMaxDate();
+var K=this._band.getTimeZone();
+var I=this._band.getLabeller();
+var B=this;
+var L=function(N,M){for(var O=0;
+O<M.multiple;
+O++){SimileAjax.DateTime.incrementByInterval(N,M.unit);
+}};
+var C=0;
+while(C<this._zones.length){if(D.getTime()<this._zones[C].endTime){break;
+}C++;
+}var E=this._zones.length-1;
+while(E>=0){if(A.getTime()>this._zones[E].startTime){break;
+}E--;
+}for(var H=C;
+H<=E;
+H++){var G=this._zones[H];
+var J=new Date(Math.max(D.getTime(),G.startTime));
+var F=new Date(Math.min(A.getTime(),G.endTime));
+SimileAjax.DateTime.roundDownToInterval(J,G.unit,K,G.multiple,this._theme.firstDayOfWeek);
+SimileAjax.DateTime.roundUpToInterval(F,G.unit,K,G.multiple,this._theme.firstDayOfWeek);
+while(J.getTime()<F.getTime()){this._intervalMarkerLayout.createIntervalMarker(J,I,G.unit,this._markerLayer,this._lineLayer);
+L(J,G);
+}}this._markerLayer.style.display="block";
+this._lineLayer.style.display="block";
+};
+Timeline.HotZoneGregorianEtherPainter.prototype.softPaint=function(){};
+Timeline.HotZoneGregorianEtherPainter.prototype.zoom=function(B){if(B!=0){for(var A=0;
+A<this._zones.length;
+++A){if(this._zones[A]){this._zones[A].unit+=B;
+}}}};
+Timeline.YearCountEtherPainter=function(A){this._params=A;
+this._theme=A.theme;
+this._startDate=SimileAjax.DateTime.parseGregorianDateTime(A.startDate);
+this._multiple=("multiple" in A)?A.multiple:1;
+};
+Timeline.YearCountEtherPainter.prototype.initialize=function(C,B){this._band=C;
+this._timeline=B;
+this._backgroundLayer=C.createLayerDiv(0);
+this._backgroundLayer.setAttribute("name","ether-background");
+this._backgroundLayer.className="timeline-ether-bg";
+this._markerLayer=null;
+this._lineLayer=null;
+var D=("align" in this._params)?this._params.align:this._theme.ether.interval.marker[B.isHorizontal()?"hAlign":"vAlign"];
+var A=("showLine" in this._params)?this._params.showLine:this._theme.ether.interval.line.show;
+this._intervalMarkerLayout=new Timeline.EtherIntervalMarkerLayout(this._timeline,this._band,this._theme,D,A);
+this._highlight=new Timeline.EtherHighlight(this._timeline,this._band,this._theme,this._backgroundLayer);
+};
+Timeline.YearCountEtherPainter.prototype.setHighlight=function(A,B){this._highlight.position(A,B);
+};
+Timeline.YearCountEtherPainter.prototype.paint=function(){if(this._markerLayer){this._band.removeLayerDiv(this._markerLayer);
+}this._markerLayer=this._band.createLayerDiv(100);
+this._markerLayer.setAttribute("name","ether-markers");
+this._markerLayer.style.display="none";
+if(this._lineLayer){this._band.removeLayerDiv(this._lineLayer);
+}this._lineLayer=this._band.createLayerDiv(1);
+this._lineLayer.setAttribute("name","ether-lines");
+this._lineLayer.style.display="none";
+var B=new Date(this._startDate.getTime());
+var F=this._band.getMaxDate();
+var E=this._band.getMinDate().getUTCFullYear()-this._startDate.getUTCFullYear();
+B.setUTCFullYear(this._band.getMinDate().getUTCFullYear()-E%this._multiple);
+var C=this;
+var A=function(G){for(var H=0;
+H<C._multiple;
+H++){SimileAjax.DateTime.incrementByInterval(G,SimileAjax.DateTime.YEAR);
+}};
+var D={labelInterval:function(G,I){var H=G.getUTCFullYear()-C._startDate.getUTCFullYear();
+return{text:H,emphasized:H==0};
+}};
+while(B.getTime()<F.getTime()){this._intervalMarkerLayout.createIntervalMarker(B,D,SimileAjax.DateTime.YEAR,this._markerLayer,this._lineLayer);
+A(B);
+}this._markerLayer.style.display="block";
+this._lineLayer.style.display="block";
+};
+Timeline.YearCountEtherPainter.prototype.softPaint=function(){};
+Timeline.QuarterlyEtherPainter=function(A){this._params=A;
+this._theme=A.theme;
+this._startDate=SimileAjax.DateTime.parseGregorianDateTime(A.startDate);
+};
+Timeline.QuarterlyEtherPainter.prototype.initialize=function(C,B){this._band=C;
+this._timeline=B;
+this._backgroundLayer=C.createLayerDiv(0);
+this._backgroundLayer.setAttribute("name","ether-background");
+this._backgroundLayer.className="timeline-ether-bg";
+this._markerLayer=null;
+this._lineLayer=null;
+var D=("align" in this._params)?this._params.align:this._theme.ether.interval.marker[B.isHorizontal()?"hAlign":"vAlign"];
+var A=("showLine" in this._params)?this._params.showLine:this._theme.ether.interval.line.show;
+this._intervalMarkerLayout=new Timeline.EtherIntervalMarkerLayout(this._timeline,this._band,this._theme,D,A);
+this._highlight=new Timeline.EtherHighlight(this._timeline,this._band,this._theme,this._backgroundLayer);
+};
+Timeline.QuarterlyEtherPainter.prototype.setHighlight=function(A,B){this._highlight.position(A,B);
+};
+Timeline.QuarterlyEtherPainter.prototype.paint=function(){if(this._markerLayer){this._band.removeLayerDiv(this._markerLayer);
+}this._markerLayer=this._band.createLayerDiv(100);
+this._markerLayer.setAttribute("name","ether-markers");
+this._markerLayer.style.display="none";
+if(this._lineLayer){this._band.removeLayerDiv(this._lineLayer);
+}this._lineLayer=this._band.createLayerDiv(1);
+this._lineLayer.setAttribute("name","ether-lines");
+this._lineLayer.style.display="none";
+var B=new Date(0);
+var E=this._band.getMaxDate();
+B.setUTCFullYear(Math.max(this._startDate.getUTCFullYear(),this._band.getMinDate().getUTCFullYear()));
+B.setUTCMonth(this._startDate.getUTCMonth());
+var C=this;
+var A=function(F){F.setUTCMonth(F.getUTCMonth()+3);
+};
+var D={labelInterval:function(F,H){var G=(4+(F.getUTCMonth()-C._startDate.getUTCMonth())/3)%4;
+if(G!=0){return{text:"Q"+(G+1),emphasized:false};
+}else{return{text:"Y"+(F.getUTCFullYear()-C._startDate.getUTCFullYear()+1),emphasized:true};
+}}};
+while(B.getTime()<E.getTime()){this._intervalMarkerLayout.createIntervalMarker(B,D,SimileAjax.DateTime.YEAR,this._markerLayer,this._lineLayer);
+A(B);
+}this._markerLayer.style.display="block";
+this._lineLayer.style.display="block";
+};
+Timeline.QuarterlyEtherPainter.prototype.softPaint=function(){};
+Timeline.EtherIntervalMarkerLayout=function(M,L,C,E,H){var A=M.isHorizontal();
+if(A){if(E=="Top"){this.positionDiv=function(O,N){O.style.left=N+"px";
+O.style.top="0px";
+};
+}else{this.positionDiv=function(O,N){O.style.left=N+"px";
+O.style.bottom="0px";
+};
+}}else{if(E=="Left"){this.positionDiv=function(O,N){O.style.top=N+"px";
+O.style.left="0px";
+};
+}else{this.positionDiv=function(O,N){O.style.top=N+"px";
+O.style.right="0px";
+};
+}}var D=C.ether.interval.marker;
+var I=C.ether.interval.line;
+var B=C.ether.interval.weekend;
+var K=(A?"h":"v")+E;
+var G=D[K+"Styler"];
+var J=D[K+"EmphasizedStyler"];
+var F=SimileAjax.DateTime.gregorianUnitLengths[SimileAjax.DateTime.DAY];
+this.createIntervalMarker=function(T,a,b,c,Q){var U=Math.round(L.dateToPixelOffset(T));
+if(H&&b!=SimileAjax.DateTime.WEEK){var V=M.getDocument().createElement("div");
+V.className="timeline-ether-lines";
+if(I.opacity<100){SimileAjax.Graphics.setOpacity(V,I.opacity);
+}if(A){V.style.left=U+"px";
+}else{V.style.top=U+"px";
+}Q.appendChild(V);
+}if(b==SimileAjax.DateTime.WEEK){var N=C.firstDayOfWeek;
+var W=new Date(T.getTime()+(6-N-7)*F);
+var Z=new Date(W.getTime()+2*F);
+var X=Math.round(L.dateToPixelOffset(W));
+var S=Math.round(L.dateToPixelOffset(Z));
+var R=Math.max(1,S-X);
+var P=M.getDocument().createElement("div");
+P.className="timeline-ether-weekends";
+if(B.opacity<100){SimileAjax.Graphics.setOpacity(P,B.opacity);
+}if(A){P.style.left=X+"px";
+P.style.width=R+"px";
+}else{P.style.top=X+"px";
+P.style.height=R+"px";
+}Q.appendChild(P);
+}var Y=a.labelInterval(T,b);
+var O=M.getDocument().createElement("div");
+O.innerHTML=Y.text;
+O.className="timeline-date-label";
+if(Y.emphasized){O.className+=" timeline-date-label-em";
+}this.positionDiv(O,U);
+c.appendChild(O);
+return O;
+};
+};
+Timeline.EtherHighlight=function(C,E,D,B){var A=C.isHorizontal();
+this._highlightDiv=null;
+this._createHighlightDiv=function(){if(this._highlightDiv==null){this._highlightDiv=C.getDocument().createElement("div");
+this._highlightDiv.setAttribute("name","ether-highlight");
+this._highlightDiv.className="timeline-ether-highlight";
+var F=D.ether.highlightOpacity;
+if(F<100){SimileAjax.Graphics.setOpacity(this._highlightDiv,F);
+}B.appendChild(this._highlightDiv);
+}};
+this.position=function(F,I){this._createHighlightDiv();
+var J=Math.round(E.dateToPixelOffset(F));
+var H=Math.round(E.dateToPixelOffset(I));
+var G=Math.max(H-J,3);
+if(A){this._highlightDiv.style.left=J+"px";
+this._highlightDiv.style.width=G+"px";
+this._highlightDiv.style.height=(E.getViewWidth()-4)+"px";
+}else{this._highlightDiv.style.top=J+"px";
+this._highlightDiv.style.height=G+"px";
+this._highlightDiv.style.width=(E.getViewWidth()-4)+"px";
+}};
+};
+
+
+/* ethers.js */
+Timeline.LinearEther=function(A){this._params=A;
+this._interval=A.interval;
+this._pixelsPerInterval=A.pixelsPerInterval;
+};
+Timeline.LinearEther.prototype.initialize=function(B,A){this._band=B;
+this._timeline=A;
+this._unit=A.getUnit();
+if("startsOn" in this._params){this._start=this._unit.parseFromObject(this._params.startsOn);
+}else{if("endsOn" in this._params){this._start=this._unit.parseFromObject(this._params.endsOn);
+this.shiftPixels(-this._timeline.getPixelLength());
+}else{if("centersOn" in this._params){this._start=this._unit.parseFromObject(this._params.centersOn);
+this.shiftPixels(-this._timeline.getPixelLength()/2);
+}else{this._start=this._unit.makeDefaultValue();
+this.shiftPixels(-this._timeline.getPixelLength()/2);
+}}}};
+Timeline.LinearEther.prototype.setDate=function(A){this._start=this._unit.cloneValue(A);
+};
+Timeline.LinearEther.prototype.shiftPixels=function(B){var A=this._interval*B/this._pixelsPerInterval;
+this._start=this._unit.change(this._start,A);
+};
+Timeline.LinearEther.prototype.dateToPixelOffset=function(A){var B=this._unit.compare(A,this._start);
+return this._pixelsPerInterval*B/this._interval;
+};
+Timeline.LinearEther.prototype.pixelOffsetToDate=function(B){var A=B*this._interval/this._pixelsPerInterval;
+return this._unit.change(this._start,A);
+};
+Timeline.LinearEther.prototype.zoom=function(D){var B=0;
+var A=this._band._zoomIndex;
+var C=A;
+if(D&&(A>0)){C=A-1;
+}if(!D&&(A<(this._band._zoomSteps.length-1))){C=A+1;
+}this._band._zoomIndex=C;
+this._interval=SimileAjax.DateTime.gregorianUnitLengths[this._band._zoomSteps[C].unit];
+this._pixelsPerInterval=this._band._zoomSteps[C].pixelsPerInterval;
+B=this._band._zoomSteps[C].unit-this._band._zoomSteps[A].unit;
+return B;
+};
+Timeline.HotZoneEther=function(A){this._params=A;
+this._interval=A.interval;
+this._pixelsPerInterval=A.pixelsPerInterval;
+this._theme=A.theme;
+};
+Timeline.HotZoneEther.prototype.initialize=function(H,I){this._band=H;
+this._timeline=I;
+this._unit=I.getUnit();
+this._zones=[{startTime:Number.NEGATIVE_INFINITY,endTime:Number.POSITIVE_INFINITY,magnify:1}];
+var B=this._params;
+for(var D=0;
+D<B.zones.length;
+D++){var G=B.zones[D];
+var E=this._unit.parseFromObject(G.start);
+var F=this._unit.parseFromObject(G.end);
+for(var C=0;
+C<this._zones.length&&this._unit.compare(F,E)>0;
+C++){var A=this._zones[C];
+if(this._unit.compare(E,A.endTime)<0){if(this._unit.compare(E,A.startTime)>0){this._zones.splice(C,0,{startTime:A.startTime,endTime:E,magnify:A.magnify});
+C++;
+A.startTime=E;
+}if(this._unit.compare(F,A.endTime)<0){this._zones.splice(C,0,{startTime:E,endTime:F,magnify:G.magnify*A.magnify});
+C++;
+A.startTime=F;
+E=F;
+}else{A.magnify*=G.magnify;
+E=A.endTime;
+}}}}if("startsOn" in this._params){this._start=this._unit.parseFromObject(this._params.startsOn);
+}else{if("endsOn" in this._params){this._start=this._unit.parseFromObject(this._params.endsOn);
+this.shiftPixels(-this._timeline.getPixelLength());
+}else{if("centersOn" in this._params){this._start=this._unit.parseFromObject(this._params.centersOn);
+this.shiftPixels(-this._timeline.getPixelLength()/2);
+}else{this._start=this._unit.makeDefaultValue();
+this.shiftPixels(-this._timeline.getPixelLength()/2);
+}}}};
+Timeline.HotZoneEther.prototype.setDate=function(A){this._start=this._unit.cloneValue(A);
+};
+Timeline.HotZoneEther.prototype.shiftPixels=function(A){this._start=this.pixelOffsetToDate(A);
+};
+Timeline.HotZoneEther.prototype.dateToPixelOffset=function(A){return this._dateDiffToPixelOffset(this._start,A);
+};
+Timeline.HotZoneEther.prototype.pixelOffsetToDate=function(A){return this._pixelOffsetToDate(A,this._start);
+};
+Timeline.HotZoneEther.prototype.zoom=function(D){var B=0;
+var A=this._band._zoomIndex;
+var C=A;
+if(D&&(A>0)){C=A-1;
+}if(!D&&(A<(this._band._zoomSteps.length-1))){C=A+1;
+}this._band._zoomIndex=C;
+this._interval=SimileAjax.DateTime.gregorianUnitLengths[this._band._zoomSteps[C].unit];
+this._pixelsPerInterval=this._band._zoomSteps[C].pixelsPerInterval;
+B=this._band._zoomSteps[C].unit-this._band._zoomSteps[A].unit;
+return B;
+};
+Timeline.HotZoneEther.prototype._dateDiffToPixelOffset=function(I,D){var B=this._getScale();
+var H=I;
+var C=D;
+var A=0;
+if(this._unit.compare(H,C)<0){var G=0;
+while(G<this._zones.length){if(this._unit.compare(H,this._zones[G].endTime)<0){break;
+}G++;
+}while(this._unit.compare(H,C)<0){var E=this._zones[G];
+var F=this._unit.earlier(C,E.endTime);
+A+=(this._unit.compare(F,H)/(B/E.magnify));
+H=F;
+G++;
+}}else{var G=this._zones.length-1;
+while(G>=0){if(this._unit.compare(H,this._zones[G].startTime)>0){break;
+}G--;
+}while(this._unit.compare(H,C)>0){var E=this._zones[G];
+var F=this._unit.later(C,E.startTime);
+A+=(this._unit.compare(F,H)/(B/E.magnify));
+H=F;
+G--;
+}}return A;
+};
+Timeline.HotZoneEther.prototype._pixelOffsetToDate=function(H,C){var G=this._getScale();
+var E=C;
+if(H>0){var F=0;
+while(F<this._zones.length){if(this._unit.compare(E,this._zones[F].endTime)<0){break;
+}F++;
+}while(H>0){var A=this._zones[F];
+var D=G/A.magnify;
+if(A.endTime==Number.POSITIVE_INFINITY){E=this._unit.change(E,H*D);
+H=0;
+}else{var B=this._unit.compare(A.endTime,E)/D;
+if(B>H){E=this._unit.change(E,H*D);
+H=0;
+}else{E=A.endTime;
+H-=B;
+}}F++;
+}}else{var F=this._zones.length-1;
+while(F>=0){if(this._unit.compare(E,this._zones[F].startTime)>0){break;
+}F--;
+}H=-H;
+while(H>0){var A=this._zones[F];
+var D=G/A.magnify;
+if(A.startTime==Number.NEGATIVE_INFINITY){E=this._unit.change(E,-H*D);
+H=0;
+}else{var B=this._unit.compare(E,A.startTime)/D;
+if(B>H){E=this._unit.change(E,-H*D);
+H=0;
+}else{E=A.startTime;
+H-=B;
+}}F--;
+}}return E;
+};
+Timeline.HotZoneEther.prototype._getScale=function(){return this._interval/this._pixelsPerInterval;
+};
+
+
+/* labellers.js */
+Timeline.GregorianDateLabeller=function(A,B){this._locale=A;
+this._timeZone=B;
+};
+Timeline.GregorianDateLabeller.monthNames=[];
+Timeline.GregorianDateLabeller.dayNames=[];
+Timeline.GregorianDateLabeller.labelIntervalFunctions=[];
+Timeline.GregorianDateLabeller.getMonthName=function(B,A){return Timeline.GregorianDateLabeller.monthNames[A][B];
+};
+Timeline.GregorianDateLabeller.prototype.labelInterval=function(A,C){var B=Timeline.GregorianDateLabeller.labelIntervalFunctions[this._locale];
+if(B==null){B=Timeline.GregorianDateLabeller.prototype.defaultLabelInterval;
+}return B.call(this,A,C);
+};
+Timeline.GregorianDateLabeller.prototype.labelPrecise=function(A){return SimileAjax.DateTime.removeTimeZoneOffset(A,this._timeZone).toUTCString();
+};
+Timeline.GregorianDateLabeller.prototype.defaultLabelInterval=function(B,F){var C;
+var E=false;
+B=SimileAjax.DateTime.removeTimeZoneOffset(B,this._timeZone);
+switch(F){case SimileAjax.DateTime.MILLISECOND:C=B.getUTCMilliseconds();
+break;
+case SimileAjax.DateTime.SECOND:C=B.getUTCSeconds();
+break;
+case SimileAjax.DateTime.MINUTE:var A=B.getUTCMinutes();
+if(A==0){C=B.getUTCHours()+":00";
+E=true;
+}else{C=A;
+}break;
+case SimileAjax.DateTime.HOUR:C=B.getUTCHours()+"hr";
+break;
+case SimileAjax.DateTime.DAY:C=Timeline.GregorianDateLabeller.getMonthName(B.getUTCMonth(),this._locale)+" "+B.getUTCDate();
+break;
+case SimileAjax.DateTime.WEEK:C=Timeline.GregorianDateLabeller.getMonthName(B.getUTCMonth(),this._locale)+" "+B.getUTCDate();
+break;
+case SimileAjax.DateTime.MONTH:var A=B.getUTCMonth();
+if(A!=0){C=Timeline.GregorianDateLabeller.getMonthName(A,this._locale);
+break;
+}case SimileAjax.DateTime.YEAR:case SimileAjax.DateTime.DECADE:case SimileAjax.DateTime.CENTURY:case SimileAjax.DateTime.MILLENNIUM:var D=B.getUTCFullYear();
+if(D>0){C=B.getUTCFullYear();
+}else{C=(1-D)+"BC";
+}E=(F==SimileAjax.DateTime.MONTH)||(F==SimileAjax.DateTime.DECADE&&D%100==0)||(F==SimileAjax.DateTime.CENTURY&&D%1000==0);
+break;
+default:C=B.toUTCString();
+}return{text:C,emphasized:E};
+};
+
+
+/* original-painter.js */
+Timeline.OriginalEventPainter=function(A){this._params=A;
+this._onSelectListeners=[];
+this._filterMatcher=null;
+this._highlightMatcher=null;
+this._frc=null;
+this._eventIdToElmt={};
+};
+Timeline.OriginalEventPainter.prototype.initialize=function(B,A){this._band=B;
+this._timeline=A;
+this._backLayer=null;
+this._eventLayer=null;
+this._lineLayer=null;
+this._highlightLayer=null;
+this._eventIdToElmt=null;
+};
+Timeline.OriginalEventPainter.prototype.addOnSelectListener=function(A){this._onSelectListeners.push(A);
+};
+Timeline.OriginalEventPainter.prototype.removeOnSelectListener=function(B){for(var A=0;
+A<this._onSelectListeners.length;
+A++){if(this._onSelectListeners[A]==B){this._onSelectListeners.splice(A,1);
+break;
+}}};
+Timeline.OriginalEventPainter.prototype.getFilterMatcher=function(){return this._filterMatcher;
+};
+Timeline.OriginalEventPainter.prototype.setFilterMatcher=function(A){this._filterMatcher=A;
+};
+Timeline.OriginalEventPainter.prototype.getHighlightMatcher=function(){return this._highlightMatcher;
+};
+Timeline.OriginalEventPainter.prototype.setHighlightMatcher=function(A){this._highlightMatcher=A;
+};
+Timeline.OriginalEventPainter.prototype.paint=function(){var B=this._band.getEventSource();
+if(B==null){return ;
+}this._eventIdToElmt={};
+this._prepareForPainting();
+var I=this._params.theme.event;
+var G=Math.max(I.track.height,I.tape.height+this._frc.getLineHeight());
+var F={trackOffset:I.track.gap,trackHeight:G,trackGap:I.track.gap,trackIncrement:G+I.track.gap,icon:I.instant.icon,iconWidth:I.instant.iconWidth,iconHeight:I.instant.iconHeight,labelWidth:I.label.width};
+var C=this._band.getMinDate();
+var A=this._band.getMaxDate();
+var J=(this._filterMatcher!=null)?this._filterMatcher:function(K){return true;
+};
+var E=(this._highlightMatcher!=null)?this._highlightMatcher:function(K){return -1;
+};
+var D=B.getEventReverseIterator(C,A);
+while(D.hasNext()){var H=D.next();
+if(J(H)){this.paintEvent(H,F,this._params.theme,E(H));
+}}this._highlightLayer.style.display="block";
+this._lineLayer.style.display="block";
+this._eventLayer.style.display="block";
+};
+Timeline.OriginalEventPainter.prototype.softPaint=function(){};
+Timeline.OriginalEventPainter.prototype._prepareForPainting=function(){var B=this._band;
+if(this._backLayer==null){this._backLayer=this._band.createLayerDiv(0,"timeline-band-events");
+this._backLayer.style.visibility="hidden";
+var A=document.createElement("span");
+A.className="timeline-event-label";
+this._backLayer.appendChild(A);
+this._frc=SimileAjax.Graphics.getFontRenderingContext(A);
+}this._frc.update();
+this._tracks=[];
+if(this._highlightLayer!=null){B.removeLayerDiv(this._highlightLayer);
+}this._highlightLayer=B.createLayerDiv(105,"timeline-band-highlights");
+this._highlightLayer.style.display="none";
+if(this._lineLayer!=null){B.removeLayerDiv(this._lineLayer);
+}this._lineLayer=B.createLayerDiv(110,"timeline-band-lines");
+this._lineLayer.style.display="none";
+if(this._eventLayer!=null){B.removeLayerDiv(this._eventLayer);
+}this._eventLayer=B.createLayerDiv(115,"timeline-band-events");
+this._eventLayer.style.display="none";
+};
+Timeline.OriginalEventPainter.prototype.paintEvent=function(B,C,D,A){if(B.isInstant()){this.paintInstantEvent(B,C,D,A);
+}else{this.paintDurationEvent(B,C,D,A);
+}};
+Timeline.OriginalEventPainter.prototype.paintInstantEvent=function(B,C,D,A){if(B.isImprecise()){this.paintImpreciseInstantEvent(B,C,D,A);
+}else{this.paintPreciseInstantEvent(B,C,D,A);
+}};
+Timeline.OriginalEventPainter.prototype.paintDurationEvent=function(B,C,D,A){if(B.isImprecise()){this.paintImpreciseDurationEvent(B,C,D,A);
+}else{this.paintPreciseDurationEvent(B,C,D,A);
+}};
+Timeline.OriginalEventPainter.prototype.paintPreciseInstantEvent=function(J,N,P,O){var S=this._timeline.getDocument();
+var I=J.getText();
+var E=J.getStart();
+var C=Math.round(this._band.dateToPixelOffset(E));
+var A=Math.round(C+N.iconWidth/2);
+var H=Math.round(C-N.iconWidth/2);
+var F=this._frc.computeSize(I);
+var T=A+P.event.label.offsetFromLine;
+var D=T+F.width;
+var R=D;
+var L=this._findFreeTrack(R);
+var Q=Math.round(N.trackOffset+L*N.trackIncrement+N.trackHeight/2-F.height/2);
+var B=this._paintEventIcon(J,L,H,N,P);
+var M=this._paintEventLabel(J,I,T,Q,F.width,F.height,P);
+var K=this;
+var G=function(U,V,W){return K._onClickInstantEvent(B.elmt,V,J);
+};
+SimileAjax.DOM.registerEvent(B.elmt,"mousedown",G);
+SimileAjax.DOM.registerEvent(M.elmt,"mousedown",G);
+this._createHighlightDiv(O,B,P);
+this._eventIdToElmt[J.getID()]=B.elmt;
+this._tracks[L]=H;
+};
+Timeline.OriginalEventPainter.prototype.paintImpreciseInstantEvent=function(L,P,U,R){var W=this._timeline.getDocument();
+var K=L.getText();
+var G=L.getStart();
+var S=L.getEnd();
+var D=Math.round(this._band.dateToPixelOffset(G));
+var B=Math.round(this._band.dateToPixelOffset(S));
+var A=Math.round(D+P.iconWidth/2);
+var J=Math.round(D-P.iconWidth/2);
+var H=this._frc.computeSize(K);
+var X=A+U.event.label.offsetFromLine;
+var E=X+H.width;
+var V=Math.max(E,B);
+var N=this._findFreeTrack(V);
+var T=Math.round(P.trackOffset+N*P.trackIncrement+P.trackHeight/2-H.height/2);
+var C=this._paintEventIcon(L,N,J,P,U);
+var O=this._paintEventLabel(L,K,X,T,H.width,H.height,U);
+var Q=L.getColor();
+Q=Q!=null?Q:U.event.instant.impreciseColor;
+var F=this._paintEventTape(L,N,D,B,Q,U.event.instant.impreciseOpacity,P,U);
+var M=this;
+var I=function(Y,Z,a){return M._onClickInstantEvent(C.elmt,Z,L);
+};
+SimileAjax.DOM.registerEvent(C.elmt,"mousedown",I);
+SimileAjax.DOM.registerEvent(F.elmt,"mousedown",I);
+SimileAjax.DOM.registerEvent(O.elmt,"mousedown",I);
+this._createHighlightDiv(R,C,U);
+this._eventIdToElmt[L.getID()]=C.elmt;
+this._tracks[N]=J;
+};
+Timeline.OriginalEventPainter.prototype.paintPreciseDurationEvent=function(I,M,Q,O){var T=this._timeline.getDocument();
+var H=I.getText();
+var E=I.getStart();
+var P=I.getEnd();
+var B=Math.round(this._band.dateToPixelOffset(E));
+var A=Math.round(this._band.dateToPixelOffset(P));
+var F=this._frc.computeSize(H);
+var U=B;
+var C=U+F.width;
+var S=Math.max(C,A);
+var K=this._findFreeTrack(S);
+var R=Math.round(M.trackOffset+K*M.trackIncrement+Q.event.tape.height);
+var N=I.getColor();
+N=N!=null?N:Q.event.duration.color;
+var D=this._paintEventTape(I,K,B,A,N,100,M,Q);
+var L=this._paintEventLabel(I,H,U,R,F.width,F.height,Q);
+var J=this;
+var G=function(V,W,X){return J._onClickDurationEvent(D.elmt,W,I);
+};
+SimileAjax.DOM.registerEvent(D.elmt,"mousedown",G);
+SimileAjax.DOM.registerEvent(L.elmt,"mousedown",G);
+this._createHighlightDiv(O,D,Q);
+this._eventIdToElmt[I.getID()]=D.elmt;
+this._tracks[K]=B;
+};
+Timeline.OriginalEventPainter.prototype.paintImpreciseDurationEvent=function(K,P,V,S){var Y=this._timeline.getDocument();
+var J=K.getText();
+var E=K.getStart();
+var Q=K.getLatestStart();
+var T=K.getEnd();
+var X=K.getEarliestEnd();
+var B=Math.round(this._band.dateToPixelOffset(E));
+var F=Math.round(this._band.dateToPixelOffset(Q));
+var A=Math.round(this._band.dateToPixelOffset(T));
+var G=Math.round(this._band.dateToPixelOffset(X));
+var H=this._frc.computeSize(J);
+var Z=F;
+var C=Z+H.width;
+var W=Math.max(C,A);
+var M=this._findFreeTrack(W);
+var U=Math.round(P.trackOffset+M*P.trackIncrement+V.event.tape.height);
+var R=K.getColor();
+R=R!=null?R:V.event.duration.color;
+var O=this._paintEventTape(K,M,B,A,V.event.duration.impreciseColor,V.event.duration.impreciseOpacity,P,V);
+var D=this._paintEventTape(K,M,F,G,R,100,P,V);
+var N=this._paintEventLabel(K,J,Z,U,H.width,H.height,V);
+var L=this;
+var I=function(a,b,c){return L._onClickDurationEvent(D.elmt,b,K);
+};
+SimileAjax.DOM.registerEvent(D.elmt,"mousedown",I);
+SimileAjax.DOM.registerEvent(N.elmt,"mousedown",I);
+this._createHighlightDiv(S,D,V);
+this._eventIdToElmt[K.getID()]=D.elmt;
+this._tracks[M]=B;
+};
+Timeline.OriginalEventPainter.prototype._findFreeTrack=function(A){for(var C=0;
+C<this._tracks.length;
+C++){var B=this._tracks[C];
+if(B>A){break;
+}}return C;
+};
+Timeline.OriginalEventPainter.prototype._paintEventIcon=function(I,E,B,F,D){var H=I.getIcon();
+H=H!=null?H:F.icon;
+var J=F.trackOffset+E*F.trackIncrement+F.trackHeight/2;
+var G=Math.round(J-F.iconHeight/2);
+var C=SimileAjax.Graphics.createTranslucentImage(H);
+var A=this._timeline.getDocument().createElement("div");
+A.className="timeline-event-icon";
+A.style.left=B+"px";
+A.style.top=G+"px";
+A.appendChild(C);
+if(I._title!=null){A.title=I._title;
+}this._eventLayer.appendChild(A);
+return{left:B,top:G,width:F.iconWidth,height:F.iconHeight,elmt:A};
+};
+Timeline.OriginalEventPainter.prototype._paintEventLabel=function(I,J,B,G,A,K,E){var H=this._timeline.getDocument();
+var F=H.createElement("div");
+F.className="timeline-event-label";
+F.style.left=B+"px";
+F.style.width=A+"px";
+F.style.top=G+"px";
+F.innerHTML=J;
+if(I._title!=null){F.title=I._title;
+}var D=I.getTextColor();
+if(D==null){D=I.getColor();
+}if(D!=null){F.style.color=D;
+}var C=I.getClassName();
+if(C!=null){F.className+=" "+C;
+}this._eventLayer.appendChild(F);
+return{left:B,top:G,width:A,height:K,elmt:F};
+};
+Timeline.OriginalEventPainter.prototype._paintEventTape=function(O,J,G,A,D,I,K,H){var C=A-G;
+var F=H.event.tape.height;
+var L=K.trackOffset+J*K.trackIncrement;
+var N=this._timeline.getDocument().createElement("div");
+N.className="timeline-event-tape";
+N.style.left=G+"px";
+N.style.width=C+"px";
+N.style.height=F+"px";
+N.style.top=L+"px";
+if(O._title!=null){N.title=O._title;
+}if(D!=null){N.style.backgroundColor=D;
+}var M=O.getTapeImage();
+var E=O.getTapeRepeat();
+E=E!=null?E:"repeat";
+if(M!=null){N.style.backgroundImage="url("+M+")";
+N.style.backgroundRepeat=E;
+}SimileAjax.Graphics.setOpacity(N,I);
+var B=O.getClassName();
+if(B!=null){N.className+=" "+B;
+}this._eventLayer.appendChild(N);
+return{left:G,top:L,width:C,height:F,elmt:N};
+};
+Timeline.OriginalEventPainter.prototype._createHighlightDiv=function(A,C,E){if(A>=0){var D=this._timeline.getDocument();
+var G=E.event;
+var B=G.highlightColors[Math.min(A,G.highlightColors.length-1)];
+var F=D.createElement("div");
+F.style.position="absolute";
+F.style.overflow="hidden";
+F.style.left=(C.left-2)+"px";
+F.style.width=(C.width+4)+"px";
+F.style.top=(C.top-2)+"px";
+F.style.height=(C.height+4)+"px";
+this._highlightLayer.appendChild(F);
+}};
+Timeline.OriginalEventPainter.prototype._onClickInstantEvent=function(B,C,A){var D=SimileAjax.DOM.getPageCoordinates(B);
+this._showBubble(D.left+Math.ceil(B.offsetWidth/2),D.top+Math.ceil(B.offsetHeight/2),A);
+this._fireOnSelect(A.getID());
+C.cancelBubble=true;
+SimileAjax.DOM.cancelEvent(C);
+return false;
+};
+Timeline.OriginalEventPainter.prototype._onClickDurationEvent=function(D,C,B){if("pageX" in C){var A=C.pageX;
+var F=C.pageY;
+}else{var E=SimileAjax.DOM.getPageCoordinates(D);
+var A=C.offsetX+E.left;
+var F=C.offsetY+E.top;
+}this._showBubble(A,F,B);
+this._fireOnSelect(B.getID());
+C.cancelBubble=true;
+SimileAjax.DOM.cancelEvent(C);
+return false;
+};
+Timeline.OriginalEventPainter.prototype.showBubble=function(A){var B=this._eventIdToElmt[A.getID()];
+if(B){var C=SimileAjax.DOM.getPageCoordinates(B);
+this._showBubble(C.left+B.offsetWidth/2,C.top+B.offsetHeight/2,A);
+}};
+Timeline.OriginalEventPainter.prototype._showBubble=function(A,D,B){var C=document.createElement("div");
+B.fillInfoBubble(C,this._params.theme,this._band.getLabeller());
+SimileAjax.WindowManager.cancelPopups();
+SimileAjax.Graphics.createBubbleForContentAndPoint(C,A,D,this._params.theme.event.bubble.width);
+};
+Timeline.OriginalEventPainter.prototype._fireOnSelect=function(B){for(var A=0;
+A<this._onSelectListeners.length;
+A++){this._onSelectListeners[A](B);
+}};
+
+
+/* overview-painter.js */
+Timeline.OverviewEventPainter=function(A){this._params=A;
+this._onSelectListeners=[];
+this._filterMatcher=null;
+this._highlightMatcher=null;
+};
+Timeline.OverviewEventPainter.prototype.initialize=function(B,A){this._band=B;
+this._timeline=A;
+this._eventLayer=null;
+this._highlightLayer=null;
+};
+Timeline.OverviewEventPainter.prototype.addOnSelectListener=function(A){this._onSelectListeners.push(A);
+};
+Timeline.OverviewEventPainter.prototype.removeOnSelectListener=function(B){for(var A=0;
+A<this._onSelectListeners.length;
+A++){if(this._onSelectListeners[A]==B){this._onSelectListeners.splice(A,1);
+break;
+}}};
+Timeline.OverviewEventPainter.prototype.getFilterMatcher=function(){return this._filterMatcher;
+};
+Timeline.OverviewEventPainter.prototype.setFilterMatcher=function(A){this._filterMatcher=A;
+};
+Timeline.OverviewEventPainter.prototype.getHighlightMatcher=function(){return this._highlightMatcher;
+};
+Timeline.OverviewEventPainter.prototype.setHighlightMatcher=function(A){this._highlightMatcher=A;
+};
+Timeline.OverviewEventPainter.prototype.paint=function(){var B=this._band.getEventSource();
+if(B==null){return ;
+}this._prepareForPainting();
+var H=this._params.theme.event;
+var F={trackOffset:H.overviewTrack.offset,trackHeight:H.overviewTrack.height,trackGap:H.overviewTrack.gap,trackIncrement:H.overviewTrack.height+H.overviewTrack.gap};
+var C=this._band.getMinDate();
+var A=this._band.getMaxDate();
+var I=(this._filterMatcher!=null)?this._filterMatcher:function(J){return true;
+};
+var E=(this._highlightMatcher!=null)?this._highlightMatcher:function(J){return -1;
+};
+var D=B.getEventReverseIterator(C,A);
+while(D.hasNext()){var G=D.next();
+if(I(G)){this.paintEvent(G,F,this._params.theme,E(G));
+}}this._highlightLayer.style.display="block";
+this._eventLayer.style.display="block";
+};
+Timeline.OverviewEventPainter.prototype.softPaint=function(){};
+Timeline.OverviewEventPainter.prototype._prepareForPainting=function(){var A=this._band;
+this._tracks=[];
+if(this._highlightLayer!=null){A.removeLayerDiv(this._highlightLayer);
+}this._highlightLayer=A.createLayerDiv(105,"timeline-band-highlights");
+this._highlightLayer.style.display="none";
+if(this._eventLayer!=null){A.removeLayerDiv(this._eventLayer);
+}this._eventLayer=A.createLayerDiv(110,"timeline-band-events");
+this._eventLayer.style.display="none";
+};
+Timeline.OverviewEventPainter.prototype.paintEvent=function(B,C,D,A){if(B.isInstant()){this.paintInstantEvent(B,C,D,A);
+}else{this.paintDurationEvent(B,C,D,A);
+}};
+Timeline.OverviewEventPainter.prototype.paintInstantEvent=function(C,F,G,B){var A=C.getStart();
+var H=Math.round(this._band.dateToPixelOffset(A));
+var D=C.getColor();
+D=D!=null?D:G.event.duration.color;
+var E=this._paintEventTick(C,H,D,100,F,G);
+this._createHighlightDiv(B,E,G);
+};
+Timeline.OverviewEventPainter.prototype.paintDurationEvent=function(K,J,I,D){var A=K.getLatestStart();
+var C=K.getEarliestEnd();
+var B=Math.round(this._band.dateToPixelOffset(A));
+var E=Math.round(this._band.dateToPixelOffset(C));
+var H=0;
+for(;
+H<this._tracks.length;
+H++){if(E<this._tracks[H]){break;
+}}this._tracks[H]=E;
+var G=K.getColor();
+G=G!=null?G:I.event.duration.color;
+var F=this._paintEventTape(K,H,B,E,G,100,J,I);
+this._createHighlightDiv(D,F,I);
+};
+Timeline.OverviewEventPainter.prototype._paintEventTape=function(K,B,C,J,D,F,G,E){var H=G.trackOffset+B*G.trackIncrement;
+var A=J-C;
+var L=G.trackHeight;
+var I=this._timeline.getDocument().createElement("div");
+I.className="timeline-small-event-tape";
+I.style.left=C+"px";
+I.style.width=A+"px";
+I.style.top=H+"px";
+if(F<100){SimileAjax.Graphics.setOpacity(I,F);
+}this._eventLayer.appendChild(I);
+return{left:C,top:H,width:A,height:L,elmt:I};
+};
+Timeline.OverviewEventPainter.prototype._paintEventTick=function(J,B,D,F,G,E){var K=E.event.overviewTrack.tickHeight;
+var H=G.trackOffset-K;
+var A=1;
+var I=this._timeline.getDocument().createElement("div");
+I.className="timeline-small-event-icon";
+I.style.left=B+"px";
+I.style.top=H+"px";
+var C=J.getClassName();
+if(C){I.className+=" small-"+C;
+}if(F<100){SimileAjax.Graphics.setOpacity(I,F);
+}this._eventLayer.appendChild(I);
+return{left:B,top:H,width:A,height:K,elmt:I};
+};
+Timeline.OverviewEventPainter.prototype._createHighlightDiv=function(A,C,E){if(A>=0){var D=this._timeline.getDocument();
+var G=E.event;
+var B=G.highlightColors[Math.min(A,G.highlightColors.length-1)];
+var F=D.createElement("div");
+F.style.position="absolute";
+F.style.overflow="hidden";
+F.style.left=(C.left-1)+"px";
+F.style.width=(C.width+2)+"px";
+F.style.top=(C.top-1)+"px";
+F.style.height=(C.height+2)+"px";
+F.style.background=B;
+this._highlightLayer.appendChild(F);
+}};
+Timeline.OverviewEventPainter.prototype.showBubble=function(A){};
+
+
+/* sources.js */
+Timeline.DefaultEventSource=function(A){
+    this._events=(A instanceof Object)?A:new SimileAjax.EventIndex();
+    this._listeners=[];
+};
+Timeline.DefaultEventSource.prototype.addListener=function(A){this._listeners.push(A);
+};
+Timeline.DefaultEventSource.prototype.removeListener=function(B){for(var A=0;
+A<this._listeners.length;
+A++){if(this._listeners[A]==B){this._listeners.splice(A,1);
+break;
+}}};
+Timeline.DefaultEventSource.prototype.loadXML=function(F,A){var B=this._getBaseURL(A);
+var G=F.documentElement.getAttribute("wiki-url");
+var K=F.documentElement.getAttribute("wiki-section");
+var D=F.documentElement.getAttribute("date-time-format");
+var E=this._events.getUnit().getParser(D);
+var C=F.documentElement.firstChild;
+var H=false;
+while(C!=null){if(C.nodeType==1){var J="";
+if(C.firstChild!=null&&C.firstChild.nodeType==3){J=C.firstChild.nodeValue;
+}var I=new Timeline.DefaultEventSource.Event({id:C.getAttribute("id"),start:E(C.getAttribute("start")),end:E(C.getAttribute("end")),latestStart:E(C.getAttribute("latestStart")),earliestEnd:E(C.getAttribute("earliestEnd")),instant:C.getAttribute("isDuration")!="true",text:C.getAttribute("title"),description:J,image:this._resolveRelativeURL(C.getAttribute("image"),B),link:this._resolveRelativeURL(C.getAttribute("link"),B),icon:this._resolveRelativeURL(C.getAttribute("icon"),B),color:C.getAttribute("color"),textColor:C.getAttribute("textColor"),hoverText:C.getAttribute("hoverText"),classname:C.getAttribute("classname"),tapeImage:C.getAttribute("tapeImage"),tapeRepeat:C.getAttribute("tapeRepeat"),caption:C.getAttribute("caption"),eventID:C.getAttribute("eventID")});
+I._node=C;
+I.getProperty=function(L){return this._node.getAttribute(L);
+};
+I.setWikiInfo(G,K);
+this._events.add(I);
+H=true;
+}C=C.nextSibling;
+}if(H){this._fire("onAddMany",[]);
+}};
+Timeline.DefaultEventSource.prototype.loadJSON=function(F,B){var C=this._getBaseURL(B);
+var I=false;
+if(F&&F.events){var H=("wikiURL" in F)?F.wikiURL:null;
+var K=("wikiSection" in F)?F.wikiSection:null;
+var D=("dateTimeFormat" in F)?F.dateTimeFormat:null;
+var G=this._events.getUnit().getParser(D);
+for(var E=0;
+E<F.events.length;
+E++){var A=F.events[E];
+var J=new Timeline.DefaultEventSource.Event({id:("id" in A)?A.id:undefined,start:G(A.start),end:G(A.end),latestStart:G(A.latestStart),earliestEnd:G(A.earliestEnd),instant:A.isDuration||false,text:A.title,description:A.description,image:this._resolveRelativeURL(A.image,C),link:this._resolveRelativeURL(A.link,C),icon:this._resolveRelativeURL(A.icon,C),color:A.color,textColor:A.textColor,hoverText:A.hoverText,classname:A.classname,tapeImage:A.tapeImage,tapeRepeat:A.tapeRepeat,caption:A.caption,eventID:A.eventID});
+J._obj=A;
+J.getProperty=function(L){return this._obj[L];
+};
+J.setWikiInfo(H,K);
+this._events.add(J);
+I=true;
+}}if(I){this._fire("onAddMany",[]);
+}};
+Timeline.DefaultEventSource.prototype.loadSPARQL=function(G,A){var C=this._getBaseURL(A);
+var E="iso8601";
+var F=this._events.getUnit().getParser(E);
+if(G==null){return ;
+}var D=G.documentElement.firstChild;
+while(D!=null&&(D.nodeType!=1||D.nodeName!="results")){D=D.nextSibling;
+}var I=null;
+var L=null;
+if(D!=null){I=D.getAttribute("wiki-url");
+L=D.getAttribute("wiki-section");
+D=D.firstChild;
+}var J=false;
+while(D!=null){if(D.nodeType==1){var B={};
+var H=D.firstChild;
+while(H!=null){if(H.nodeType==1&&H.firstChild!=null&&H.firstChild.nodeType==1&&H.firstChild.firstChild!=null&&H.firstChild.firstChild.nodeType==3){B[H.getAttribute("name")]=H.firstChild.firstChild.nodeValue;
+}H=H.nextSibling;
+}if(B["start"]==null&&B["date"]!=null){B["start"]=B["date"];
+}var K=new Timeline.DefaultEventSource.Event({id:B["id"],start:F(B["start"]),end:F(B["end"]),latestStart:F(B["latestStart"]),earliestEnd:F(B["earliestEnd"]),instant:B["isDuration"]!="true",text:B["title"],description:B["description"],image:this._resolveRelativeURL(B["image"],C),link:this._resolveRelativeURL(B["link"],C),icon:this._resolveRelativeURL(B["icon"],C),color:B["color"],textColor:B["textColor"],hoverText:B["hoverText"],caption:B["caption"],classname:B["classname"],tapeImage:B["tapeImage"],tapeRepeat:B["tapeRepeat"],eventID:B["eventID"]});
+K._bindings=B;
+K.getProperty=function(M){return this._bindings[M];
+};
+K.setWikiInfo(I,L);
+this._events.add(K);
+J=true;
+}D=D.nextSibling;
+}if(J){this._fire("onAddMany",[]);
+}};
+Timeline.DefaultEventSource.prototype.add=function(A){this._events.add(A);
+this._fire("onAddOne",[A]);
+};
+Timeline.DefaultEventSource.prototype.addMany=function(B){for(var A=0;
+A<B.length;
+A++){this._events.add(B[A]);
+}this._fire("onAddMany",[]);
+};
+Timeline.DefaultEventSource.prototype.clear=function(){this._events.removeAll();
+this._fire("onClear",[]);
+};
+Timeline.DefaultEventSource.prototype.getEvent=function(A){return this._events.getEvent(A);
+};
+Timeline.DefaultEventSource.prototype.getEventIterator=function(A,B){return this._events.getIterator(A,B);
+};
+Timeline.DefaultEventSource.prototype.getEventReverseIterator=function(A,B){return this._events.getReverseIterator(A,B);
+};
+Timeline.DefaultEventSource.prototype.getAllEventIterator=function(){return this._events.getAllIterator();
+};
+Timeline.DefaultEventSource.prototype.getCount=function(){return this._events.getCount();
+};
+Timeline.DefaultEventSource.prototype.getEarliestDate=function(){return this._events.getEarliestDate();
+};
+Timeline.DefaultEventSource.prototype.getLatestDate=function(){return this._events.getLatestDate();
+};
+Timeline.DefaultEventSource.prototype._fire=function(B,A){for(var C=0;
+C<this._listeners.length;
+C++){var D=this._listeners[C];
+if(B in D){try{D[B].apply(D,A);
+}catch(E){SimileAjax.Debug.exception(E);
+}}}};
+Timeline.DefaultEventSource.prototype._getBaseURL=function(A){if(A.indexOf("://")<0){var C=this._getBaseURL(document.location.href);
+if(A.substr(0,1)=="/"){A=C.substr(0,C.indexOf("/",C.indexOf("://")+3))+A;
+}else{A=C+A;
+}}var B=A.lastIndexOf("/");
+if(B<0){return"";
+}else{return A.substr(0,B+1);
+}};
+Timeline.DefaultEventSource.prototype._resolveRelativeURL=function(A,B){if(A==null||A==""){return A;
+}else{if(A.indexOf("://")>0){return A;
+}else{if(A.substr(0,1)=="/"){return B.substr(0,B.indexOf("/",B.indexOf("://")+3))+A;
+}else{return B+A;
+}}}};
+Timeline.DefaultEventSource.Event=function(A){function C(D){return(A[D]!=null&&A[D]!="")?A[D]:null;
+}var B=(A.id)?A.id.trim():"";
+this._id=B.length>0?B:("e"+Math.floor(Math.random()*1000000));
+this._instant=A.instant||(A.end==null);
+this._start=A.start;
+this._end=(A.end!=null)?A.end:A.start;
+this._latestStart=(A.latestStart!=null)?A.latestStart:(A.instant?this._end:this._start);
+this._earliestEnd=(A.earliestEnd!=null)?A.earliestEnd:(A.instant?this._start:this._end);
+this._eventID=C("eventID");
+this._text=(A.text!=null)?SimileAjax.HTML.deEntify(A.text):"";
+this._description=SimileAjax.HTML.deEntify(A.description);
+this._image=C("image");
+this._link=C("link");
+this._title=C("hoverText");
+this._title=C("caption");
+this._icon=C("icon");
+this._color=C("color");
+this._textColor=C("textColor");
+this._classname=C("classname");
+this._tapeImage=C("tapeImage");
+this._tapeRepeat=C("tapeRepeat");
+this._wikiURL=null;
+this._wikiSection=null;
+};
+Timeline.DefaultEventSource.Event.prototype={getID:function(){return this._id;
+},isInstant:function(){return this._instant;
+},isImprecise:function(){return this._start!=this._latestStart||this._end!=this._earliestEnd;
+},getStart:function(){return this._start;
+},getEnd:function(){return this._end;
+},getLatestStart:function(){return this._latestStart;
+},getEarliestEnd:function(){return this._earliestEnd;
+},getEventID:function(){return this._eventID;
+},getText:function(){return this._text;
+},getDescription:function(){return this._description;
+},getImage:function(){return this._image;
+},getLink:function(){return this._link;
+},getIcon:function(){return this._icon;
+},getColor:function(){return this._color;
+},getTextColor:function(){return this._textColor;
+},getClassName:function(){return this._classname;
+},getTapeImage:function(){return this._tapeImage;
+},getTapeRepeat:function(){return this._tapeRepeat;
+},getProperty:function(A){return null;
+},getWikiURL:function(){return this._wikiURL;
+},getWikiSection:function(){return this._wikiSection;
+},setWikiInfo:function(B,A){this._wikiURL=B;
+this._wikiSection=A;
+},fillDescription:function(A){A.innerHTML=this._description;
+},fillWikiInfo:function(D){D.style.display="none";
+if(this._wikiURL==null||this._wikiSection==null){return ;
+}var C=this.getProperty("wikiID");
+if(C==null||C.length==0){C=this.getText();
+}if(C==null||C.length==0){return ;
+}D.style.display="inline";
+C=C.replace(/\s/g,"_");
+var B=this._wikiURL+this._wikiSection.replace(/\s/g,"_")+"/"+C;
+var A=document.createElement("a");
+A.href=B;
+A.target="new";
+A.innerHTML=Timeline.strings[Timeline.clientLocale].wikiLinkLabel;
+D.appendChild(document.createTextNode("["));
+D.appendChild(A);
+D.appendChild(document.createTextNode("]"));
+},fillTime:function(A,B){if(this._instant){if(this.isImprecise()){A.appendChild(A.ownerDocument.createTextNode(B.labelPrecise(this._start)));
+A.appendChild(A.ownerDocument.createElement("br"));
+A.appendChild(A.ownerDocument.createTextNode(B.labelPrecise(this._end)));
+}else{A.appendChild(A.ownerDocument.createTextNode(B.labelPrecise(this._start)));
+}}else{if(this.isImprecise()){A.appendChild(A.ownerDocument.createTextNode(B.labelPrecise(this._start)+" ~ "+B.labelPrecise(this._latestStart)));
+A.appendChild(A.ownerDocument.createElement("br"));
+A.appendChild(A.ownerDocument.createTextNode(B.labelPrecise(this._earliestEnd)+" ~ "+B.labelPrecise(this._end)));
+}else{A.appendChild(A.ownerDocument.createTextNode(B.labelPrecise(this._start)));
+A.appendChild(A.ownerDocument.createElement("br"));
+A.appendChild(A.ownerDocument.createTextNode(B.labelPrecise(this._end)));
+}}},fillInfoBubble:function(A,D,K){var L=A.ownerDocument;
+var J=this.getText();
+var H=this.getLink();
+var C=this.getImage();
+if(C!=null){var E=L.createElement("img");
+E.src=C;
+D.event.bubble.imageStyler(E);
+A.appendChild(E);
+}var M=L.createElement("div");
+var B=L.createTextNode(J);
+if(H!=null){var I=L.createElement("a");
+I.href=H;
+I.appendChild(B);
+M.appendChild(I);
+}else{M.appendChild(B);
+}D.event.bubble.titleStyler(M);
+A.appendChild(M);
+var N=L.createElement("div");
+this.fillDescription(N);
+D.event.bubble.bodyStyler(N);
+A.appendChild(N);
+var G=L.createElement("div");
+this.fillTime(G,K);
+D.event.bubble.timeStyler(G);
+A.appendChild(G);
+var F=L.createElement("div");
+this.fillWikiInfo(F);
+D.event.bubble.wikiStyler(F);
+A.appendChild(F);
+}};
+
+
+/* themes.js */
+Timeline.ClassicTheme=new Object();
+Timeline.ClassicTheme.implementations=[];
+Timeline.ClassicTheme.create=function(A){if(A==null){A=Timeline.getDefaultLocale();
+}var B=Timeline.ClassicTheme.implementations[A];
+if(B==null){B=Timeline.ClassicTheme._Impl;
+}return new B();
+};
+Timeline.ClassicTheme._Impl=function(){this.firstDayOfWeek=0;
+this.ether={backgroundColors:[],highlightOpacity:50,interval:{line:{show:true,opacity:25},weekend:{opacity:30},marker:{hAlign:"Bottom",vAlign:"Right"}}};
+this.event={track:{height:10,gap:2},overviewTrack:{offset:20,tickHeight:6,height:2,gap:1},tape:{height:4},instant:{icon:Timeline.urlPrefix+"data/timeline/dull-blue-circle.png",iconWidth:10,iconHeight:10,impreciseOpacity:20},duration:{impreciseOpacity:20},label:{backgroundOpacity:50,offsetFromLine:3},highlightColors:[],bubble:{width:250,height:125,titleStyler:function(A){A.className="timeline-event-bubble-title";
+},bodyStyler:function(A){A.className="timeline-event-bubble-body";
+},imageStyler:function(A){A.className="timeline-event-bubble-image";
+},wikiStyler:function(A){A.className="timeline-event-bubble-wiki";
+},timeStyler:function(A){A.className="timeline-event-bubble-time";
+}}};
+this.mouseWheel="scroll";
+};
+
+
+/* timeline.js */
+Timeline.strings={};
+Timeline.getDefaultLocale=function(){return Timeline.clientLocale;
+};
+Timeline.create=function(C,B,A,D){return new Timeline._Impl(C,B,A,D);
+};
+Timeline.HORIZONTAL=0;
+Timeline.VERTICAL=1;
+Timeline._defaultTheme=null;
+Timeline.createBandInfo=function(D){var E=("theme" in D)?D.theme:Timeline.getDefaultTheme();
+var B=("eventSource" in D)?D.eventSource:null;
+var F={interval:SimileAjax.DateTime.gregorianUnitLengths[D.intervalUnit],pixelsPerInterval:D.intervalPixels};
+if("startsOn" in D||"endsOn" in D){if("startsOn" in D){F.startsOn=D.startsOn;
+}if("endsOn" in D){F.endsOn=D.endsOn;
+}}else{if("date" in D){F.centersOn=D.date;
+}else{F.centersOn=new Date();
+}}var G=new Timeline.LinearEther(F);
+var H=new Timeline.GregorianEtherPainter({unit:D.intervalUnit,multiple:("multiple" in D)?D.multiple:1,theme:E,align:("align" in D)?D.align:undefined});
+var J={showText:("showEventText" in D)?D.showEventText:true,theme:E};
+if("eventPainterParams" in D){for(var A in D.eventPainterParams){J[A]=D.eventPainterParams[A];
+}}if("trackHeight" in D){J.trackHeight=D.trackHeight;
+}if("trackGap" in D){J.trackGap=D.trackGap;
+}var I=("overview" in D&&D.overview)?"overview":("layout" in D?D.layout:"original");
+var C;
+if("eventPainter" in D){C=new D.eventPainter(J);
+}else{switch(I){case"overview":C=new Timeline.OverviewEventPainter(J);
+break;
+case"detailed":C=new Timeline.DetailedEventPainter(J);
+break;
+default:C=new Timeline.OriginalEventPainter(J);
+}}return{width:D.width,eventSource:B,timeZone:("timeZone" in D)?D.timeZone:0,ether:G,etherPainter:H,eventPainter:C,theme:E,zoomIndex:("zoomIndex" in D)?D.zoomIndex:0,zoomSteps:("zoomSteps" in D)?D.zoomSteps:null};
+};
+Timeline.createHotZoneBandInfo=function(D){var E=("theme" in D)?D.theme:Timeline.getDefaultTheme();
+var B=("eventSource" in D)?D.eventSource:null;
+var F=new Timeline.HotZoneEther({centersOn:("date" in D)?D.date:new Date(),interval:SimileAjax.DateTime.gregorianUnitLengths[D.intervalUnit],pixelsPerInterval:D.intervalPixels,zones:D.zones,theme:E});
+var G=new Timeline.HotZoneGregorianEtherPainter({unit:D.intervalUnit,zones:D.zones,theme:E,align:("align" in D)?D.align:undefined});
+var I={showText:("showEventText" in D)?D.showEventText:true,theme:E};
+if("eventPainterParams" in D){for(var A in D.eventPainterParams){I[A]=D.eventPainterParams[A];
+}}if("trackHeight" in D){I.trackHeight=D.trackHeight;
+}if("trackGap" in D){I.trackGap=D.trackGap;
+}var H=("overview" in D&&D.overview)?"overview":("layout" in D?D.layout:"original");
+var C;
+if("eventPainter" in D){C=new D.eventPainter(I);
+}else{switch(H){case"overview":C=new Timeline.OverviewEventPainter(I);
+break;
+case"detailed":C=new Timeline.DetailedEventPainter(I);
+break;
+default:C=new Timeline.OriginalEventPainter(I);
+}}return{width:D.width,eventSource:B,timeZone:("timeZone" in D)?D.timeZone:0,ether:F,etherPainter:G,eventPainter:C,theme:E,zoomIndex:("zoomIndex" in D)?D.zoomIndex:0,zoomSteps:("zoomSteps" in D)?D.zoomSteps:null};
+};
+Timeline.getDefaultTheme=function(){if(Timeline._defaultTheme==null){Timeline._defaultTheme=Timeline.ClassicTheme.create(Timeline.getDefaultLocale());
+}return Timeline._defaultTheme;
+};
+Timeline.setDefaultTheme=function(A){Timeline._defaultTheme=A;
+};
+Timeline.loadXML=function(A,C){var D=function(G,E,F){alert("Failed to load data xml from "+A+"\n"+G);
+};
+var B=function(F){var E=F.responseXML;
+if(!E.documentElement&&F.responseStream){E.load(F.responseStream);
+}C(E,A);
+};
+SimileAjax.XmlHttp.get(A,D,B);
+};
+Timeline.loadJSON=function(url,f){var fError=function(statusText,status,xmlhttp){alert("Failed to load json data from "+url+"\n"+statusText);
+};
+var fDone=function(xmlhttp){f(eval("("+xmlhttp.responseText+")"),url);
+};
+SimileAjax.XmlHttp.get(url,fError,fDone);
+};
+Timeline._Impl=function(C,B,A,D){SimileAjax.WindowManager.initialize();
+this._containerDiv=C;
+this._bandInfos=B;
+this._orientation=A==null?Timeline.HORIZONTAL:A;
+this._unit=(D!=null)?D:SimileAjax.NativeDateUnit;
+this._initialize();
+};
+Timeline._Impl.prototype.dispose=function(){for(var A=0;
+A<this._bands.length;
+A++){this._bands[A].dispose();
+}this._bands=null;
+this._bandInfos=null;
+this._containerDiv.innerHTML="";
+};
+Timeline._Impl.prototype.getBandCount=function(){return this._bands.length;
+};
+Timeline._Impl.prototype.getBand=function(A){return this._bands[A];
+};
+Timeline._Impl.prototype.layout=function(){this._distributeWidths();
+};
+Timeline._Impl.prototype.paint=function(){for(var A=0;
+A<this._bands.length;
+A++){this._bands[A].paint();
+}};
+Timeline._Impl.prototype.getDocument=function(){return this._containerDiv.ownerDocument;
+};
+Timeline._Impl.prototype.addDiv=function(A){this._containerDiv.appendChild(A);
+};
+Timeline._Impl.prototype.removeDiv=function(A){this._containerDiv.removeChild(A);
+};
+Timeline._Impl.prototype.isHorizontal=function(){return this._orientation==Timeline.HORIZONTAL;
+};
+Timeline._Impl.prototype.isVertical=function(){return this._orientation==Timeline.VERTICAL;
+};
+Timeline._Impl.prototype.getPixelLength=function(){return this._orientation==Timeline.HORIZONTAL?this._containerDiv.offsetWidth:this._containerDiv.offsetHeight;
+};
+Timeline._Impl.prototype.getPixelWidth=function(){return this._orientation==Timeline.VERTICAL?this._containerDiv.offsetWidth:this._containerDiv.offsetHeight;
+};
+Timeline._Impl.prototype.getUnit=function(){return this._unit;
+};
+Timeline._Impl.prototype.loadXML=function(B,D){var A=this;
+var E=function(H,F,G){alert("Failed to load data xml from "+B+"\n"+H);
+A.hideLoadingMessage();
+};
+var C=function(G){try{var F=G.responseXML;
+if(!F.documentElement&&G.responseStream){F.load(G.responseStream);
+}D(F,B);
+}finally{A.hideLoadingMessage();
+}};
+this.showLoadingMessage();
+window.setTimeout(function(){SimileAjax.XmlHttp.get(B,E,C);
+},0);
+};
+Timeline._Impl.prototype.loadJSON=function(url,f){var tl=this;
+var fError=function(statusText,status,xmlhttp){alert("Failed to load json data from "+url+"\n"+statusText);
+tl.hideLoadingMessage();
+};
+var fDone=function(xmlhttp){try{f(eval("("+xmlhttp.responseText+")"),url);
+}finally{tl.hideLoadingMessage();
+}};
+this.showLoadingMessage();
+window.setTimeout(function(){SimileAjax.XmlHttp.get(url,fError,fDone);
+},0);
+};
+Timeline._Impl.prototype._initialize=function(){var E=this._containerDiv;
+var G=E.ownerDocument;
+E.className=E.className.split(" ").concat("timeline-container").join(" ");
+var A=(this.isHorizontal())?"horizontal":"vertical";
+E.className+=" timeline-"+A;
+while(E.firstChild){E.removeChild(E.firstChild);
+}var B=SimileAjax.Graphics.createTranslucentImage(Timeline.urlPrefix+(this.isHorizontal()?"data/timeline/copyright-vertical.png":"data/timeline/copyright.png"));
+B.className="timeline-copyright";
+B.title="Timeline (c) SIMILE - http://simile.mit.edu/timeline/";
+SimileAjax.DOM.registerEvent(B,"click",function(){window.location="http://simile.mit.edu/timeline/";
+});
+E.appendChild(B);
+this._bands=[];
+for(var C=0;
+C<this._bandInfos.length;
+C++){var F=this._bandInfos[C];
+var D=F.bandClass||Timeline._Band;
+var H=new D(this,F,C);
+this._bands.push(H);
+}this._distributeWidths();
+for(var C=0;
+C<this._bandInfos.length;
+C++){var F=this._bandInfos[C];
+if("syncWith" in F){this._bands[C].setSyncWithBand(this._bands[F.syncWith],("highlight" in F)?F.highlight:false);
+}}var I=SimileAjax.Graphics.createMessageBubble(G);
+I.containerDiv.className="timeline-message-container";
+E.appendChild(I.containerDiv);
+I.contentDiv.className="timeline-message";
+I.contentDiv.innerHTML="<img src='"+Timeline.urlPrefix+"data/timeline/progress-running.gif' /> Loading...";
+this.showLoadingMessage=function(){I.containerDiv.style.display="block";
+};
+this.hideLoadingMessage=function(){I.containerDiv.style.display="none";
+};
+};
+Timeline._Impl.prototype._distributeWidths=function(){var B=this.getPixelLength();
+var A=this.getPixelWidth();
+var D=0;
+for(var E=0;
+E<this._bands.length;
+E++){var I=this._bands[E];
+var J=this._bandInfos[E];
+var F=J.width;
+var H=F.indexOf("%");
+if(H>0){var G=parseInt(F.substr(0,H));
+var C=G*A/100;
+}else{var C=parseInt(F);
+}I.setBandShiftAndWidth(D,C);
+I.setViewLength(B);
+D+=C;
+}};
+Timeline._Impl.prototype.zoom=function(G,B,F,D){var C=new RegExp("^timeline-band-([0-9]+)$");
+var E=null;
+var A=C.exec(D.id);
+if(A){E=parseInt(A[1]);
+}if(E!=null){this._bands[E].zoom(G,B,F,D);
+}this.paint();
+};
+Timeline._Band=function(B,C,A){if(B!==undefined){this.initialize(B,C,A);
+}};
+Timeline._Band.prototype.initialize=function(F,G,B){this._timeline=F;
+this._bandInfo=G;
+this._index=B;
+this._locale=("locale" in G)?G.locale:Timeline.getDefaultLocale();
+this._timeZone=("timeZone" in G)?G.timeZone:0;
+this._labeller=("labeller" in G)?G.labeller:(("createLabeller" in F.getUnit())?F.getUnit().createLabeller(this._locale,this._timeZone):new Timeline.GregorianDateLabeller(this._locale,this._timeZone));
+this._theme=G.theme;
+this._zoomIndex=("zoomIndex" in G)?G.zoomIndex:0;
+this._zoomSteps=("zoomSteps" in G)?G.zoomSteps:null;
+this._dragging=false;
+this._changing=false;
+this._originalScrollSpeed=5;
+this._scrollSpeed=this._originalScrollSpeed;
+this._onScrollListeners=[];
+var A=this;
+this._syncWithBand=null;
+this._syncWithBandHandler=function(H){A._onHighlightBandScroll();
+};
+this._selectorListener=function(H){A._onHighlightBandScroll();
+};
+var D=this._timeline.getDocument().createElement("div");
+D.className="timeline-band-input";
+this._timeline.addDiv(D);
+this._keyboardInput=document.createElement("input");
+this._keyboardInput.type="text";
+D.appendChild(this._keyboardInput);
+SimileAjax.DOM.registerEventWithObject(this._keyboardInput,"keydown",this,"_onKeyDown");
+SimileAjax.DOM.registerEventWithObject(this._keyboardInput,"keyup",this,"_onKeyUp");
+this._div=this._timeline.getDocument().createElement("div");
+this._div.id="timeline-band-"+B;
+this._div.className="timeline-band timeline-band-"+B;
+this._timeline.addDiv(this._div);
+SimileAjax.DOM.registerEventWithObject(this._div,"mousedown",this,"_onMouseDown");
+SimileAjax.DOM.registerEventWithObject(this._div,"mousemove",this,"_onMouseMove");
+SimileAjax.DOM.registerEventWithObject(this._div,"mouseup",this,"_onMouseUp");
+SimileAjax.DOM.registerEventWithObject(this._div,"mouseout",this,"_onMouseOut");
+SimileAjax.DOM.registerEventWithObject(this._div,"dblclick",this,"_onDblClick");
+var E=this._theme!=null?this._theme.mouseWheel:"scroll";
+if(E==="zoom"||E==="scroll"||this._zoomSteps){if(SimileAjax.Platform.browser.isFirefox){SimileAjax.DOM.registerEventWithObject(this._div,"DOMMouseScroll",this,"_onMouseScroll");
+}else{SimileAjax.DOM.registerEventWithObject(this._div,"mousewheel",this,"_onMouseScroll");
+}}this._innerDiv=this._timeline.getDocument().createElement("div");
+this._innerDiv.className="timeline-band-inner";
+this._div.appendChild(this._innerDiv);
+this._ether=G.ether;
+G.ether.initialize(this,F);
+this._etherPainter=G.etherPainter;
+G.etherPainter.initialize(this,F);
+this._eventSource=G.eventSource;
+if(this._eventSource){this._eventListener={onAddMany:function(){A._onAddMany();
+},onClear:function(){A._onClear();
+}};
+this._eventSource.addListener(this._eventListener);
+}this._eventPainter=G.eventPainter;
+G.eventPainter.initialize(this,F);
+this._decorators=("decorators" in G)?G.decorators:[];
+for(var C=0;
+C<this._decorators.length;
+C++){this._decorators[C].initialize(this,F);
+}};
+Timeline._Band.SCROLL_MULTIPLES=5;
+Timeline._Band.prototype.dispose=function(){this.closeBubble();
+if(this._eventSource){this._eventSource.removeListener(this._eventListener);
+this._eventListener=null;
+this._eventSource=null;
+}this._timeline=null;
+this._bandInfo=null;
+this._labeller=null;
+this._ether=null;
+this._etherPainter=null;
+this._eventPainter=null;
+this._decorators=null;
+this._onScrollListeners=null;
+this._syncWithBandHandler=null;
+this._selectorListener=null;
+this._div=null;
+this._innerDiv=null;
+this._keyboardInput=null;
+};
+Timeline._Band.prototype.addOnScrollListener=function(A){this._onScrollListeners.push(A);
+};
+Timeline._Band.prototype.removeOnScrollListener=function(B){for(var A=0;
+A<this._onScrollListeners.length;
+A++){if(this._onScrollListeners[A]==B){this._onScrollListeners.splice(A,1);
+break;
+}}};
+Timeline._Band.prototype.setSyncWithBand=function(B,A){if(this._syncWithBand){this._syncWithBand.removeOnScrollListener(this._syncWithBandHandler);
+}this._syncWithBand=B;
+this._syncWithBand.addOnScrollListener(this._syncWithBandHandler);
+this._highlight=A;
+this._positionHighlight();
+};
+Timeline._Band.prototype.getLocale=function(){return this._locale;
+};
+Timeline._Band.prototype.getTimeZone=function(){return this._timeZone;
+};
+Timeline._Band.prototype.getLabeller=function(){return this._labeller;
+};
+Timeline._Band.prototype.getIndex=function(){return this._index;
+};
+Timeline._Band.prototype.getEther=function(){return this._ether;
+};
+Timeline._Band.prototype.getEtherPainter=function(){return this._etherPainter;
+};
+Timeline._Band.prototype.getEventSource=function(){return this._eventSource;
+};
+Timeline._Band.prototype.getEventPainter=function(){return this._eventPainter;
+};
+Timeline._Band.prototype.layout=function(){this.paint();
+};
+Timeline._Band.prototype.paint=function(){this._etherPainter.paint();
+this._paintDecorators();
+this._paintEvents();
+};
+Timeline._Band.prototype.softLayout=function(){this.softPaint();
+};
+Timeline._Band.prototype.softPaint=function(){this._etherPainter.softPaint();
+this._softPaintDecorators();
+this._softPaintEvents();
+};
+Timeline._Band.prototype.setBandShiftAndWidth=function(A,D){var C=this._keyboardInput.parentNode;
+var B=A+Math.floor(D/2);
+if(this._timeline.isHorizontal()){this._div.style.top=A+"px";
+this._div.style.height=D+"px";
+C.style.top=B+"px";
+C.style.left="-1em";
+}else{this._div.style.left=A+"px";
+this._div.style.width=D+"px";
+C.style.left=B+"px";
+C.style.top="-1em";
+}};
+Timeline._Band.prototype.getViewWidth=function(){if(this._timeline.isHorizontal()){return this._div.offsetHeight;
+}else{return this._div.offsetWidth;
+}};
+Timeline._Band.prototype.setViewLength=function(A){this._viewLength=A;
+this._recenterDiv();
+this._onChanging();
+};
+Timeline._Band.prototype.getViewLength=function(){return this._viewLength;
+};
+Timeline._Band.prototype.getTotalViewLength=function(){return Timeline._Band.SCROLL_MULTIPLES*this._viewLength;
+};
+Timeline._Band.prototype.getViewOffset=function(){return this._viewOffset;
+};
+Timeline._Band.prototype.getMinDate=function(){return this._ether.pixelOffsetToDate(this._viewOffset);
+};
+Timeline._Band.prototype.getMaxDate=function(){return this._ether.pixelOffsetToDate(this._viewOffset+Timeline._Band.SCROLL_MULTIPLES*this._viewLength);
+};
+Timeline._Band.prototype.getMinVisibleDate=function(){return this._ether.pixelOffsetToDate(0);
+};
+Timeline._Band.prototype.getMaxVisibleDate=function(){return this._ether.pixelOffsetToDate(this._viewLength);
+};
+Timeline._Band.prototype.getCenterVisibleDate=function(){return this._ether.pixelOffsetToDate(this._viewLength/2);
+};
+Timeline._Band.prototype.setMinVisibleDate=function(A){if(!this._changing){this._moveEther(Math.round(-this._ether.dateToPixelOffset(A)));
+}};
+Timeline._Band.prototype.setMaxVisibleDate=function(A){if(!this._changing){this._moveEther(Math.round(this._viewLength-this._ether.dateToPixelOffset(A)));
+}};
+Timeline._Band.prototype.setCenterVisibleDate=function(A){if(!this._changing){this._moveEther(Math.round(this._viewLength/2-this._ether.dateToPixelOffset(A)));
+}};
+Timeline._Band.prototype.dateToPixelOffset=function(A){return this._ether.dateToPixelOffset(A)-this._viewOffset;
+};
+Timeline._Band.prototype.pixelOffsetToDate=function(A){return this._ether.pixelOffsetToDate(A+this._viewOffset);
+};
+Timeline._Band.prototype.createLayerDiv=function(D,B){var C=this._timeline.getDocument().createElement("div");
+C.className="timeline-band-layer"+(typeof B=="string"?(" "+B):"");
+C.style.zIndex=D;
+this._innerDiv.appendChild(C);
+var A=this._timeline.getDocument().createElement("div");
+A.className="timeline-band-layer-inner";
+if(SimileAjax.Platform.browser.isIE){A.style.cursor="move";
+}else{A.style.cursor="-moz-grab";
+}C.appendChild(A);
+return A;
+};
+Timeline._Band.prototype.removeLayerDiv=function(A){this._innerDiv.removeChild(A.parentNode);
+};
+Timeline._Band.prototype.scrollToCenter=function(B,C){var A=this._ether.dateToPixelOffset(B);
+if(A<-this._viewLength/2){this.setCenterVisibleDate(this.pixelOffsetToDate(A+this._viewLength));
+}else{if(A>3*this._viewLength/2){this.setCenterVisibleDate(this.pixelOffsetToDate(A-this._viewLength));
+}}this._autoScroll(Math.round(this._viewLength/2-this._ether.dateToPixelOffset(B)),C);
+};
+Timeline._Band.prototype.showBubbleForEvent=function(C){var A=this.getEventSource().getEvent(C);
+if(A){var B=this;
+this.scrollToCenter(A.getStart(),function(){B._eventPainter.showBubble(A);
+});
+}};
+Timeline._Band.prototype.zoom=function(F,A,E,C){if(!this._zoomSteps){return ;
+}A+=this._viewOffset;
+var D=this._ether.pixelOffsetToDate(A);
+var B=this._ether.zoom(F);
+this._etherPainter.zoom(B);
+this._moveEther(Math.round(-this._ether.dateToPixelOffset(D)));
+this._moveEther(A);
+};
+Timeline._Band.prototype._onMouseDown=function(B,A,C){this.closeBubble();
+this._dragging=true;
+this._dragX=A.clientX;
+this._dragY=A.clientY;
+};
+Timeline._Band.prototype._onMouseMove=function(D,A,E){if(this._dragging){var C=A.clientX-this._dragX;
+var B=A.clientY-this._dragY;
+this._dragX=A.clientX;
+this._dragY=A.clientY;
+this._moveEther(this._timeline.isHorizontal()?C:B);
+this._positionHighlight();
+}};
+Timeline._Band.prototype._onMouseUp=function(B,A,C){this._dragging=false;
+this._keyboardInput.focus();
+};
+Timeline._Band.prototype._onMouseOut=function(B,A,D){var C=SimileAjax.DOM.getEventRelativeCoordinates(A,B);
+C.x+=this._viewOffset;
+if(C.x<0||C.x>B.offsetWidth||C.y<0||C.y>B.offsetHeight){this._dragging=false;
+}};
+Timeline._Band.prototype._onMouseScroll=function(G,I,E){var A=new Date();
+A=A.getTime();
+if(!this._lastScrollTime||((A-this._lastScrollTime)>50)){this._lastScrollTime=A;
+var H=0;
+if(I.wheelDelta){H=I.wheelDelta/120;
+}else{if(I.detail){H=-I.detail/3;
+}}var F=this._theme.mouseWheel;
+if(this._zoomSteps||F==="zoom"){var D=SimileAjax.DOM.getEventRelativeCoordinates(I,G);
+if(H!=0){var C;
+if(H>0){C=true;
+}if(H<0){C=false;
+}this._timeline.zoom(C,D.x,D.y,G);
+}}else{if(F==="scroll"){var B=50*(H<0?-1:1);
+this._moveEther(B);
+}}}if(I.stopPropagation){I.stopPropagation();
+}I.cancelBubble=true;
+if(I.preventDefault){I.preventDefault();
+}I.returnValue=false;
+};
+Timeline._Band.prototype._onDblClick=function(B,A,D){var C=SimileAjax.DOM.getEventRelativeCoordinates(A,B);
+var E=C.x-(this._viewLength/2-this._viewOffset);
+this._autoScroll(-E);
+};
+Timeline._Band.prototype._onKeyDown=function(B,A,C){if(!this._dragging){switch(A.keyCode){case 27:break;
+case 37:case 38:this._scrollSpeed=Math.min(50,Math.abs(this._scrollSpeed*1.05));
+this._moveEther(this._scrollSpeed);
+break;
+case 39:case 40:this._scrollSpeed=-Math.min(50,Math.abs(this._scrollSpeed*1.05));
+this._moveEther(this._scrollSpeed);
+break;
+default:return true;
+}this.closeBubble();
+SimileAjax.DOM.cancelEvent(A);
+return false;
+}return true;
+};
+Timeline._Band.prototype._onKeyUp=function(B,A,C){if(!this._dragging){this._scrollSpeed=this._originalScrollSpeed;
+switch(A.keyCode){case 35:this.setCenterVisibleDate(this._eventSource.getLatestDate());
+break;
+case 36:this.setCenterVisibleDate(this._eventSource.getEarliestDate());
+break;
+case 33:this._autoScroll(this._timeline.getPixelLength());
+break;
+case 34:this._autoScroll(-this._timeline.getPixelLength());
+break;
+default:return true;
+}this.closeBubble();
+SimileAjax.DOM.cancelEvent(A);
+return false;
+}return true;
+};
+Timeline._Band.prototype._autoScroll=function(D,C){var A=this;
+var B=SimileAjax.Graphics.createAnimation(function(E,F){A._moveEther(F);
+},0,D,1000,C);
+B.run();
+};
+Timeline._Band.prototype._moveEther=function(A){this.closeBubble();
+this._viewOffset+=A;
+this._ether.shiftPixels(-A);
+if(this._timeline.isHorizontal()){this._div.style.left=this._viewOffset+"px";
+}else{this._div.style.top=this._viewOffset+"px";
+}if(this._viewOffset>-this._viewLength*0.5||this._viewOffset<-this._viewLength*(Timeline._Band.SCROLL_MULTIPLES-1.5)){this._recenterDiv();
+}else{this.softLayout();
+}this._onChanging();
+};
+Timeline._Band.prototype._onChanging=function(){this._changing=true;
+this._fireOnScroll();
+this._setSyncWithBandDate();
+this._changing=false;
+};
+Timeline._Band.prototype._fireOnScroll=function(){for(var A=0;
+A<this._onScrollListeners.length;
+A++){this._onScrollListeners[A](this);
+}};
+Timeline._Band.prototype._setSyncWithBandDate=function(){if(this._syncWithBand){var A=this._ether.pixelOffsetToDate(this.getViewLength()/2);
+this._syncWithBand.setCenterVisibleDate(A);
+}};
+Timeline._Band.prototype._onHighlightBandScroll=function(){if(this._syncWithBand){var A=this._syncWithBand.getCenterVisibleDate();
+var B=this._ether.dateToPixelOffset(A);
+this._moveEther(Math.round(this._viewLength/2-B));
+if(this._highlight){this._etherPainter.setHighlight(this._syncWithBand.getMinVisibleDate(),this._syncWithBand.getMaxVisibleDate());
+}}};
+Timeline._Band.prototype._onAddMany=function(){this._paintEvents();
+};
+Timeline._Band.prototype._onClear=function(){this._paintEvents();
+};
+Timeline._Band.prototype._positionHighlight=function(){if(this._syncWithBand){var A=this._syncWithBand.getMinVisibleDate();
+var B=this._syncWithBand.getMaxVisibleDate();
+if(this._highlight){this._etherPainter.setHighlight(A,B);
+}}};
+Timeline._Band.prototype._recenterDiv=function(){this._viewOffset=-this._viewLength*(Timeline._Band.SCROLL_MULTIPLES-1)/2;
+if(this._timeline.isHorizontal()){this._div.style.left=this._viewOffset+"px";
+this._div.style.width=(Timeline._Band.SCROLL_MULTIPLES*this._viewLength)+"px";
+}else{this._div.style.top=this._viewOffset+"px";
+this._div.style.height=(Timeline._Band.SCROLL_MULTIPLES*this._viewLength)+"px";
+}this.layout();
+};
+Timeline._Band.prototype._paintEvents=function(){this._eventPainter.paint();
+};
+Timeline._Band.prototype._softPaintEvents=function(){this._eventPainter.softPaint();
+};
+Timeline._Band.prototype._paintDecorators=function(){for(var A=0;
+A<this._decorators.length;
+A++){this._decorators[A].paint();
+}};
+Timeline._Band.prototype._softPaintDecorators=function(){for(var A=0;
+A<this._decorators.length;
+A++){this._decorators[A].softPaint();
+}};
+Timeline._Band.prototype.closeBubble=function(){SimileAjax.WindowManager.cancelPopups();
+};
+
+
+/* units.js */
+Timeline.NativeDateUnit=new Object();
+Timeline.NativeDateUnit.createLabeller=function(A,B){return new Timeline.GregorianDateLabeller(A,B);
+};
+Timeline.NativeDateUnit.makeDefaultValue=function(){return new Date();
+};
+Timeline.NativeDateUnit.cloneValue=function(A){return new Date(A.getTime());
+};
+Timeline.NativeDateUnit.getParser=function(A){if(typeof A=="string"){A=A.toLowerCase();
+}return(A=="iso8601"||A=="iso 8601")?Timeline.DateTime.parseIso8601DateTime:Timeline.DateTime.parseGregorianDateTime;
+};
+Timeline.NativeDateUnit.parseFromObject=function(A){return Timeline.DateTime.parseGregorianDateTime(A);
+};
+Timeline.NativeDateUnit.toNumber=function(A){return A.getTime();
+};
+Timeline.NativeDateUnit.fromNumber=function(A){return new Date(A);
+};
+Timeline.NativeDateUnit.compare=function(D,C){var B,A;
+if(typeof D=="object"){B=D.getTime();
+}else{B=Number(D);
+}if(typeof C=="object"){A=C.getTime();
+}else{A=Number(C);
+}return B-A;
+};
+Timeline.NativeDateUnit.earlier=function(B,A){return Timeline.NativeDateUnit.compare(B,A)<0?B:A;
+};
+Timeline.NativeDateUnit.later=function(B,A){return Timeline.NativeDateUnit.compare(B,A)>0?B:A;
+};
+Timeline.NativeDateUnit.change=function(A,B){return new Date(A.getTime()+B);
+};
+/*==================================================
+ *  Timeline
+ *==================================================
+ */
+
+Timeline.strings = {}; // localization string tables
+
+Timeline.getDefaultLocale = function() {
+    return Timeline.clientLocale;
+};
+
+Timeline.create = function(elmt, bandInfos, orientation, unit) {
+    return new Timeline._Impl(elmt, bandInfos, orientation, unit);
+};
+
+Timeline.HORIZONTAL = 0;
+Timeline.VERTICAL = 1;
+
+Timeline._defaultTheme = null;
+
+Timeline.createBandInfo = function(params) {
+    var theme = ("theme" in params) ? params.theme : Timeline.getDefaultTheme();
+
+    var eventSource = ("eventSource" in params) ? params.eventSource : null;
+
+    var etherParams = {
+        interval:           SimileAjax.DateTime.gregorianUnitLengths[params.intervalUnit],
+        pixelsPerInterval:  params.intervalPixels
+    };
+    if ('startsOn' in params || 'endsOn' in params) {
+      if ('startsOn' in params) {
+	etherParams.startsOn = params.startsOn;
+      }
+      if ('endsOn' in params) {
+	etherParams.endsOn = params.endsOn;
+      }
+    } else {
+      if ('date' in params) {
+	etherParams.centersOn = params.date;
+      } else {
+	etherParams.centersOn = new Date();
+      }
+    }
+    var ether = new Timeline.LinearEther(etherParams);
+
+    var etherPainter = new Timeline.GregorianEtherPainter({
+        unit:       params.intervalUnit,
+        multiple:   ("multiple" in params) ? params.multiple : 1,
+        theme:      theme,
+        align:      ("align" in params) ? params.align : undefined
+    });
+
+    var eventPainterParams = {
+        showText:   ("showEventText" in params) ? params.showEventText : true,
+        theme:      theme
+    };
+    // pass in custom parameters for the event painter
+    if ("eventPainterParams" in params) {
+        for (var prop in params.eventPainterParams) {
+            eventPainterParams[prop] = params.eventPainterParams[prop];
+        }
+    }
+
+    if ("trackHeight" in params) {
+        eventPainterParams.trackHeight = params.trackHeight;
+    }
+    if ("trackGap" in params) {
+        eventPainterParams.trackGap = params.trackGap;
+    }
+
+    var layout = ("overview" in params && params.overview) ? "overview" : ("layout" in params ? params.layout : "original");
+    var eventPainter;
+    if ("eventPainter" in params) {
+        eventPainter = new params.eventPainter(eventPainterParams);
+    } else {
+        switch (layout) {
+            case "overview" :
+                eventPainter = new Timeline.OverviewEventPainter(eventPainterParams);
+                break;
+            case "detailed" :
+                eventPainter = new Timeline.DetailedEventPainter(eventPainterParams);
+                break;
+            default:
+                eventPainter = new Timeline.OriginalEventPainter(eventPainterParams);
+        }
+    }
+
+    return {
+        width:          params.width,
+        eventSource:    eventSource,
+        timeZone:       ("timeZone" in params) ? params.timeZone : 0,
+        ether:          ether,
+        etherPainter:   etherPainter,
+        eventPainter:   eventPainter,
+        theme:          theme,
+        zoomIndex:      ("zoomIndex" in params) ? params.zoomIndex : 0,
+        zoomSteps:      ("zoomSteps" in params) ? params.zoomSteps : null
+    };
+};
+
+Timeline.createHotZoneBandInfo = function(params) {
+    var theme = ("theme" in params) ? params.theme : Timeline.getDefaultTheme();
+
+    var eventSource = ("eventSource" in params) ? params.eventSource : null;
+
+    var ether = new Timeline.HotZoneEther({
+        centersOn:          ("date" in params) ? params.date : new Date(),
+        interval:           SimileAjax.DateTime.gregorianUnitLengths[params.intervalUnit],
+        pixelsPerInterval:  params.intervalPixels,
+        zones:              params.zones,
+        theme:              theme
+    });
+
+    var etherPainter = new Timeline.HotZoneGregorianEtherPainter({
+        unit:       params.intervalUnit,
+        zones:      params.zones,
+        theme:      theme,
+        align:      ("align" in params) ? params.align : undefined
+    });
+
+    var eventPainterParams = {
+        showText:   ("showEventText" in params) ? params.showEventText : true,
+        theme:      theme
+    };
+    // pass in custom parameters for the event painter
+    if ("eventPainterParams" in params) {
+        for (var prop in params.eventPainterParams) {
+            eventPainterParams[prop] = params.eventPainterParams[prop];
+        }
+    }
+    if ("trackHeight" in params) {
+        eventPainterParams.trackHeight = params.trackHeight;
+    }
+    if ("trackGap" in params) {
+        eventPainterParams.trackGap = params.trackGap;
+    }
+
+    var layout = ("overview" in params && params.overview) ? "overview" : ("layout" in params ? params.layout : "original");
+    var eventPainter;
+    if ("eventPainter" in params) {
+        eventPainter = new params.eventPainter(eventPainterParams);
+    } else {
+        switch (layout) {
+            case "overview" :
+                eventPainter = new Timeline.OverviewEventPainter(eventPainterParams);
+                break;
+            case "detailed" :
+                eventPainter = new Timeline.DetailedEventPainter(eventPainterParams);
+                break;
+            default:
+                eventPainter = new Timeline.OriginalEventPainter(eventPainterParams);
+        }
+    }
+    return {
+        width:          params.width,
+        eventSource:    eventSource,
+        timeZone:       ("timeZone" in params) ? params.timeZone : 0,
+        ether:          ether,
+        etherPainter:   etherPainter,
+        eventPainter:   eventPainter,
+        theme:          theme,
+        zoomIndex:      ("zoomIndex" in params) ? params.zoomIndex : 0,
+        zoomSteps:      ("zoomSteps" in params) ? params.zoomSteps : null
+    };
+};
+
+Timeline.getDefaultTheme = function() {
+    if (Timeline._defaultTheme == null) {
+        Timeline._defaultTheme = Timeline.ClassicTheme.create(Timeline.getDefaultLocale());
+    }
+    return Timeline._defaultTheme;
+};
+
+Timeline.setDefaultTheme = function(theme) {
+    Timeline._defaultTheme = theme;
+};
+
+Timeline.loadXML = function(url, f) {
+    var fError = function(statusText, status, xmlhttp) {
+        alert("Failed to load data xml from " + url + "\n" + statusText);
+    };
+    var fDone = function(xmlhttp) {
+        var xml = xmlhttp.responseXML;
+        if (!xml.documentElement && xmlhttp.responseStream) {
+            xml.load(xmlhttp.responseStream);
+        }
+        f(xml, url);
+    };
+    SimileAjax.XmlHttp.get(url, fError, fDone);
+};
+
+
+Timeline.loadJSON = function(url, f) {
+    var fError = function(statusText, status, xmlhttp) {
+        alert("Failed to load json data from " + url + "\n" + statusText);
+    };
+    var fDone = function(xmlhttp) {
+        f(eval('(' + xmlhttp.responseText + ')'), url);
+    };
+    SimileAjax.XmlHttp.get(url, fError, fDone);
+};
+
+
+Timeline._Impl = function(elmt, bandInfos, orientation, unit) {
+    SimileAjax.WindowManager.initialize();
+
+    this._containerDiv = elmt;
+
+    this._bandInfos = bandInfos;
+    this._orientation = orientation == null ? Timeline.HORIZONTAL : orientation;
+    this._unit = (unit != null) ? unit : SimileAjax.NativeDateUnit;
+
+    this._initialize();
+};
+
+Timeline._Impl.prototype.dispose = function() {
+    for (var i = 0; i < this._bands.length; i++) {
+        this._bands[i].dispose();
+    }
+    this._bands = null;
+    this._bandInfos = null;
+    this._containerDiv.innerHTML = "";
+};
+
+Timeline._Impl.prototype.getBandCount = function() {
+    return this._bands.length;
+};
+
+Timeline._Impl.prototype.getBand = function(index) {
+    return this._bands[index];
+};
+
+Timeline._Impl.prototype.layout = function() {
+    this._distributeWidths();
+};
+
+Timeline._Impl.prototype.paint = function() {
+    for (var i = 0; i < this._bands.length; i++) {
+        this._bands[i].paint();
+    }
+};
+
+Timeline._Impl.prototype.getDocument = function() {
+    return this._containerDiv.ownerDocument;
+};
+
+Timeline._Impl.prototype.addDiv = function(div) {
+    this._containerDiv.appendChild(div);
+};
+
+Timeline._Impl.prototype.removeDiv = function(div) {
+    this._containerDiv.removeChild(div);
+};
+
+Timeline._Impl.prototype.isHorizontal = function() {
+    return this._orientation == Timeline.HORIZONTAL;
+};
+
+Timeline._Impl.prototype.isVertical = function() {
+    return this._orientation == Timeline.VERTICAL;
+};
+
+Timeline._Impl.prototype.getPixelLength = function() {
+    return this._orientation == Timeline.HORIZONTAL ?
+        this._containerDiv.offsetWidth : this._containerDiv.offsetHeight;
+};
+
+Timeline._Impl.prototype.getPixelWidth = function() {
+    return this._orientation == Timeline.VERTICAL ?
+        this._containerDiv.offsetWidth : this._containerDiv.offsetHeight;
+};
+
+Timeline._Impl.prototype.getUnit = function() {
+    return this._unit;
+};
+
+Timeline._Impl.prototype.loadXML = function(url, f) {
+    var tl = this;
+
+
+    var fError = function(statusText, status, xmlhttp) {
+        alert("Failed to load data xml from " + url + "\n" + statusText);
+        tl.hideLoadingMessage();
+    };
+    var fDone = function(xmlhttp) {
+        try {
+            var xml = xmlhttp.responseXML;
+            if (!xml.documentElement && xmlhttp.responseStream) {
+                xml.load(xmlhttp.responseStream);
+            }
+            f(xml, url);
+        } finally {
+            tl.hideLoadingMessage();
+        }
+    };
+
+    this.showLoadingMessage();
+    window.setTimeout(function() { SimileAjax.XmlHttp.get(url, fError, fDone); }, 0);
+};
+
+Timeline._Impl.prototype.loadJSON = function(url, f) {
+    var tl = this;
+
+
+    var fError = function(statusText, status, xmlhttp) {
+        alert("Failed to load json data from " + url + "\n" + statusText);
+        tl.hideLoadingMessage();
+    };
+    var fDone = function(xmlhttp) {
+        try {
+            f(eval('(' + xmlhttp.responseText + ')'), url);
+        } finally {
+            tl.hideLoadingMessage();
+        }
+    };
+
+    this.showLoadingMessage();
+    window.setTimeout(function() { SimileAjax.XmlHttp.get(url, fError, fDone); }, 0);
+};
+
+Timeline._Impl.prototype._initialize = function() {
+    var containerDiv = this._containerDiv;
+    var doc = containerDiv.ownerDocument;
+
+    containerDiv.className =
+        containerDiv.className.split(" ").concat("timeline-container").join(" ");
+
+	/*
+	 * Set css-class on container div that will define orientation
+	 */
+	var orientation = (this.isHorizontal()) ? 'horizontal' : 'vertical'
+	containerDiv.className +=' timeline-'+orientation;
+
+
+    while (containerDiv.firstChild) {
+        containerDiv.removeChild(containerDiv.firstChild);
+    }
+
+    /*
+     *  inserting copyright and link to simile
+     */
+    var elmtCopyright = SimileAjax.Graphics.createTranslucentImage(Timeline.urlPrefix + (this.isHorizontal() ? "data/timeline/copyright-vertical.png" : "data/timeline/copyright.png"));
+    elmtCopyright.className = "timeline-copyright";
+    elmtCopyright.title = "Timeline (c) SIMILE - http://simile.mit.edu/timeline/";
+    SimileAjax.DOM.registerEvent(elmtCopyright, "click", function() { window.location = "http://simile.mit.edu/timeline/"; });
+    containerDiv.appendChild(elmtCopyright);
+
+    /*
+     *  creating bands
+     */
+    this._bands = [];
+    for (var i = 0; i < this._bandInfos.length; i++) {
+        var bandInfo = this._bandInfos[i];
+        var bandClass = bandInfo.bandClass || Timeline._Band;
+        var band = new bandClass(this, bandInfo, i);
+        this._bands.push(band);
+    }
+    this._distributeWidths();
+
+    /*
+     *  sync'ing bands
+     */
+    for (var i = 0; i < this._bandInfos.length; i++) {
+        var bandInfo = this._bandInfos[i];
+        if ("syncWith" in bandInfo) {
+            this._bands[i].setSyncWithBand(
+                this._bands[bandInfo.syncWith],
+                ("highlight" in bandInfo) ? bandInfo.highlight : false
+            );
+        }
+    }
+
+    /*
+     *  creating loading UI
+     */
+    var message = SimileAjax.Graphics.createMessageBubble(doc);
+    message.containerDiv.className = "timeline-message-container";
+    containerDiv.appendChild(message.containerDiv);
+
+    message.contentDiv.className = "timeline-message";
+    message.contentDiv.innerHTML = "<img src='" + Timeline.urlPrefix + "data/timeline/progress-running.gif' /> Loading...";
+
+    this.showLoadingMessage = function() { message.containerDiv.style.display = "block"; };
+    this.hideLoadingMessage = function() { message.containerDiv.style.display = "none"; };
+};
+
+Timeline._Impl.prototype._distributeWidths = function() {
+    var length = this.getPixelLength();
+    var width = this.getPixelWidth();
+    var cumulativeWidth = 0;
+
+    for (var i = 0; i < this._bands.length; i++) {
+        var band = this._bands[i];
+        var bandInfos = this._bandInfos[i];
+        var widthString = bandInfos.width;
+
+        var x = widthString.indexOf("%");
+        if (x > 0) {
+            var percent = parseInt(widthString.substr(0, x));
+            var bandWidth = percent * width / 100;
+        } else {
+            var bandWidth = parseInt(widthString);
+        }
+
+        band.setBandShiftAndWidth(cumulativeWidth, bandWidth);
+        band.setViewLength(length);
+
+        cumulativeWidth += bandWidth;
+    }
+};
+
+Timeline._Impl.prototype.zoom = function (zoomIn, x, y, target) {
+  var matcher = new RegExp("^timeline-band-([0-9]+)$");
+  var bandIndex = null;
+
+  var result = matcher.exec(target.id);
+  if (result) {
+    bandIndex = parseInt(result[1]);
+  }
+
+  if (bandIndex != null) {
+    this._bands[bandIndex].zoom(zoomIn, x, y, target);
+  }
+
+  this.paint();
+};
+
+/*==================================================
+ *  Band
+ *==================================================
+ */
+Timeline._Band = function(timeline, bandInfo, index) {
+    // hack for easier subclassing
+    if (timeline !== undefined) {
+        this.initialize(timeline, bandInfo, index);
+    }
+};
+
+Timeline._Band.prototype.initialize = function(timeline, bandInfo, index) {
+    this._timeline = timeline;
+    this._bandInfo = bandInfo;
+    this._index = index;
+
+    this._locale = ("locale" in bandInfo) ? bandInfo.locale : Timeline.getDefaultLocale();
+    this._timeZone = ("timeZone" in bandInfo) ? bandInfo.timeZone : 0;
+    this._labeller = ("labeller" in bandInfo) ? bandInfo.labeller :
+        (("createLabeller" in timeline.getUnit()) ?
+            timeline.getUnit().createLabeller(this._locale, this._timeZone) :
+            new Timeline.GregorianDateLabeller(this._locale, this._timeZone));
+    this._theme = bandInfo.theme;
+    this._zoomIndex = ("zoomIndex" in bandInfo) ? bandInfo.zoomIndex : 0;
+    this._zoomSteps = ("zoomSteps" in bandInfo) ? bandInfo.zoomSteps : null;
+
+    this._dragging = false;
+    this._changing = false;
+    this._originalScrollSpeed = 5; // pixels
+    this._scrollSpeed = this._originalScrollSpeed;
+    this._onScrollListeners = [];
+
+    var b = this;
+    this._syncWithBand = null;
+    this._syncWithBandHandler = function(band) {
+        b._onHighlightBandScroll();
+    };
+    this._selectorListener = function(band) {
+        b._onHighlightBandScroll();
+    };
+
+    /*
+     *  Install a textbox to capture keyboard events
+     */
+    var inputDiv = this._timeline.getDocument().createElement("div");
+    inputDiv.className = "timeline-band-input";
+    this._timeline.addDiv(inputDiv);
+
+    this._keyboardInput = document.createElement("input");
+    this._keyboardInput.type = "text";
+    inputDiv.appendChild(this._keyboardInput);
+    SimileAjax.DOM.registerEventWithObject(this._keyboardInput, "keydown", this, "_onKeyDown");
+    SimileAjax.DOM.registerEventWithObject(this._keyboardInput, "keyup", this, "_onKeyUp");
+
+    /*
+     *  The band's outer most div that slides with respect to the timeline's div
+     */
+    this._div = this._timeline.getDocument().createElement("div");
+    this._div.id = "timeline-band-" + index;
+    this._div.className = "timeline-band timeline-band-" + index;
+    this._timeline.addDiv(this._div);
+
+    SimileAjax.DOM.registerEventWithObject(this._div, "mousedown", this, "_onMouseDown");
+    SimileAjax.DOM.registerEventWithObject(this._div, "mousemove", this, "_onMouseMove");
+    SimileAjax.DOM.registerEventWithObject(this._div, "mouseup", this, "_onMouseUp");
+    SimileAjax.DOM.registerEventWithObject(this._div, "mouseout", this, "_onMouseOut");
+    SimileAjax.DOM.registerEventWithObject(this._div, "dblclick", this, "_onDblClick");
+
+    var mouseWheel = this._theme!= null ? this._theme.mouseWheel : 'scroll'; // theme is not always defined
+    if (mouseWheel === 'zoom' || mouseWheel === 'scroll' || this._zoomSteps) {
+    	// capture mouse scroll
+      if (SimileAjax.Platform.browser.isFirefox) {
+        SimileAjax.DOM.registerEventWithObject(this._div, "DOMMouseScroll", this, "_onMouseScroll");
+      } else {
+        SimileAjax.DOM.registerEventWithObject(this._div, "mousewheel", this, "_onMouseScroll");
+      }
+    }
+
+    /*
+     *  The inner div that contains layers
+     */
+    this._innerDiv = this._timeline.getDocument().createElement("div");
+    this._innerDiv.className = "timeline-band-inner";
+    this._div.appendChild(this._innerDiv);
+
+    /*
+     *  Initialize parts of the band
+     */
+    this._ether = bandInfo.ether;
+    bandInfo.ether.initialize(this, timeline);
+
+    this._etherPainter = bandInfo.etherPainter;
+    bandInfo.etherPainter.initialize(this, timeline);
+
+    this._eventSource = bandInfo.eventSource;
+    if (this._eventSource) {
+        this._eventListener = {
+            onAddMany: function() { b._onAddMany(); },
+            onClear:   function() { b._onClear(); }
+        }
+        this._eventSource.addListener(this._eventListener);
+    }
+
+    this._eventPainter = bandInfo.eventPainter;
+    bandInfo.eventPainter.initialize(this, timeline);
+
+    this._decorators = ("decorators" in bandInfo) ? bandInfo.decorators : [];
+    for (var i = 0; i < this._decorators.length; i++) {
+        this._decorators[i].initialize(this, timeline);
+    }
+};
+
+Timeline._Band.SCROLL_MULTIPLES = 5;
+
+Timeline._Band.prototype.dispose = function() {
+    this.closeBubble();
+
+    if (this._eventSource) {
+        this._eventSource.removeListener(this._eventListener);
+        this._eventListener = null;
+        this._eventSource = null;
+    }
+
+    this._timeline = null;
+    this._bandInfo = null;
+
+    this._labeller = null;
+    this._ether = null;
+    this._etherPainter = null;
+    this._eventPainter = null;
+    this._decorators = null;
+
+    this._onScrollListeners = null;
+    this._syncWithBandHandler = null;
+    this._selectorListener = null;
+
+    this._div = null;
+    this._innerDiv = null;
+    this._keyboardInput = null;
+};
+
+Timeline._Band.prototype.addOnScrollListener = function(listener) {
+    this._onScrollListeners.push(listener);
+};
+
+Timeline._Band.prototype.removeOnScrollListener = function(listener) {
+    for (var i = 0; i < this._onScrollListeners.length; i++) {
+        if (this._onScrollListeners[i] == listener) {
+            this._onScrollListeners.splice(i, 1);
+            break;
+        }
+    }
+};
+
+Timeline._Band.prototype.setSyncWithBand = function(band, highlight) {
+    if (this._syncWithBand) {
+        this._syncWithBand.removeOnScrollListener(this._syncWithBandHandler);
+    }
+
+    this._syncWithBand = band;
+    this._syncWithBand.addOnScrollListener(this._syncWithBandHandler);
+    this._highlight = highlight;
+    this._positionHighlight();
+};
+
+Timeline._Band.prototype.getLocale = function() {
+    return this._locale;
+};
+
+Timeline._Band.prototype.getTimeZone = function() {
+    return this._timeZone;
+};
+
+Timeline._Band.prototype.getLabeller = function() {
+    return this._labeller;
+};
+
+Timeline._Band.prototype.getIndex = function() {
+    return this._index;
+};
+
+Timeline._Band.prototype.getEther = function() {
+    return this._ether;
+};
+
+Timeline._Band.prototype.getEtherPainter = function() {
+    return this._etherPainter;
+};
+
+Timeline._Band.prototype.getEventSource = function() {
+    return this._eventSource;
+};
+
+Timeline._Band.prototype.getEventPainter = function() {
+    return this._eventPainter;
+};
+
+Timeline._Band.prototype.layout = function() {
+    this.paint();
+};
+
+Timeline._Band.prototype.paint = function() {
+    this._etherPainter.paint();
+    this._paintDecorators();
+    this._paintEvents();
+};
+
+Timeline._Band.prototype.softLayout = function() {
+    this.softPaint();
+};
+
+Timeline._Band.prototype.softPaint = function() {
+    this._etherPainter.softPaint();
+    this._softPaintDecorators();
+    this._softPaintEvents();
+};
+
+Timeline._Band.prototype.setBandShiftAndWidth = function(shift, width) {
+    var inputDiv = this._keyboardInput.parentNode;
+    var middle = shift + Math.floor(width / 2);
+    if (this._timeline.isHorizontal()) {
+        this._div.style.top = shift + "px";
+        this._div.style.height = width + "px";
+
+        inputDiv.style.top = middle + "px";
+        inputDiv.style.left = "-1em";
+    } else {
+        this._div.style.left = shift + "px";
+        this._div.style.width = width + "px";
+
+        inputDiv.style.left = middle + "px";
+        inputDiv.style.top = "-1em";
+    }
+};
+
+Timeline._Band.prototype.getViewWidth = function() {
+    if (this._timeline.isHorizontal()) {
+        return this._div.offsetHeight;
+    } else {
+        return this._div.offsetWidth;
+    }
+};
+
+Timeline._Band.prototype.setViewLength = function(length) {
+    this._viewLength = length;
+    this._recenterDiv();
+    this._onChanging();
+};
+
+Timeline._Band.prototype.getViewLength = function() {
+    return this._viewLength;
+};
+
+Timeline._Band.prototype.getTotalViewLength = function() {
+    return Timeline._Band.SCROLL_MULTIPLES * this._viewLength;
+};
+
+Timeline._Band.prototype.getViewOffset = function() {
+    return this._viewOffset;
+};
+
+Timeline._Band.prototype.getMinDate = function() {
+    return this._ether.pixelOffsetToDate(this._viewOffset);
+};
+
+Timeline._Band.prototype.getMaxDate = function() {
+    return this._ether.pixelOffsetToDate(this._viewOffset + Timeline._Band.SCROLL_MULTIPLES * this._viewLength);
+};
+
+Timeline._Band.prototype.getMinVisibleDate = function() {
+    return this._ether.pixelOffsetToDate(0);
+};
+
+Timeline._Band.prototype.getMaxVisibleDate = function() {
+    return this._ether.pixelOffsetToDate(this._viewLength);
+};
+
+Timeline._Band.prototype.getCenterVisibleDate = function() {
+    return this._ether.pixelOffsetToDate(this._viewLength / 2);
+};
+
+Timeline._Band.prototype.setMinVisibleDate = function(date) {
+    if (!this._changing) {
+        this._moveEther(Math.round(-this._ether.dateToPixelOffset(date)));
+    }
+};
+
+Timeline._Band.prototype.setMaxVisibleDate = function(date) {
+    if (!this._changing) {
+        this._moveEther(Math.round(this._viewLength - this._ether.dateToPixelOffset(date)));
+    }
+};
+
+Timeline._Band.prototype.setCenterVisibleDate = function(date) {
+    if (!this._changing) {
+        this._moveEther(Math.round(this._viewLength / 2 - this._ether.dateToPixelOffset(date)));
+    }
+};
+
+Timeline._Band.prototype.dateToPixelOffset = function(date) {
+    return this._ether.dateToPixelOffset(date) - this._viewOffset;
+};
+
+Timeline._Band.prototype.pixelOffsetToDate = function(pixels) {
+    return this._ether.pixelOffsetToDate(pixels + this._viewOffset);
+};
+
+Timeline._Band.prototype.createLayerDiv = function(zIndex, className) {
+    var div = this._timeline.getDocument().createElement("div");
+    div.className = "timeline-band-layer" + (typeof className == "string" ? (" " + className) : "");
+    div.style.zIndex = zIndex;
+    this._innerDiv.appendChild(div);
+
+    var innerDiv = this._timeline.getDocument().createElement("div");
+    innerDiv.className = "timeline-band-layer-inner";
+    if (SimileAjax.Platform.browser.isIE) {
+        innerDiv.style.cursor = "move";
+    } else {
+        innerDiv.style.cursor = "-moz-grab";
+    }
+    div.appendChild(innerDiv);
+
+    return innerDiv;
+};
+
+Timeline._Band.prototype.removeLayerDiv = function(div) {
+    this._innerDiv.removeChild(div.parentNode);
+};
+
+Timeline._Band.prototype.scrollToCenter = function(date, f) {
+    var pixelOffset = this._ether.dateToPixelOffset(date);
+    if (pixelOffset < -this._viewLength / 2) {
+        this.setCenterVisibleDate(this.pixelOffsetToDate(pixelOffset + this._viewLength));
+    } else if (pixelOffset > 3 * this._viewLength / 2) {
+        this.setCenterVisibleDate(this.pixelOffsetToDate(pixelOffset - this._viewLength));
+    }
+    this._autoScroll(Math.round(this._viewLength / 2 - this._ether.dateToPixelOffset(date)), f);
+};
+
+Timeline._Band.prototype.showBubbleForEvent = function(eventID) {
+    var evt = this.getEventSource().getEvent(eventID);
+    if (evt) {
+        var self = this;
+        this.scrollToCenter(evt.getStart(), function() {
+            self._eventPainter.showBubble(evt);
+        });
+    }
+};
+
+Timeline._Band.prototype.zoom = function(zoomIn, x, y, target) {
+  if (!this._zoomSteps) {
+    // zoom disabled
+    return;
+  }
+
+  // shift the x value by our offset
+  x += this._viewOffset;
+
+  var zoomDate = this._ether.pixelOffsetToDate(x);
+  var netIntervalChange = this._ether.zoom(zoomIn);
+  this._etherPainter.zoom(netIntervalChange);
+
+  // shift our zoom date to the far left
+  this._moveEther(Math.round(-this._ether.dateToPixelOffset(zoomDate)));
+  // then shift it back to where the mouse was
+  this._moveEther(x);
+};
+
+Timeline._Band.prototype._onMouseDown = function(innerFrame, evt, target) {
+    this.closeBubble();
+
+    this._dragging = true;
+    this._dragX = evt.clientX;
+    this._dragY = evt.clientY;
+};
+
+Timeline._Band.prototype._onMouseMove = function(innerFrame, evt, target) {
+    if (this._dragging) {
+        var diffX = evt.clientX - this._dragX;
+        var diffY = evt.clientY - this._dragY;
+
+        this._dragX = evt.clientX;
+        this._dragY = evt.clientY;
+
+        this._moveEther(this._timeline.isHorizontal() ? diffX : diffY);
+        this._positionHighlight();
+    }
+};
+
+Timeline._Band.prototype._onMouseUp = function(innerFrame, evt, target) {
+    this._dragging = false;
+    this._keyboardInput.focus();
+};
+
+Timeline._Band.prototype._onMouseOut = function(innerFrame, evt, target) {
+    var coords = SimileAjax.DOM.getEventRelativeCoordinates(evt, innerFrame);
+    coords.x += this._viewOffset;
+    if (coords.x < 0 || coords.x > innerFrame.offsetWidth ||
+        coords.y < 0 || coords.y > innerFrame.offsetHeight) {
+        this._dragging = false;
+    }
+};
+
+Timeline._Band.prototype._onMouseScroll = function(innerFrame, evt, target) {
+  var now = new Date();
+  now = now.getTime();
+
+  if (!this._lastScrollTime || ((now - this._lastScrollTime) > 50)) {
+    // throttle to one scroll per 50ms since FF3 sends multiple events back to back
+    this._lastScrollTime = now;
+
+    var delta = 0;
+    if (evt.wheelDelta) {
+      delta = evt.wheelDelta/120;
+    } else if (evt.detail) {
+      delta = -evt.detail/3;
+    }
+
+    // either scroll or zoom
+    var mouseWheel = this._theme.mouseWheel;
+
+    if (this._zoomSteps || mouseWheel === 'zoom') {
+      var loc = SimileAjax.DOM.getEventRelativeCoordinates(evt, innerFrame);
+      if (delta != 0) {
+        var zoomIn;
+        if (delta > 0)
+          zoomIn = true;
+        if (delta < 0)
+          zoomIn = false;
+        // call zoom on the timeline so we could zoom multiple bands if desired
+        this._timeline.zoom(zoomIn, loc.x, loc.y, innerFrame);
+      }
+    }
+    else if (mouseWheel === 'scroll') {
+    	var move_amt = 50 * (delta < 0 ? -1 : 1);
+      this._moveEther(move_amt);
+    }
+  }
+
+  // prevent bubble
+  if (evt.stopPropagation) {
+    evt.stopPropagation();
+  }
+  evt.cancelBubble = true;
+
+  // prevent the default action
+  if (evt.preventDefault) {
+    evt.preventDefault();
+  }
+  evt.returnValue = false;
+};
+
+Timeline._Band.prototype._onDblClick = function(innerFrame, evt, target) {
+    var coords = SimileAjax.DOM.getEventRelativeCoordinates(evt, innerFrame);
+    var distance = coords.x - (this._viewLength / 2 - this._viewOffset);
+
+    this._autoScroll(-distance);
+};
+
+Timeline._Band.prototype._onKeyDown = function(keyboardInput, evt, target) {
+    if (!this._dragging) {
+        switch (evt.keyCode) {
+        case 27: // ESC
+            break;
+        case 37: // left arrow
+        case 38: // up arrow
+            this._scrollSpeed = Math.min(50, Math.abs(this._scrollSpeed * 1.05));
+            this._moveEther(this._scrollSpeed);
+            break;
+        case 39: // right arrow
+        case 40: // down arrow
+            this._scrollSpeed = -Math.min(50, Math.abs(this._scrollSpeed * 1.05));
+            this._moveEther(this._scrollSpeed);
+            break;
+        default:
+            return true;
+        }
+        this.closeBubble();
+
+        SimileAjax.DOM.cancelEvent(evt);
+        return false;
+    }
+    return true;
+};
+
+Timeline._Band.prototype._onKeyUp = function(keyboardInput, evt, target) {
+    if (!this._dragging) {
+        this._scrollSpeed = this._originalScrollSpeed;
+
+        switch (evt.keyCode) {
+        case 35: // end
+            this.setCenterVisibleDate(this._eventSource.getLatestDate());
+            break;
+        case 36: // home
+            this.setCenterVisibleDate(this._eventSource.getEarliestDate());
+            break;
+        case 33: // page up
+            this._autoScroll(this._timeline.getPixelLength());
+            break;
+        case 34: // page down
+            this._autoScroll(-this._timeline.getPixelLength());
+            break;
+        default:
+            return true;
+        }
+
+        this.closeBubble();
+
+        SimileAjax.DOM.cancelEvent(evt);
+        return false;
+    }
+    return true;
+};
+
+Timeline._Band.prototype._autoScroll = function(distance, f) {
+    var b = this;
+    var a = SimileAjax.Graphics.createAnimation(
+        function(abs, diff) {
+            b._moveEther(diff);
+        },
+        0,
+        distance,
+        1000,
+        f
+    );
+    a.run();
+};
+
+Timeline._Band.prototype._moveEther = function(shift) {
+    this.closeBubble();
+
+    this._viewOffset += shift;
+    this._ether.shiftPixels(-shift);
+    if (this._timeline.isHorizontal()) {
+        this._div.style.left = this._viewOffset + "px";
+    } else {
+        this._div.style.top = this._viewOffset + "px";
+    }
+
+    if (this._viewOffset > -this._viewLength * 0.5 ||
+        this._viewOffset < -this._viewLength * (Timeline._Band.SCROLL_MULTIPLES - 1.5)) {
+
+        this._recenterDiv();
+    } else {
+        this.softLayout();
+    }
+
+    this._onChanging();
+}
+
+Timeline._Band.prototype._onChanging = function() {
+    this._changing = true;
+
+    this._fireOnScroll();
+    this._setSyncWithBandDate();
+
+    this._changing = false;
+};
+
+Timeline._Band.prototype._fireOnScroll = function() {
+    for (var i = 0; i < this._onScrollListeners.length; i++) {
+        this._onScrollListeners[i](this);
+    }
+};
+
+Timeline._Band.prototype._setSyncWithBandDate = function() {
+    if (this._syncWithBand) {
+        var centerDate = this._ether.pixelOffsetToDate(this.getViewLength() / 2);
+        this._syncWithBand.setCenterVisibleDate(centerDate);
+    }
+};
+
+Timeline._Band.prototype._onHighlightBandScroll = function() {
+    if (this._syncWithBand) {
+        var centerDate = this._syncWithBand.getCenterVisibleDate();
+        var centerPixelOffset = this._ether.dateToPixelOffset(centerDate);
+
+        this._moveEther(Math.round(this._viewLength / 2 - centerPixelOffset));
+
+        if (this._highlight) {
+            this._etherPainter.setHighlight(
+                this._syncWithBand.getMinVisibleDate(),
+                this._syncWithBand.getMaxVisibleDate());
+        }
+    }
+};
+
+Timeline._Band.prototype._onAddMany = function() {
+    this._paintEvents();
+};
+
+Timeline._Band.prototype._onClear = function() {
+    this._paintEvents();
+};
+
+Timeline._Band.prototype._positionHighlight = function() {
+    if (this._syncWithBand) {
+        var startDate = this._syncWithBand.getMinVisibleDate();
+        var endDate = this._syncWithBand.getMaxVisibleDate();
+
+        if (this._highlight) {
+            this._etherPainter.setHighlight(startDate, endDate);
+        }
+    }
+};
+
+Timeline._Band.prototype._recenterDiv = function() {
+    this._viewOffset = -this._viewLength * (Timeline._Band.SCROLL_MULTIPLES - 1) / 2;
+    if (this._timeline.isHorizontal()) {
+        this._div.style.left = this._viewOffset + "px";
+        this._div.style.width = (Timeline._Band.SCROLL_MULTIPLES * this._viewLength) + "px";
+    } else {
+        this._div.style.top = this._viewOffset + "px";
+        this._div.style.height = (Timeline._Band.SCROLL_MULTIPLES * this._viewLength) + "px";
+    }
+    this.layout();
+};
+
+Timeline._Band.prototype._paintEvents = function() {
+    this._eventPainter.paint();
+};
+
+Timeline._Band.prototype._softPaintEvents = function() {
+    this._eventPainter.softPaint();
+};
+
+Timeline._Band.prototype._paintDecorators = function() {
+    for (var i = 0; i < this._decorators.length; i++) {
+        this._decorators[i].paint();
+    }
+};
+
+Timeline._Band.prototype._softPaintDecorators = function() {
+    for (var i = 0; i < this._decorators.length; i++) {
+        this._decorators[i].softPaint();
+    }
+};
+
+Timeline._Band.prototype.closeBubble = function() {
+    SimileAjax.WindowManager.cancelPopups();
+};
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/cubicweb.timeline-ext.js	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,101 @@
+/*
+ *  :organization: Logilab
+ *  :copyright: 2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+ *
+ */
+
+
+/* provide our own custom date parser since the default
+ * one only understands iso8601 and gregorian dates
+ */
+Timeline.NativeDateUnit.getParser = function(format) {
+    if (typeof format == "string") {
+	if (format.indexOf('%') != -1) {
+	    return function(datestring) {
+		if (datestring) {
+		    return strptime(datestring, format);
+		}
+		return null;
+	    };
+	}
+        format = format.toLowerCase();
+    }
+    if (format == "iso8601" || format == "iso 8601") {
+	return Timeline.DateTime.parseIso8601DateTime;
+    }
+    return Timeline.DateTime.parseGregorianDateTime;
+};
+
+/*** CUBICWEB EVENT PAINTER *****************************************************/
+Timeline.CubicWebEventPainter = function(params) {
+//  Timeline.OriginalEventPainter.apply(this, arguments);
+   this._params = params;
+   this._onSelectListeners = [];
+
+   this._filterMatcher = null;
+   this._highlightMatcher = null;
+   this._frc = null;
+
+   this._eventIdToElmt = {};
+  this.foo = 'bar';
+};
+
+Timeline.CubicWebEventPainter.prototype = new Timeline.OriginalEventPainter();
+
+Timeline.CubicWebEventPainter.prototype._paintEventLabel = function(
+  evt, text, left, top, width, height, theme) {
+    var doc = this._timeline.getDocument();
+
+    var labelDiv = doc.createElement("div");
+    labelDiv.className = 'timeline-event-label';
+
+    labelDiv.style.left = left + "px";
+    labelDiv.style.width = width + "px";
+    labelDiv.style.top = top + "px";
+
+    if (evt._obj.onclick) {
+	labelDiv.appendChild(A({'href': evt._obj.onclick}, text));
+    } else if (evt._obj.image) {
+      labelDiv.appendChild(IMG({src: evt._obj.image, width: '30px', height: '30px'}));
+    } else {
+      labelDiv.innerHTML = text;
+    }
+
+    if(evt._title != null)
+        labelDiv.title = evt._title;
+
+    var color = evt.getTextColor();
+    if (color == null) {
+        color = evt.getColor();
+    }
+    if (color != null) {
+        labelDiv.style.color = color;
+    }
+    var classname = evt.getClassName();
+    if(classname) labelDiv.className +=' ' + classname;
+
+    this._eventLayer.appendChild(labelDiv);
+
+    return {
+        left:   left,
+        top:    top,
+        width:  width,
+        height: height,
+        elmt:   labelDiv
+    };
+};
+
+
+Timeline.CubicWebEventPainter.prototype._showBubble = function(x, y, evt) {
+  var div = DIV({id: 'xxx'});
+  var width = this._params.theme.event.bubble.width;
+  if (!evt._obj.bubbleUrl) {
+    evt.fillInfoBubble(div, this._params.theme, this._band.getLabeller());
+  }
+  SimileAjax.WindowManager.cancelPopups();
+  SimileAjax.Graphics.createBubbleForContentAndPoint(div, x, y, width);
+  if (evt._obj.bubbleUrl) {
+    jQuery('#xxx').loadxhtml(evt._obj.bubbleUrl, null, 'post', 'replace');
+  }
+};
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/cubicweb.timetable.css	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,114 @@
+/* styles for the timetable view
+ *
+ *  :organization: Logilab
+ *  :copyright: 2003-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+ */
+
+table.timetable { 
+  width: auto;
+  table-layout: fixed;
+}
+
+table.timetable th { 
+  padding:1pt;
+  align:center;
+}
+
+
+table.timetable td { 
+  width:10px; 
+}
+
+table.timetable td.ttlf { 
+  border-right:none;
+  border-left-width:2pt;
+  border-bottom:none;
+  border-top:none;
+}
+
+table.timetable td.ttrf { 
+  border-left: none;
+  border-right-width:2pt;
+  border-bottom:none;
+  border-top:none;
+}
+
+table.timetable td.ttmf { 
+  border: none;
+}
+
+table.timetable td.ttle { 
+  border-left-width:2pt;
+}
+
+table.timetable td.ttre { 
+  border-right-width:2pt;
+}
+
+table.timetable th.ttdate {
+  font-size :90%;
+  font-weight : normal;
+  font-family: Verdana, sans-serif;
+  padding-left: 4pt;
+  padding-right: 4pt;
+  align: left;
+  width: auto;
+}
+
+
+table.timetable td.ttempty { 
+  border:none;
+  border-top:1px solid #DFDFDF;
+  border-bottom:1px solid #DFDFDF;
+}
+
+/* div blocks in timetable are used as tooltips */
+table.timetable td.ttlf div,
+table.timetable td.ttmf div,
+table.timetable td.ttrf div,
+table.timetable td.ttle div,
+table.timetable td.ttme div,
+table.timetable td.ttre div { 
+  display: none; 
+}
+
+table.timetable td.ttlf:hover div,
+table.timetable td.ttmf:hover div,
+table.timetable td.ttrf:hover div
+{
+  font-style: normal;
+  display: block;
+  position: absolute;
+  padding: 5px;
+  color: #000;
+  border: 1px solid #bbb;
+  background: #ffc;
+  width:200px;
+}
+
+table.timetable td.col0 { background-color: #C33; }
+table.timetable td.col1 { background-color: #3C3; }
+table.timetable td.col2 { background-color: #33C; }
+table.timetable td.col3 { background-color: #CC3; }
+table.timetable td.col4 { background-color: #C3C; }
+table.timetable td.col5 { background-color: #3CC; }
+table.timetable td.colb { background-color: #2AA; }
+table.timetable td.col7 { background-color: #F2A; }
+table.timetable td.col8 { background-color: #AA2; }
+table.timetable td.col9 { background-color: #22A; }
+table.timetable td.cola { background-color: #2A2; }
+table.timetable td.col6 { background-color: #A22; }
+
+table.timetable td.col0:hover { background-color: #933; }
+table.timetable td.col1:hover { background-color: #393; }
+table.timetable td.col2:hover { background-color: #339; }
+table.timetable td.col3:hover { background-color: #993; }
+table.timetable td.col4:hover { background-color: #939; }
+table.timetable td.col5:hover { background-color: #399; }
+table.timetable td.colb:hover { background-color: #266; }
+table.timetable td.col7:hover { background-color: #626; }
+table.timetable td.col8:hover { background-color: #662; }
+table.timetable td.col9:hover { background-color: #226; }
+table.timetable td.cola:hover { background-color: #262; }
+table.timetable td.col6:hover { background-color: #622; }
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/cubicweb.widgets.js	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,299 @@
+/*
+ *  :organization: Logilab
+ *  :copyright: 2003-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+ *
+ *
+ */
+
+// widget namespace
+Widgets = {};
+
+
+/* this function takes a DOM node defining a widget and
+ * instantiates / builds the appropriate widget class
+ */
+function buildWidget(wdgnode) {
+    var wdgclass = Widgets[wdgnode.getAttribute('cubicweb:wdgtype')];
+    if (wdgclass) {
+	var wdg = new wdgclass(wdgnode);
+    }
+}
+
+/* This function is called on load and is in charge to build
+ * JS widgets according to DOM nodes found in the page
+ */
+function buildWidgets(root) {
+    root = root || document;
+    jQuery(root).find('.widget').each(function() {
+	if (this.getAttribute('cubicweb:loadtype') == 'auto') {
+	    buildWidget(this);
+	}
+    });
+}
+
+
+// we need to differentiate cases where buildWidgets is called
+// with one argument or without any argument. If we used `buildWidgets`
+// as the direct callback on the jQuery.ready event, jQuery would pass some
+// argument of its own, so we use this small anonymous function instead.
+jQuery(document).ready(function() {buildWidgets();});
+
+
+Widgets.SuggestField = defclass('SuggestField', null, {
+    __init__: function(node, options) {
+	var multi = node.getAttribute('cubicweb:multi') || "no";
+	options = options || {};
+	options.multiple = (multi == "yes") ? true : false;
+	var dataurl = node.getAttribute('cubicweb:dataurl');
+        var method = postJSON;
+	if (options.method == 'get'){
+	  method = function(url, data, callback) {
+	    // We can't rely on jQuery.getJSON because the server
+	    // might set the Content-Type's response header to 'text/plain'
+	    jQuery.get(url, data, function(response) {
+	      callback(evalJSON(response));
+	    });
+	  };
+	}
+	var self = this; // closure
+	method(dataurl, null, function(data) {
+	    // in case we received a list of couple, we assume that the first
+	    // element is the real value to be sent, and the second one is the
+	    // value to be displayed
+	    if (data.length && data[0].length == 2) {
+		options.formatItem = function(row) { return row[1]; };
+		self.hideRealValue(node);
+		self.setCurrentValue(node, data);
+	    }
+	    jQuery(node).autocomplete(data, options);
+	});
+    },
+
+    hideRealValue: function(node) {
+	var hidden = INPUT({'type': "hidden", 'name': node.name, 'value': node.value});
+	node.parentNode.appendChild(hidden);
+	// remove 'name' attribute from visible input so that it is not submitted
+	// and set correct value in the corresponding hidden field
+	jQuery(node).removeAttr('name').bind('result', function(_, row, _) {
+	    hidden.value = row[0];
+	});
+    },
+
+    setCurrentValue: function(node, data) {
+	// called when the data is loaded to reset the correct displayed
+	// value in the visible input field (typically replacing an eid
+	// by a displayable value)
+	var curvalue = node.value;
+	if (!node.value) {
+	    return;
+	}
+	for (var i=0,length=data.length; i<length; i++) {
+	    var row = data[i];
+	    if (row[0] == curvalue) {
+		node.value = row[1];
+		return;
+	    }
+	}
+    }
+});
+
+Widgets.StaticFileSuggestField = defclass('StaticSuggestField', [Widgets.SuggestField], {
+
+    __init__ : function(node) {
+	Widgets.SuggestField.__init__(this, node, {method: 'get'});
+    }
+
+});
+
+Widgets.RestrictedSuggestField = defclass('RestrictedSuggestField', [Widgets.SuggestField], {
+
+    __init__ : function(node) {
+	Widgets.SuggestField.__init__(this, node, {mustMatch: true});
+    }
+
+});
+
+
+/*
+ * suggestform displays a suggest field and associated validate / cancel buttons
+ * constructor's arguments are the same as the BaseSuggestField widget
+ */
+Widgets.SuggestForm = defclass("SuggestForm", null, {
+
+    __init__ : function(inputid, initfunc, varargs, validatefunc, options) {
+	this.validatefunc = validatefunc || noop;
+	this.sgfield = new Widgets.BaseSuggestField(inputid, initfunc,
+						    varargs, options);
+	this.oklabel = options.oklabel || 'ok';
+	this.cancellabel = options.cancellabel || 'cancel';
+	bindMethods(this);
+	connect(this.sgfield, 'validate', this, this.entryValidated);
+    },
+
+    show : function(parentnode) {
+	var sgnode = this.sgfield.builddom();
+	var buttons = DIV({'class' : "sgformbuttons"},
+			  [A({'href' : "javascript: noop();",
+			      'onclick' : this.onValidateClicked}, this.oklabel),
+			   ' / ',
+			   A({'href' : "javascript: noop();",
+			      'onclick' : this.destroy}, escapeHTML(this.cancellabel))]);
+	var formnode = DIV({'class' : "sgform"}, [sgnode, buttons]);
+ 	appendChildNodes(parentnode, formnode);
+	this.sgfield.textinput.focus();
+	this.formnode = formnode;
+	return formnode;
+    },
+
+    destroy : function() {
+	signal(this, 'destroy');
+	this.sgfield.destroy();
+	removeElement(this.formnode);
+    },
+
+    onValidateClicked : function() {
+	this.validatefunc(this, this.sgfield.taglist());
+    },
+    /* just an indirection to pass the form instead of the sgfield as first parameter */
+    entryValidated : function(sgfield, taglist) {
+	this.validatefunc(this, taglist);
+    }
+});
+
+
+/* called when the user clicks on a tree node
+ *  - if the node has a `cubicweb:loadurl` attribute, replace the content of the node
+ *    by the url's content.
+ *  - else, there's nothing to do, let the jquery plugin handle it.
+ */
+function toggleTree(event) {
+    var linode = jQuery(this);
+    var url = linode.attr('cubicweb:loadurl');
+    linode.find('ul.placeholder').remove();
+    if (url) {
+	linode.loadxhtml(url, {callback: function(domnode) {
+	    linode.removeAttr('cubicweb:loadurl');
+	    jQuery(domnode).treeview({toggle: toggleTree,
+				      prerendered: true});
+	    return null;
+	}}, 'post', 'append');
+    }
+}
+
+Widgets.TreeView = defclass("TreeView", null, {
+    __init__: function(wdgnode) {
+	jQuery(wdgnode).treeview({toggle: toggleTree,
+				  prerendered: true
+				 });
+    }
+});
+
+
+/* widget based on SIMILE's timeline widget
+ * http://code.google.com/p/simile-widgets/
+ *
+ * Beware not to mess with SIMILE's Timeline JS namespace!
+ */
+
+Widgets.TimelineWidget = defclass("TimelineWidget", null, {
+    __init__: function (wdgnode) {
+ 	var tldiv = DIV({id: "tl", style: 'height: 200px; border: 1px solid #ccc;'});
+	wdgnode.appendChild(tldiv);
+	var tlunit = wdgnode.getAttribute('cubicweb:tlunit') || 'YEAR';
+	var eventSource = new Timeline.DefaultEventSource();
+	var bandData = {
+	  eventPainter:     Timeline.CubicWebEventPainter,
+	  eventSource:    eventSource,
+	  width:          "100%",
+	  intervalUnit:   Timeline.DateTime[tlunit.toUpperCase()],
+	  intervalPixels: 100
+	};
+	var bandInfos = [ Timeline.createBandInfo(bandData) ];
+	var tl = Timeline.create(tldiv, bandInfos);
+	var loadurl = wdgnode.getAttribute('cubicweb:loadurl');
+	Timeline.loadJSON(loadurl, function(json, url) {
+			    eventSource.loadJSON(json, url); });
+
+    }
+});
+
+Widgets.TemplateTextField = defclass("TemplateTextField", null, {
+
+    __init__ : function(wdgnode) {
+	this.variables = getNodeAttribute(wdgnode, 'cubicweb:variables').split(',');
+	this.options = {'name' : wdgnode.getAttribute('cubicweb:inputname'),
+			'id'   : wdgnode.getAttribute('cubicweb:inputid'),
+			'rows' : wdgnode.getAttribute('cubicweb:rows') || 40,
+			'cols' : wdgnode.getAttribute('cubicweb:cols') || 80
+		       };
+	// this.variableRegexp = /%\((\w+)\)s/;
+	this.parentnode = wdgnode;
+    },
+
+    show : function(parentnode) {
+	parentnode = parentnode || this.parentnode;
+	this.errorField = DIV({'class' : "textfieldErrors"});
+	this.textField = TEXTAREA(this.options);
+	connect(this.textField, 'onkeyup', this, this.highlightInvalidVariables);
+	appendChildNodes(parentnode, this.textField, this.errorField);
+	appendChildNodes(parentnode, this.textField);
+    },
+
+    /* signal callbacks */
+
+    highlightInvalidVariables : function() {
+	var text = this.textField.value;
+	var unknownVariables = [];
+	var it=0;
+	var group = null;
+	var variableRegexp = /%\((\w+)\)s/g;
+	// emulates rgx.findAll()
+	while ( group=variableRegexp.exec(text) ) {
+	    if ( !this.variables.contains(group[1]) ) {
+		unknownVariables.push(group[1]);
+	    }
+	    it++;
+	    if (it > 5)
+		break;
+	}
+	var errText = '';
+	if (unknownVariables.length) {
+	    errText = "Detected invalid variables : " + ", ".join(unknownVariables);
+	}
+	this.errorField.innerHTML = errText;
+    }
+
+});
+
+/*
+ * ComboBox with a textinput : allows to add a new value
+ */
+
+Widgets.AddComboBox = defclass('AddComboBox', null, {
+   __init__ : function(wdgnode) {
+       jQuery("#add_newopt").click(function() {
+	  var new_val = jQuery("#newopt").val();
+	      if (!new_val){
+		  return false;
+	      }
+          name = wdgnode.getAttribute('name').split(':');
+	  this.rel = name[0];
+	  this.eid_to = name[1];
+          this.etype_to = wdgnode.getAttribute('cubicweb:etype_to');
+          this.etype_from = wdgnode.getAttribute('cubicweb:etype_from');
+     	  var d = async_remote_exec('add_and_link_new_entity', this.etype_to, this.rel, this.eid_to, this.etype_from, 'new_val');
+          d.addCallback(function (eid) {
+          jQuery(wdgnode).find("option[selected]").removeAttr("selected");
+          var new_option = OPTION({'value':eid, 'selected':'selected'}, value=new_val);
+          wdgnode.appendChild(new_option);
+          });
+          d.addErrback(function (xxx) {
+             log('xxx =', xxx);
+          });
+     });
+   }
+});
+
+
+CubicWeb.provide('widgets.js');
Binary file web/data/desc.gif has changed
Binary file web/data/download.gif has changed
Binary file web/data/dublincore-button.png has changed
Binary file web/data/dublincore-icon.png has changed
Binary file web/data/error.png has changed
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/external_resources	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,53 @@
+# -*- shell-script -*-
+###############################################################################
+#
+# external resources file for core library resources
+#
+# Commented values are default values used by the application.
+#
+###############################################################################
+
+
+# CSS stylesheets to include in HTML headers
+#STYLESHEETS = DATADIR/cubicweb.css
+
+# CSS stylesheets for print
+#STYLESHEETS_PRINT = DATADIR/cubicweb.print.css
+
+#CSS stylesheets for IE
+#IE_STYLESHEETS = DATADIR/cubicweb.ie.css
+
+# Javascripts files to include in HTML headers
+#JAVASCRIPTS = DATADIR/jquery.js, DATADIR/cubicweb.python.js, DATADIR/jquery.json.js, DATADIR/cubicweb.compat.js, DATADIR/cubicweb.htmlhelpers.js
+
+# path to favicon (relative to the application main script, seen as a
+# directory, hence .. when you are not using an absolute path)
+#FAVICON = DATADIR/favicon.ico
+
+# path to the logo (relative to the application main script, seen as a
+# directory, hence .. when you are not using an absolute path)
+LOGO = DATADIR/logo.png
+
+# rss logo (link to get the rss view of a selection)
+RSS_LOGO = DATADIR/rss.png
+RSS_LOGO_16 = DATADIR/feed-icon16x16.png
+RSS_LOGO_32 = DATADIR/feed-icon32x32.png
+
+# path to search image
+SEARCH_GO =  DATADIR/go.png
+
+#FCKEDITOR_PATH = /usr/share/fckeditor/
+
+
+# icons for entity types
+BOOKMARK_ICON = DATADIR/icon_bookmark.gif
+EMAILADDRESS_ICON = DATADIR/icon_emailaddress.gif
+EUSER_ICON = DATADIR/icon_euser.gif
+STATE_ICON = DATADIR/icon_state.gif
+
+# other icons
+CALENDAR_ICON = DATADIR/calendar.gif
+CANCEL_EMAIL_ICON = DATADIR/sendcancel.png
+SEND_EMAIL_ICON = DATADIR/sendok.png
+DOWNLOAD_ICON = DATADIR/download.gif
+GMARKER_ICON = DATADIR/gmap_blue_marker.png
\ No newline at end of file
Binary file web/data/favicon.ico has changed
Binary file web/data/feed-icon.png has changed
Binary file web/data/feed-icon16x16.png has changed
Binary file web/data/feed-icon32x32.png has changed
Binary file web/data/file.gif has changed
Binary file web/data/folder-closed.gif has changed
Binary file web/data/folder.gif has changed
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/gmap.utility.labeledmarker.js	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,2 @@
+/* http://code.google.com/p/gmaps-utility-library/source/browse/trunk/labeledmarker/ */
+function LabeledMarker(latlng,opt_opts){this.latlng_=latlng;this.opts_=opt_opts;this.labelText_=opt_opts.labelText||"";this.labelClass_=opt_opts.labelClass||"LabeledMarker_markerLabel";this.labelOffset_=opt_opts.labelOffset||new GSize(0,0);this.clickable_=opt_opts.clickable||true;this.title_=opt_opts.title||"";this.labelVisibility_=true;if(opt_opts.draggable){opt_opts.draggable=false}GMarker.apply(this,arguments)};LabeledMarker.prototype=new GMarker(new GLatLng(0,0));LabeledMarker.prototype.initialize=function(map){GMarker.prototype.initialize.apply(this,arguments);this.map_=map;this.div_=document.createElement("div");this.div_.className=this.labelClass_;this.div_.innerHTML=this.labelText_;this.div_.style.position="absolute";this.div_.style.cursor="pointer";this.div_.title=this.title_;map.getPane(G_MAP_MARKER_PANE).appendChild(this.div_);if(this.clickable_){function newEventPassthru(obj,event){return function(){GEvent.trigger(obj,event)}}var eventPassthrus=['click','dblclick','mousedown','mouseup','mouseover','mouseout'];for(var i=0;i<eventPassthrus.length;i++){var name=eventPassthrus[i];GEvent.addDomListener(this.div_,name,newEventPassthru(this,name))}}};LabeledMarker.prototype.redraw=function(force){GMarker.prototype.redraw.apply(this,arguments);this.redrawLabel_()};LabeledMarker.prototype.redrawLabel_=function(){var p=this.map_.fromLatLngToDivPixel(this.latlng_);var z=GOverlay.getZIndex(this.latlng_.lat());this.div_.style.left=(p.x+this.labelOffset_.width)+"px";this.div_.style.top=(p.y+this.labelOffset_.height)+"px";this.div_.style.zIndex=z};LabeledMarker.prototype.remove=function(){GEvent.clearInstanceListeners(this.div_);if(this.div_.outerHTML){this.div_.outerHTML=""}if(this.div_.parentNode){this.div_.parentNode.removeChild(this.div_)}this.div_=null;GMarker.prototype.remove.apply(this,arguments)};LabeledMarker.prototype.copy=function(){return new 
LabeledMarker(this.latlng_,this.opts_)};LabeledMarker.prototype.show=function(){GMarker.prototype.show.apply(this,arguments);if(this.labelVisibility_){this.showLabel()}else{this.hideLabel()}};LabeledMarker.prototype.hide=function(){GMarker.prototype.hide.apply(this,arguments);this.hideLabel()};LabeledMarker.prototype.setLatLng=function(latlng){this.latlng_=latlng;GMarker.prototype.setLatLng.apply(this,arguments);this.redrawLabel_()};LabeledMarker.prototype.setLabelVisibility=function(visibility){this.labelVisibility_=visibility;if(!this.isHidden()){if(this.labelVisibility_){this.showLabel()}else{this.hideLabel()}}};LabeledMarker.prototype.getLabelVisibility=function(){return this.labelVisibility_};LabeledMarker.prototype.hideLabel=function(){this.div_.style.visibility='hidden'};LabeledMarker.prototype.showLabel=function(){this.div_.style.visibility='visible'};
Binary file web/data/gmap_blue_marker.png has changed
Binary file web/data/go.png has changed
Binary file web/data/gradient-grey-up.png has changed
Binary file web/data/gradient-grey.gif has changed
Binary file web/data/help.png has changed
Binary file web/data/help_ie.png has changed
Binary file web/data/icon_blank.png has changed
Binary file web/data/icon_bookmark.gif has changed
Binary file web/data/icon_emailaddress.gif has changed
Binary file web/data/icon_euser.gif has changed
Binary file web/data/icon_map.png has changed
Binary file web/data/icon_state.gif has changed
Binary file web/data/information.png has changed
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/jquery.autocomplete.css	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,50 @@
+.ac_results {
+	padding: 0px;
+	border: 1px solid black;
+	background-color: white;
+	overflow: hidden;
+	z-index: 99999;
+}
+
+.ac_results ul {
+	width: 100%;
+	list-style-position: outside;
+	list-style: none;
+	padding: 0;
+	margin: 0;
+}
+
+.ac_results li {
+	margin: 0px;
+	padding: 2px 5px;
+	cursor: default;
+	display: block;
+	/* 
+	if width will be 100% horizontal scrollbar will apear 
+	when scroll mode will be used
+	*/
+	/*width: 100%;*/
+	font: menu;
+	font-size: 12px;
+	/* 
+	it is very important, if line-height not setted or setted 
+	in relative units scroll will be broken in firefox
+	*/
+	line-height: 16px;
+	overflow: hidden;
+        background-image: none;
+        padding: 0px 0px 1px 1px;
+}
+
+.ac_loading {
+	background: white url('indicator.gif') right center no-repeat;
+}
+
+.ac_odd {
+	background-color: #eee;
+}
+
+.ac_over {
+	background-color: #0A246A;
+	color: white;
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/jquery.autocomplete.js	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,15 @@
+/*
+ * Autocomplete - jQuery plugin 1.0.2
+ *
+ * Copyright (c) 2007 Dylan Verheul, Dan G. Switzer, Anjesh Tuladhar, Jörn Zaefferer
+ *
+ * Dual licensed under the MIT and GPL licenses:
+ *   http://www.opensource.org/licenses/mit-license.php
+ *   http://www.gnu.org/licenses/gpl.html
+ *
+ * Revision: $Id: jquery.autocomplete.js 5747 2008-06-25 18:30:55Z joern.zaefferer $
+ *
+ */;(function($){$.fn.extend({autocomplete:function(urlOrData,options){var isUrl=typeof urlOrData=="string";options=$.extend({},$.Autocompleter.defaults,{url:isUrl?urlOrData:null,data:isUrl?null:urlOrData,delay:isUrl?$.Autocompleter.defaults.delay:10,max:options&&!options.scroll?10:150},options);options.highlight=options.highlight||function(value){return value;};options.formatMatch=options.formatMatch||options.formatItem;return this.each(function(){new $.Autocompleter(this,options);});},result:function(handler){return this.bind("result",handler);},search:function(handler){return this.trigger("search",[handler]);},flushCache:function(){return this.trigger("flushCache");},setOptions:function(options){return this.trigger("setOptions",[options]);},unautocomplete:function(){return this.trigger("unautocomplete");}});$.Autocompleter=function(input,options){var KEY={UP:38,DOWN:40,DEL:46,TAB:9,RETURN:13,ESC:27,COMMA:188,PAGEUP:33,PAGEDOWN:34,BACKSPACE:8};var $input=$(input).attr("autocomplete","off").addClass(options.inputClass);var timeout;var previousValue="";var cache=$.Autocompleter.Cache(options);var hasFocus=0;var lastKeyPressCode;var config={mouseDownOnSelect:false};var select=$.Autocompleter.Select(options,input,selectCurrent,config);var blockSubmit;$.browser.opera&&$(input.form).bind("submit.autocomplete",function(){if(blockSubmit){blockSubmit=false;return false;}});$input.bind(($.browser.opera?"keypress":"keydown")+".autocomplete",function(event){lastKeyPressCode=event.keyCode;switch(event.keyCode){case KEY.UP:event.preventDefault();if(select.visible()){select.prev();}else{onChange(0,true);}break;case KEY.DOWN:event.preventDefault();if(select.visible()){select.next();}else{onChange(0,true);}break;case KEY.PAGEUP:event.preventDefault();if(select.visible()){select.pageUp();}else{onChange(0,true);}break;case KEY.PAGEDOWN:event.preventDefault();if(select.visible()){select.pageDown();}else{onChange(0,true);}break;case 
options.multiple&&$.trim(options.multipleSeparator)==","&&KEY.COMMA:case KEY.TAB:case KEY.RETURN:if(selectCurrent()){event.preventDefault();blockSubmit=true;return false;}break;case KEY.ESC:select.hide();break;default:clearTimeout(timeout);timeout=setTimeout(onChange,options.delay);break;}}).focus(function(){hasFocus++;}).blur(function(){hasFocus=0;if(!config.mouseDownOnSelect){hideResults();}}).click(function(){if(hasFocus++>1&&!select.visible()){onChange(0,true);}}).bind("search",function(){var fn=(arguments.length>1)?arguments[1]:null;function findValueCallback(q,data){var result;if(data&&data.length){for(var i=0;i<data.length;i++){if(data[i].result.toLowerCase()==q.toLowerCase()){result=data[i];break;}}}if(typeof fn=="function")fn(result);else $input.trigger("result",result&&[result.data,result.value]);}$.each(trimWords($input.val()),function(i,value){request(value,findValueCallback,findValueCallback);});}).bind("flushCache",function(){cache.flush();}).bind("setOptions",function(){$.extend(options,arguments[1]);if("data"in arguments[1])cache.populate();}).bind("unautocomplete",function(){select.unbind();$input.unbind();$(input.form).unbind(".autocomplete");});function selectCurrent(){var selected=select.selected();if(!selected)return false;var v=selected.result;previousValue=v;if(options.multiple){var words=trimWords($input.val());if(words.length>1){v=words.slice(0,words.length-1).join(options.multipleSeparator)+options.multipleSeparator+v;}v+=options.multipleSeparator;}$input.val(v);hideResultsNow();$input.trigger("result",[selected.data,selected.value]);return true;}function onChange(crap,skipPrevCheck){if(lastKeyPressCode==KEY.DEL){select.hide();return;}var 
currentValue=$input.val();if(!skipPrevCheck&&currentValue==previousValue)return;previousValue=currentValue;currentValue=lastWord(currentValue);if(currentValue.length>=options.minChars){$input.addClass(options.loadingClass);if(!options.matchCase)currentValue=currentValue.toLowerCase();request(currentValue,receiveData,hideResultsNow);}else{stopLoading();select.hide();}};function trimWords(value){if(!value){return[""];}var words=value.split(options.multipleSeparator);var result=[];$.each(words,function(i,value){if($.trim(value))result[i]=$.trim(value);});return result;}function lastWord(value){if(!options.multiple)return value;var words=trimWords(value);return words[words.length-1];}function autoFill(q,sValue){if(options.autoFill&&(lastWord($input.val()).toLowerCase()==q.toLowerCase())&&lastKeyPressCode!=KEY.BACKSPACE){$input.val($input.val()+sValue.substring(lastWord(previousValue).length));$.Autocompleter.Selection(input,previousValue.length,previousValue.length+sValue.length);}};function hideResults(){clearTimeout(timeout);timeout=setTimeout(hideResultsNow,200);};function hideResultsNow(){var wasVisible=select.visible();select.hide();clearTimeout(timeout);stopLoading();if(options.mustMatch){$input.search(function(result){if(!result){if(options.multiple){var words=trimWords($input.val()).slice(0,-1);$input.val(words.join(options.multipleSeparator)+(words.length?options.multipleSeparator:""));}else
+$input.val("");}});}if(wasVisible)$.Autocompleter.Selection(input,input.value.length,input.value.length);};function receiveData(q,data){if(data&&data.length&&hasFocus){stopLoading();select.display(data,q);autoFill(q,data[0].value);select.show();}else{hideResultsNow();}};function request(term,success,failure){if(!options.matchCase)term=term.toLowerCase();var data=cache.load(term);if(data&&data.length){success(term,data);}else if((typeof options.url=="string")&&(options.url.length>0)){var extraParams={timestamp:+new Date()};$.each(options.extraParams,function(key,param){extraParams[key]=typeof param=="function"?param():param;});$.ajax({mode:"abort",port:"autocomplete"+input.name,dataType:options.dataType,url:options.url,data:$.extend({q:lastWord(term),limit:options.max},extraParams),success:function(data){var parsed=options.parse&&options.parse(data)||parse(data);cache.add(term,parsed);success(term,parsed);}});}else{select.emptyList();failure(term);}};function parse(data){var parsed=[];var rows=data.split("\n");for(var i=0;i<rows.length;i++){var row=$.trim(rows[i]);if(row){row=row.split("|");parsed[parsed.length]={data:row,value:row[0],result:options.formatResult&&options.formatResult(row,row[0])||row[0]};}}return parsed;};function stopLoading(){$input.removeClass(options.loadingClass);};};$.Autocompleter.defaults={inputClass:"ac_input",resultsClass:"ac_results",loadingClass:"ac_loading",minChars:1,delay:400,matchCase:false,matchSubset:true,matchContains:false,cacheLength:10,max:100,mustMatch:false,extraParams:{},selectFirst:true,formatItem:function(row){return row[0];},formatMatch:null,autoFill:false,width:0,multiple:false,multipleSeparator:", ",highlight:function(value,term){return value.replace(new RegExp("(?![^&;]+;)(?!<[^<>]*)("+term.replace(/([\^\$\(\)\[\]\{\}\*\.\+\?\|\\])/gi,"\\$1")+")(?![^<>]*>)(?![^&;]+;)","gi"),"<strong>$1</strong>");},scroll:true,scrollHeight:180};$.Autocompleter.Cache=function(options){var data={};var length=0;function 
matchSubset(s,sub){if(!options.matchCase)s=s.toLowerCase();var i=s.indexOf(sub);if(i==-1)return false;return i==0||options.matchContains;};function add(q,value){if(length>options.cacheLength){flush();}if(!data[q]){length++;}data[q]=value;}function populate(){if(!options.data)return false;var stMatchSets={},nullData=0;if(!options.url)options.cacheLength=1;stMatchSets[""]=[];for(var i=0,ol=options.data.length;i<ol;i++){var rawValue=options.data[i];rawValue=(typeof rawValue=="string")?[rawValue]:rawValue;var value=options.formatMatch(rawValue,i+1,options.data.length);if(value===false)continue;var firstChar=value.charAt(0).toLowerCase();if(!stMatchSets[firstChar])stMatchSets[firstChar]=[];var row={value:value,data:rawValue,result:options.formatResult&&options.formatResult(rawValue)||value};stMatchSets[firstChar].push(row);if(nullData++<options.max){stMatchSets[""].push(row);}};$.each(stMatchSets,function(i,value){options.cacheLength++;add(i,value);});}setTimeout(populate,25);function flush(){data={};length=0;}return{flush:flush,add:add,populate:populate,load:function(q){if(!options.cacheLength||!length)return null;if(!options.url&&options.matchContains){var csub=[];for(var k in data){if(k.length>0){var c=data[k];$.each(c,function(i,x){if(matchSubset(x.value,q)){csub.push(x);}});}}return csub;}else
+if(data[q]){return data[q];}else
+if(options.matchSubset){for(var i=q.length-1;i>=options.minChars;i--){var c=data[q.substr(0,i)];if(c){var csub=[];$.each(c,function(i,x){if(matchSubset(x.value,q)){csub[csub.length]=x;}});return csub;}}}return null;}};};$.Autocompleter.Select=function(options,input,select,config){var CLASSES={ACTIVE:"ac_over"};var listItems,active=-1,data,term="",needsInit=true,element,list;function init(){if(!needsInit)return;element=$("<div/>").hide().addClass(options.resultsClass).css("position","absolute").appendTo(document.body);list=$("<ul/>").appendTo(element).mouseover(function(event){if(target(event).nodeName&&target(event).nodeName.toUpperCase()=='LI'){active=$("li",list).removeClass(CLASSES.ACTIVE).index(target(event));$(target(event)).addClass(CLASSES.ACTIVE);}}).click(function(event){$(target(event)).addClass(CLASSES.ACTIVE);select();input.focus();return false;}).mousedown(function(){config.mouseDownOnSelect=true;}).mouseup(function(){config.mouseDownOnSelect=false;});if(options.width>0)element.css("width",options.width);needsInit=false;}function target(event){var element=event.target;while(element&&element.tagName!="LI")element=element.parentNode;if(!element)return[];return element;}function moveSelect(step){listItems.slice(active,active+1).removeClass(CLASSES.ACTIVE);movePosition(step);var activeItem=listItems.slice(active,active+1).addClass(CLASSES.ACTIVE);if(options.scroll){var offset=0;listItems.slice(0,active).each(function(){offset+=this.offsetHeight;});if((offset+activeItem[0].offsetHeight-list.scrollTop())>list[0].clientHeight){list.scrollTop(offset+activeItem[0].offsetHeight-list.innerHeight());}else if(offset<list.scrollTop()){list.scrollTop(offset);}}};function movePosition(step){active+=step;if(active<0){active=listItems.size()-1;}else if(active>=listItems.size()){active=0;}}function limitNumberOfItems(available){return options.max&&options.max<available?options.max:available;}function fillList(){list.empty();var 
max=limitNumberOfItems(data.length);for(var i=0;i<max;i++){if(!data[i])continue;var formatted=options.formatItem(data[i].data,i+1,max,data[i].value,term);if(formatted===false)continue;var li=$("<li/>").html(options.highlight(formatted,term)).addClass(i%2==0?"ac_even":"ac_odd").appendTo(list)[0];$.data(li,"ac_data",data[i]);}listItems=list.find("li");if(options.selectFirst){listItems.slice(0,1).addClass(CLASSES.ACTIVE);active=0;}if($.fn.bgiframe)list.bgiframe();}return{display:function(d,q){init();data=d;term=q;fillList();},next:function(){moveSelect(1);},prev:function(){moveSelect(-1);},pageUp:function(){if(active!=0&&active-8<0){moveSelect(-active);}else{moveSelect(-8);}},pageDown:function(){if(active!=listItems.size()-1&&active+8>listItems.size()){moveSelect(listItems.size()-1-active);}else{moveSelect(8);}},hide:function(){element&&element.hide();listItems&&listItems.removeClass(CLASSES.ACTIVE);active=-1;},visible:function(){return element&&element.is(":visible");},current:function(){return this.visible()&&(listItems.filter("."+CLASSES.ACTIVE)[0]||options.selectFirst&&listItems[0]);},show:function(){var offset=$(input).offset();element.css({width:typeof options.width=="string"||options.width>0?options.width:$(input).width(),top:offset.top+input.offsetHeight,left:offset.left}).show();if(options.scroll){list.scrollTop(0);list.css({maxHeight:options.scrollHeight,overflow:'auto'});if($.browser.msie&&typeof document.body.style.maxHeight==="undefined"){var listHeight=0;listItems.each(function(){listHeight+=this.offsetHeight;});var scrollbarsVisible=listHeight>options.scrollHeight;list.css('height',scrollbarsVisible?options.scrollHeight:listHeight);if(!scrollbarsVisible){listItems.width(list.width()-parseInt(listItems.css("padding-left"))-parseInt(listItems.css("padding-right")));}}}},selected:function(){var selected=listItems&&listItems.filter("."+CLASSES.ACTIVE).removeClass(CLASSES.ACTIVE);return 
selected&&selected.length&&$.data(selected[0],"ac_data");},emptyList:function(){list&&list.empty();},unbind:function(){element&&element.remove();}};};$.Autocompleter.Selection=function(field,start,end){if(field.createTextRange){var selRange=field.createTextRange();selRange.collapse(true);selRange.moveStart("character",start);selRange.moveEnd("character",end);selRange.select();}else if(field.setSelectionRange){field.setSelectionRange(start,end);}else{if(field.selectionStart){field.selectionStart=start;field.selectionEnd=end;}}field.focus();};})(jQuery);
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/jquery.js	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,32 @@
+/*
+ * jQuery 1.2.6 - New Wave Javascript
+ *
+ * Copyright (c) 2008 John Resig (jquery.com)
+ * Dual licensed under the MIT (MIT-LICENSE.txt)
+ * and GPL (GPL-LICENSE.txt) licenses.
+ *
+ * $Date: 2008-05-24 14:22:17 -0400 (Sat, 24 May 2008) $
+ * $Rev: 5685 $
+ */
+(function(){var _jQuery=window.jQuery,_$=window.$;var jQuery=window.jQuery=window.$=function(selector,context){return new jQuery.fn.init(selector,context);};var quickExpr=/^[^<]*(<(.|\s)+>)[^>]*$|^#(\w+)$/,isSimple=/^.[^:#\[\.]*$/,undefined;jQuery.fn=jQuery.prototype={init:function(selector,context){selector=selector||document;if(selector.nodeType){this[0]=selector;this.length=1;return this;}if(typeof selector=="string"){var match=quickExpr.exec(selector);if(match&&(match[1]||!context)){if(match[1])selector=jQuery.clean([match[1]],context);else{var elem=document.getElementById(match[3]);if(elem){if(elem.id!=match[3])return jQuery().find(selector);return jQuery(elem);}selector=[];}}else
+return jQuery(context).find(selector);}else if(jQuery.isFunction(selector))return jQuery(document)[jQuery.fn.ready?"ready":"load"](selector);return this.setArray(jQuery.makeArray(selector));},jquery:"1.2.6",size:function(){return this.length;},length:0,get:function(num){return num==undefined?jQuery.makeArray(this):this[num];},pushStack:function(elems){var ret=jQuery(elems);ret.prevObject=this;return ret;},setArray:function(elems){this.length=0;Array.prototype.push.apply(this,elems);return this;},each:function(callback,args){return jQuery.each(this,callback,args);},index:function(elem){var ret=-1;return jQuery.inArray(elem&&elem.jquery?elem[0]:elem,this);},attr:function(name,value,type){var options=name;if(name.constructor==String)if(value===undefined)return this[0]&&jQuery[type||"attr"](this[0],name);else{options={};options[name]=value;}return this.each(function(i){for(name in options)jQuery.attr(type?this.style:this,name,jQuery.prop(this,options[name],type,i,name));});},css:function(key,value){if((key=='width'||key=='height')&&parseFloat(value)<0)value=undefined;return this.attr(key,value,"curCSS");},text:function(text){if(typeof text!="object"&&text!=null)return this.empty().append((this[0]&&this[0].ownerDocument||document).createTextNode(text));var ret="";jQuery.each(text||this,function(){jQuery.each(this.childNodes,function(){if(this.nodeType!=8)ret+=this.nodeType!=1?this.nodeValue:jQuery.fn.text([this]);});});return ret;},wrapAll:function(html){if(this[0])jQuery(html,this[0].ownerDocument).clone().insertBefore(this[0]).map(function(){var elem=this;while(elem.firstChild)elem=elem.firstChild;return elem;}).append(this);return this;},wrapInner:function(html){return this.each(function(){jQuery(this).contents().wrapAll(html);});},wrap:function(html){return this.each(function(){jQuery(this).wrapAll(html);});},append:function(){return this.domManip(arguments,true,false,function(elem){if(this.nodeType==1)this.appendChild(elem);});},prepend:function(){return 
this.domManip(arguments,true,true,function(elem){if(this.nodeType==1)this.insertBefore(elem,this.firstChild);});},before:function(){return this.domManip(arguments,false,false,function(elem){this.parentNode.insertBefore(elem,this);});},after:function(){return this.domManip(arguments,false,true,function(elem){this.parentNode.insertBefore(elem,this.nextSibling);});},end:function(){return this.prevObject||jQuery([]);},find:function(selector){var elems=jQuery.map(this,function(elem){return jQuery.find(selector,elem);});return this.pushStack(/[^+>] [^+>]/.test(selector)||selector.indexOf("..")>-1?jQuery.unique(elems):elems);},clone:function(events){var ret=this.map(function(){if(jQuery.browser.msie&&!jQuery.isXMLDoc(this)){var clone=this.cloneNode(true),container=document.createElement("div");container.appendChild(clone);return jQuery.clean([container.innerHTML])[0];}else
+return this.cloneNode(true);});var clone=ret.find("*").andSelf().each(function(){if(this[expando]!=undefined)this[expando]=null;});if(events===true)this.find("*").andSelf().each(function(i){if(this.nodeType==3)return;var events=jQuery.data(this,"events");for(var type in events)for(var handler in events[type])jQuery.event.add(clone[i],type,events[type][handler],events[type][handler].data);});return ret;},filter:function(selector){return this.pushStack(jQuery.isFunction(selector)&&jQuery.grep(this,function(elem,i){return selector.call(elem,i);})||jQuery.multiFilter(selector,this));},not:function(selector){if(selector.constructor==String)if(isSimple.test(selector))return this.pushStack(jQuery.multiFilter(selector,this,true));else
+selector=jQuery.multiFilter(selector,this);var isArrayLike=selector.length&&selector[selector.length-1]!==undefined&&!selector.nodeType;return this.filter(function(){return isArrayLike?jQuery.inArray(this,selector)<0:this!=selector;});},add:function(selector){return this.pushStack(jQuery.unique(jQuery.merge(this.get(),typeof selector=='string'?jQuery(selector):jQuery.makeArray(selector))));},is:function(selector){return!!selector&&jQuery.multiFilter(selector,this).length>0;},hasClass:function(selector){return this.is("."+selector);},val:function(value){if(value==undefined){if(this.length){var elem=this[0];if(jQuery.nodeName(elem,"select")){var index=elem.selectedIndex,values=[],options=elem.options,one=elem.type=="select-one";if(index<0)return null;for(var i=one?index:0,max=one?index+1:options.length;i<max;i++){var option=options[i];if(option.selected){value=jQuery.browser.msie&&!option.attributes.value.specified?option.text:option.value;if(one)return value;values.push(value);}}return values;}else
+return(this[0].value||"").replace(/\r/g,"");}return undefined;}if(value.constructor==Number)value+='';return this.each(function(){if(this.nodeType!=1)return;if(value.constructor==Array&&/radio|checkbox/.test(this.type))this.checked=(jQuery.inArray(this.value,value)>=0||jQuery.inArray(this.name,value)>=0);else if(jQuery.nodeName(this,"select")){var values=jQuery.makeArray(value);jQuery("option",this).each(function(){this.selected=(jQuery.inArray(this.value,values)>=0||jQuery.inArray(this.text,values)>=0);});if(!values.length)this.selectedIndex=-1;}else
+this.value=value;});},html:function(value){return value==undefined?(this[0]?this[0].innerHTML:null):this.empty().append(value);},replaceWith:function(value){return this.after(value).remove();},eq:function(i){return this.slice(i,i+1);},slice:function(){return this.pushStack(Array.prototype.slice.apply(this,arguments));},map:function(callback){return this.pushStack(jQuery.map(this,function(elem,i){return callback.call(elem,i,elem);}));},andSelf:function(){return this.add(this.prevObject);},data:function(key,value){var parts=key.split(".");parts[1]=parts[1]?"."+parts[1]:"";if(value===undefined){var data=this.triggerHandler("getData"+parts[1]+"!",[parts[0]]);if(data===undefined&&this.length)data=jQuery.data(this[0],key);return data===undefined&&parts[1]?this.data(parts[0]):data;}else
+return this.trigger("setData"+parts[1]+"!",[parts[0],value]).each(function(){jQuery.data(this,key,value);});},removeData:function(key){return this.each(function(){jQuery.removeData(this,key);});},domManip:function(args,table,reverse,callback){var clone=this.length>1,elems;return this.each(function(){if(!elems){elems=jQuery.clean(args,this.ownerDocument);if(reverse)elems.reverse();}var obj=this;if(table&&jQuery.nodeName(this,"table")&&jQuery.nodeName(elems[0],"tr"))obj=this.getElementsByTagName("tbody")[0]||this.appendChild(this.ownerDocument.createElement("tbody"));var scripts=jQuery([]);jQuery.each(elems,function(){var elem=clone?jQuery(this).clone(true)[0]:this;if(jQuery.nodeName(elem,"script"))scripts=scripts.add(elem);else{if(elem.nodeType==1)scripts=scripts.add(jQuery("script",elem).remove());callback.call(obj,elem);}});scripts.each(evalScript);});}};jQuery.fn.init.prototype=jQuery.fn;function evalScript(i,elem){if(elem.src)jQuery.ajax({url:elem.src,async:false,dataType:"script"});else
+jQuery.globalEval(elem.text||elem.textContent||elem.innerHTML||"");if(elem.parentNode)elem.parentNode.removeChild(elem);}function now(){return+new Date;}jQuery.extend=jQuery.fn.extend=function(){var target=arguments[0]||{},i=1,length=arguments.length,deep=false,options;if(target.constructor==Boolean){deep=target;target=arguments[1]||{};i=2;}if(typeof target!="object"&&typeof target!="function")target={};if(length==i){target=this;--i;}for(;i<length;i++)if((options=arguments[i])!=null)for(var name in options){var src=target[name],copy=options[name];if(target===copy)continue;if(deep&&copy&&typeof copy=="object"&&!copy.nodeType)target[name]=jQuery.extend(deep,src||(copy.length!=null?[]:{}),copy);else if(copy!==undefined)target[name]=copy;}return target;};var expando="jQuery"+now(),uuid=0,windowData={},exclude=/z-?index|font-?weight|opacity|zoom|line-?height/i,defaultView=document.defaultView||{};jQuery.extend({noConflict:function(deep){window.$=_$;if(deep)window.jQuery=_jQuery;return jQuery;},isFunction:function(fn){return!!fn&&typeof fn!="string"&&!fn.nodeName&&fn.constructor!=Array&&/^[\s[]?function/.test(fn+"");},isXMLDoc:function(elem){return elem.documentElement&&!elem.body||elem.tagName&&elem.ownerDocument&&!elem.ownerDocument.body;},globalEval:function(data){data=jQuery.trim(data);if(data){var head=document.getElementsByTagName("head")[0]||document.documentElement,script=document.createElement("script");script.type="text/javascript";if(jQuery.browser.msie)script.text=data;else
+script.appendChild(document.createTextNode(data));head.insertBefore(script,head.firstChild);head.removeChild(script);}},nodeName:function(elem,name){return elem.nodeName&&elem.nodeName.toUpperCase()==name.toUpperCase();},cache:{},data:function(elem,name,data){elem=elem==window?windowData:elem;var id=elem[expando];if(!id)id=elem[expando]=++uuid;if(name&&!jQuery.cache[id])jQuery.cache[id]={};if(data!==undefined)jQuery.cache[id][name]=data;return name?jQuery.cache[id][name]:id;},removeData:function(elem,name){elem=elem==window?windowData:elem;var id=elem[expando];if(name){if(jQuery.cache[id]){delete jQuery.cache[id][name];name="";for(name in jQuery.cache[id])break;if(!name)jQuery.removeData(elem);}}else{try{delete elem[expando];}catch(e){if(elem.removeAttribute)elem.removeAttribute(expando);}delete jQuery.cache[id];}},each:function(object,callback,args){var name,i=0,length=object.length;if(args){if(length==undefined){for(name in object)if(callback.apply(object[name],args)===false)break;}else
+for(;i<length;)if(callback.apply(object[i++],args)===false)break;}else{if(length==undefined){for(name in object)if(callback.call(object[name],name,object[name])===false)break;}else
+for(var value=object[0];i<length&&callback.call(value,i,value)!==false;value=object[++i]){}}return object;},prop:function(elem,value,type,i,name){if(jQuery.isFunction(value))value=value.call(elem,i);return value&&value.constructor==Number&&type=="curCSS"&&!exclude.test(name)?value+"px":value;},className:{add:function(elem,classNames){jQuery.each((classNames||"").split(/\s+/),function(i,className){if(elem.nodeType==1&&!jQuery.className.has(elem.className,className))elem.className+=(elem.className?" ":"")+className;});},remove:function(elem,classNames){if(elem.nodeType==1)elem.className=classNames!=undefined?jQuery.grep(elem.className.split(/\s+/),function(className){return!jQuery.className.has(classNames,className);}).join(" "):"";},has:function(elem,className){return jQuery.inArray(className,(elem.className||elem).toString().split(/\s+/))>-1;}},swap:function(elem,options,callback){var old={};for(var name in options){old[name]=elem.style[name];elem.style[name]=options[name];}callback.call(elem);for(var name in options)elem.style[name]=old[name];},css:function(elem,name,force){if(name=="width"||name=="height"){var val,props={position:"absolute",visibility:"hidden",display:"block"},which=name=="width"?["Left","Right"]:["Top","Bottom"];function getWH(){val=name=="width"?elem.offsetWidth:elem.offsetHeight;var padding=0,border=0;jQuery.each(which,function(){padding+=parseFloat(jQuery.curCSS(elem,"padding"+this,true))||0;border+=parseFloat(jQuery.curCSS(elem,"border"+this+"Width",true))||0;});val-=Math.round(padding+border);}if(jQuery(elem).is(":visible"))getWH();else
+jQuery.swap(elem,props,getWH);return Math.max(0,val);}return jQuery.curCSS(elem,name,force);},curCSS:function(elem,name,force){var ret,style=elem.style;function color(elem){if(!jQuery.browser.safari)return false;var ret=defaultView.getComputedStyle(elem,null);return!ret||ret.getPropertyValue("color")=="";}if(name=="opacity"&&jQuery.browser.msie){ret=jQuery.attr(style,"opacity");return ret==""?"1":ret;}if(jQuery.browser.opera&&name=="display"){var save=style.outline;style.outline="0 solid black";style.outline=save;}if(name.match(/float/i))name=styleFloat;if(!force&&style&&style[name])ret=style[name];else if(defaultView.getComputedStyle){if(name.match(/float/i))name="float";name=name.replace(/([A-Z])/g,"-$1").toLowerCase();var computedStyle=defaultView.getComputedStyle(elem,null);if(computedStyle&&!color(elem))ret=computedStyle.getPropertyValue(name);else{var swap=[],stack=[],a=elem,i=0;for(;a&&color(a);a=a.parentNode)stack.unshift(a);for(;i<stack.length;i++)if(color(stack[i])){swap[i]=stack[i].style.display;stack[i].style.display="block";}ret=name=="display"&&swap[stack.length-1]!=null?"none":(computedStyle&&computedStyle.getPropertyValue(name))||"";for(i=0;i<swap.length;i++)if(swap[i]!=null)stack[i].style.display=swap[i];}if(name=="opacity"&&ret=="")ret="1";}else if(elem.currentStyle){var camelCase=name.replace(/\-(\w)/g,function(all,letter){return letter.toUpperCase();});ret=elem.currentStyle[name]||elem.currentStyle[camelCase];if(!/^\d+(px)?$/i.test(ret)&&/^\d/.test(ret)){var left=style.left,rsLeft=elem.runtimeStyle.left;elem.runtimeStyle.left=elem.currentStyle.left;style.left=ret||0;ret=style.pixelLeft+"px";style.left=left;elem.runtimeStyle.left=rsLeft;}}return ret;},clean:function(elems,context){var ret=[];context=context||document;if(typeof context.createElement=='undefined')context=context.ownerDocument||context[0]&&context[0].ownerDocument||document;jQuery.each(elems,function(i,elem){if(!elem)return;if(elem.constructor==Number)elem+='';if(typeof 
elem=="string"){elem=elem.replace(/(<(\w+)[^>]*?)\/>/g,function(all,front,tag){return tag.match(/^(abbr|br|col|img|input|link|meta|param|hr|area|embed)$/i)?all:front+"></"+tag+">";});var tags=jQuery.trim(elem).toLowerCase(),div=context.createElement("div");var wrap=!tags.indexOf("<opt")&&[1,"<select multiple='multiple'>","</select>"]||!tags.indexOf("<leg")&&[1,"<fieldset>","</fieldset>"]||tags.match(/^<(thead|tbody|tfoot|colg|cap)/)&&[1,"<table>","</table>"]||!tags.indexOf("<tr")&&[2,"<table><tbody>","</tbody></table>"]||(!tags.indexOf("<td")||!tags.indexOf("<th"))&&[3,"<table><tbody><tr>","</tr></tbody></table>"]||!tags.indexOf("<col")&&[2,"<table><tbody></tbody><colgroup>","</colgroup></table>"]||jQuery.browser.msie&&[1,"div<div>","</div>"]||[0,"",""];div.innerHTML=wrap[1]+elem+wrap[2];while(wrap[0]--)div=div.lastChild;if(jQuery.browser.msie){var tbody=!tags.indexOf("<table")&&tags.indexOf("<tbody")<0?div.firstChild&&div.firstChild.childNodes:wrap[1]=="<table>"&&tags.indexOf("<tbody")<0?div.childNodes:[];for(var j=tbody.length-1;j>=0;--j)if(jQuery.nodeName(tbody[j],"tbody")&&!tbody[j].childNodes.length)tbody[j].parentNode.removeChild(tbody[j]);if(/^\s/.test(elem))div.insertBefore(context.createTextNode(elem.match(/^\s*/)[0]),div.firstChild);}elem=jQuery.makeArray(div.childNodes);}if(elem.length===0&&(!jQuery.nodeName(elem,"form")&&!jQuery.nodeName(elem,"select")))return;if(elem[0]==undefined||jQuery.nodeName(elem,"form")||elem.options)ret.push(elem);else
+ret=jQuery.merge(ret,elem);});return ret;},attr:function(elem,name,value){if(!elem||elem.nodeType==3||elem.nodeType==8)return undefined;var notxml=!jQuery.isXMLDoc(elem),set=value!==undefined,msie=jQuery.browser.msie;name=notxml&&jQuery.props[name]||name;if(elem.tagName){var special=/href|src|style/.test(name);if(name=="selected"&&jQuery.browser.safari)elem.parentNode.selectedIndex;if(name in elem&&notxml&&!special){if(set){if(name=="type"&&jQuery.nodeName(elem,"input")&&elem.parentNode)throw"type property can't be changed";elem[name]=value;}if(jQuery.nodeName(elem,"form")&&elem.getAttributeNode(name))return elem.getAttributeNode(name).nodeValue;return elem[name];}if(msie&&notxml&&name=="style")return jQuery.attr(elem.style,"cssText",value);if(set)elem.setAttribute(name,""+value);var attr=msie&&notxml&&special?elem.getAttribute(name,2):elem.getAttribute(name);return attr===null?undefined:attr;}if(msie&&name=="opacity"){if(set){elem.zoom=1;elem.filter=(elem.filter||"").replace(/alpha\([^)]*\)/,"")+(parseInt(value)+''=="NaN"?"":"alpha(opacity="+value*100+")");}return elem.filter&&elem.filter.indexOf("opacity=")>=0?(parseFloat(elem.filter.match(/opacity=([^)]*)/)[1])/100)+'':"";}name=name.replace(/-([a-z])/ig,function(all,letter){return letter.toUpperCase();});if(set)elem[name]=value;return elem[name];},trim:function(text){return(text||"").replace(/^\s+|\s+$/g,"");},makeArray:function(array){var ret=[];if(array!=null){var i=array.length;if(i==null||array.split||array.setInterval||array.call)ret[0]=array;else
+while(i)ret[--i]=array[i];}return ret;},inArray:function(elem,array){for(var i=0,length=array.length;i<length;i++)if(array[i]===elem)return i;return-1;},merge:function(first,second){var i=0,elem,pos=first.length;if(jQuery.browser.msie){while(elem=second[i++])if(elem.nodeType!=8)first[pos++]=elem;}else
+while(elem=second[i++])first[pos++]=elem;return first;},unique:function(array){var ret=[],done={};try{for(var i=0,length=array.length;i<length;i++){var id=jQuery.data(array[i]);if(!done[id]){done[id]=true;ret.push(array[i]);}}}catch(e){ret=array;}return ret;},grep:function(elems,callback,inv){var ret=[];for(var i=0,length=elems.length;i<length;i++)if(!inv!=!callback(elems[i],i))ret.push(elems[i]);return ret;},map:function(elems,callback){var ret=[];for(var i=0,length=elems.length;i<length;i++){var value=callback(elems[i],i);if(value!=null)ret[ret.length]=value;}return ret.concat.apply([],ret);}});var userAgent=navigator.userAgent.toLowerCase();jQuery.browser={version:(userAgent.match(/.+(?:rv|it|ra|ie)[\/: ]([\d.]+)/)||[])[1],safari:/webkit/.test(userAgent),opera:/opera/.test(userAgent),msie:/msie/.test(userAgent)&&!/opera/.test(userAgent),mozilla:/mozilla/.test(userAgent)&&!/(compatible|webkit)/.test(userAgent)};var styleFloat=jQuery.browser.msie?"styleFloat":"cssFloat";jQuery.extend({boxModel:!jQuery.browser.msie||document.compatMode=="CSS1Compat",props:{"for":"htmlFor","class":"className","float":styleFloat,cssFloat:styleFloat,styleFloat:styleFloat,readonly:"readOnly",maxlength:"maxLength",cellspacing:"cellSpacing"}});jQuery.each({parent:function(elem){return elem.parentNode;},parents:function(elem){return jQuery.dir(elem,"parentNode");},next:function(elem){return jQuery.nth(elem,2,"nextSibling");},prev:function(elem){return jQuery.nth(elem,2,"previousSibling");},nextAll:function(elem){return jQuery.dir(elem,"nextSibling");},prevAll:function(elem){return jQuery.dir(elem,"previousSibling");},siblings:function(elem){return jQuery.sibling(elem.parentNode.firstChild,elem);},children:function(elem){return jQuery.sibling(elem.firstChild);},contents:function(elem){return jQuery.nodeName(elem,"iframe")?elem.contentDocument||elem.contentWindow.document:jQuery.makeArray(elem.childNodes);}},function(name,fn){jQuery.fn[name]=function(selector){var 
ret=jQuery.map(this,fn);if(selector&&typeof selector=="string")ret=jQuery.multiFilter(selector,ret);return this.pushStack(jQuery.unique(ret));};});jQuery.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(name,original){jQuery.fn[name]=function(){var args=arguments;return this.each(function(){for(var i=0,length=args.length;i<length;i++)jQuery(args[i])[original](this);});};});jQuery.each({removeAttr:function(name){jQuery.attr(this,name,"");if(this.nodeType==1)this.removeAttribute(name);},addClass:function(classNames){jQuery.className.add(this,classNames);},removeClass:function(classNames){jQuery.className.remove(this,classNames);},toggleClass:function(classNames){jQuery.className[jQuery.className.has(this,classNames)?"remove":"add"](this,classNames);},remove:function(selector){if(!selector||jQuery.filter(selector,[this]).r.length){jQuery("*",this).add(this).each(function(){jQuery.event.remove(this);jQuery.removeData(this);});if(this.parentNode)this.parentNode.removeChild(this);}},empty:function(){jQuery(">*",this).remove();while(this.firstChild)this.removeChild(this.firstChild);}},function(name,fn){jQuery.fn[name]=function(){return this.each(fn,arguments);};});jQuery.each(["Height","Width"],function(i,name){var type=name.toLowerCase();jQuery.fn[type]=function(size){return this[0]==window?jQuery.browser.opera&&document.body["client"+name]||jQuery.browser.safari&&window["inner"+name]||document.compatMode=="CSS1Compat"&&document.documentElement["client"+name]||document.body["client"+name]:this[0]==document?Math.max(Math.max(document.body["scroll"+name],document.documentElement["scroll"+name]),Math.max(document.body["offset"+name],document.documentElement["offset"+name])):size==undefined?(this.length?jQuery.css(this[0],type):null):this.css(type,size.constructor==String?size:size+"px");};});function num(elem,prop){return elem[0]&&parseInt(jQuery.curCSS(elem[0],prop,true),10)||0;}var 
chars=jQuery.browser.safari&&parseInt(jQuery.browser.version)<417?"(?:[\\w*_-]|\\\\.)":"(?:[\\w\u0128-\uFFFF*_-]|\\\\.)",quickChild=new RegExp("^>\\s*("+chars+"+)"),quickID=new RegExp("^("+chars+"+)(#)("+chars+"+)"),quickClass=new RegExp("^([#.]?)("+chars+"*)");jQuery.extend({expr:{"":function(a,i,m){return m[2]=="*"||jQuery.nodeName(a,m[2]);},"#":function(a,i,m){return a.getAttribute("id")==m[2];},":":{lt:function(a,i,m){return i<m[3]-0;},gt:function(a,i,m){return i>m[3]-0;},nth:function(a,i,m){return m[3]-0==i;},eq:function(a,i,m){return m[3]-0==i;},first:function(a,i){return i==0;},last:function(a,i,m,r){return i==r.length-1;},even:function(a,i){return i%2==0;},odd:function(a,i){return i%2;},"first-child":function(a){return a.parentNode.getElementsByTagName("*")[0]==a;},"last-child":function(a){return jQuery.nth(a.parentNode.lastChild,1,"previousSibling")==a;},"only-child":function(a){return!jQuery.nth(a.parentNode.lastChild,2,"previousSibling");},parent:function(a){return a.firstChild;},empty:function(a){return!a.firstChild;},contains:function(a,i,m){return(a.textContent||a.innerText||jQuery(a).text()||"").indexOf(m[3])>=0;},visible:function(a){return"hidden"!=a.type&&jQuery.css(a,"display")!="none"&&jQuery.css(a,"visibility")!="hidden";},hidden:function(a){return"hidden"==a.type||jQuery.css(a,"display")=="none"||jQuery.css(a,"visibility")=="hidden";},enabled:function(a){return!a.disabled;},disabled:function(a){return a.disabled;},checked:function(a){return a.checked;},selected:function(a){return 
a.selected||jQuery.attr(a,"selected");},text:function(a){return"text"==a.type;},radio:function(a){return"radio"==a.type;},checkbox:function(a){return"checkbox"==a.type;},file:function(a){return"file"==a.type;},password:function(a){return"password"==a.type;},submit:function(a){return"submit"==a.type;},image:function(a){return"image"==a.type;},reset:function(a){return"reset"==a.type;},button:function(a){return"button"==a.type||jQuery.nodeName(a,"button");},input:function(a){return/input|select|textarea|button/i.test(a.nodeName);},has:function(a,i,m){return jQuery.find(m[3],a).length;},header:function(a){return/h\d/i.test(a.nodeName);},animated:function(a){return jQuery.grep(jQuery.timers,function(fn){return a==fn.elem;}).length;}}},parse:[/^(\[) *@?([\w-]+) *([!*$^~=]*) *('?"?)(.*?)\4 *\]/,/^(:)([\w-]+)\("?'?(.*?(\(.*?\))?[^(]*?)"?'?\)/,new RegExp("^([:.#]*)("+chars+"+)")],multiFilter:function(expr,elems,not){var old,cur=[];while(expr&&expr!=old){old=expr;var f=jQuery.filter(expr,elems,not);expr=f.t.replace(/^\s*,\s*/,"");cur=not?elems=f.r:jQuery.merge(cur,f.r);}return cur;},find:function(t,context){if(typeof t!="string")return[t];if(context&&context.nodeType!=1&&context.nodeType!=9)return[];context=context||document;var ret=[context],done=[],last,nodeName;while(t&&last!=t){var r=[];last=t;t=jQuery.trim(t);var foundToken=false,re=quickChild,m=re.exec(t);if(m){nodeName=m[1].toUpperCase();for(var i=0;ret[i];i++)for(var c=ret[i].firstChild;c;c=c.nextSibling)if(c.nodeType==1&&(nodeName=="*"||c.nodeName.toUpperCase()==nodeName))r.push(c);ret=r;t=t.replace(re,"");if(t.indexOf(" ")==0)continue;foundToken=true;}else{re=/^([>+~])\s*(\w*)/i;if((m=re.exec(t))!=null){r=[];var merge={};nodeName=m[2].toUpperCase();m=m[1];for(var j=0,rl=ret.length;j<rl;j++){var n=m=="~"||m=="+"?ret[j].nextSibling:ret[j].firstChild;for(;n;n=n.nextSibling)if(n.nodeType==1){var 
id=jQuery.data(n);if(m=="~"&&merge[id])break;if(!nodeName||n.nodeName.toUpperCase()==nodeName){if(m=="~")merge[id]=true;r.push(n);}if(m=="+")break;}}ret=r;t=jQuery.trim(t.replace(re,""));foundToken=true;}}if(t&&!foundToken){if(!t.indexOf(",")){if(context==ret[0])ret.shift();done=jQuery.merge(done,ret);r=ret=[context];t=" "+t.substr(1,t.length);}else{var re2=quickID;var m=re2.exec(t);if(m){m=[0,m[2],m[3],m[1]];}else{re2=quickClass;m=re2.exec(t);}m[2]=m[2].replace(/\\/g,"");var elem=ret[ret.length-1];if(m[1]=="#"&&elem&&elem.getElementById&&!jQuery.isXMLDoc(elem)){var oid=elem.getElementById(m[2]);if((jQuery.browser.msie||jQuery.browser.opera)&&oid&&typeof oid.id=="string"&&oid.id!=m[2])oid=jQuery('[@id="'+m[2]+'"]',elem)[0];ret=r=oid&&(!m[3]||jQuery.nodeName(oid,m[3]))?[oid]:[];}else{for(var i=0;ret[i];i++){var tag=m[1]=="#"&&m[3]?m[3]:m[1]!=""||m[0]==""?"*":m[2];if(tag=="*"&&ret[i].nodeName.toLowerCase()=="object")tag="param";r=jQuery.merge(r,ret[i].getElementsByTagName(tag));}if(m[1]==".")r=jQuery.classFilter(r,m[2]);if(m[1]=="#"){var tmp=[];for(var i=0;r[i];i++)if(r[i].getAttribute("id")==m[2]){tmp=[r[i]];break;}r=tmp;}ret=r;}t=t.replace(re2,"");}}if(t){var val=jQuery.filter(t,r);ret=r=val.r;t=jQuery.trim(val.t);}}if(t)ret=[];if(ret&&context==ret[0])ret.shift();done=jQuery.merge(done,ret);return done;},classFilter:function(r,m,not){m=" "+m+" ";var tmp=[];for(var i=0;r[i];i++){var pass=(" "+r[i].className+" ").indexOf(m)>=0;if(!not&&pass||not&&!pass)tmp.push(r[i]);}return tmp;},filter:function(t,r,not){var last;while(t&&t!=last){last=t;var p=jQuery.parse,m;for(var i=0;p[i];i++){m=p[i].exec(t);if(m){t=t.substring(m[0].length);m[2]=m[2].replace(/\\/g,"");break;}}if(!m)break;if(m[1]==":"&&m[2]=="not")r=isSimple.test(m[3])?jQuery.filter(m[3],r,true).r:jQuery(r).not(m[3]);else if(m[1]==".")r=jQuery.classFilter(r,m[2],not);else if(m[1]=="["){var tmp=[],type=m[3];for(var i=0,rl=r.length;i<rl;i++){var 
a=r[i],z=a[jQuery.props[m[2]]||m[2]];if(z==null||/href|src|selected/.test(m[2]))z=jQuery.attr(a,m[2])||'';if((type==""&&!!z||type=="="&&z==m[5]||type=="!="&&z!=m[5]||type=="^="&&z&&!z.indexOf(m[5])||type=="$="&&z.substr(z.length-m[5].length)==m[5]||(type=="*="||type=="~=")&&z.indexOf(m[5])>=0)^not)tmp.push(a);}r=tmp;}else if(m[1]==":"&&m[2]=="nth-child"){var merge={},tmp=[],test=/(-?)(\d*)n((?:\+|-)?\d*)/.exec(m[3]=="even"&&"2n"||m[3]=="odd"&&"2n+1"||!/\D/.test(m[3])&&"0n+"+m[3]||m[3]),first=(test[1]+(test[2]||1))-0,last=test[3]-0;for(var i=0,rl=r.length;i<rl;i++){var node=r[i],parentNode=node.parentNode,id=jQuery.data(parentNode);if(!merge[id]){var c=1;for(var n=parentNode.firstChild;n;n=n.nextSibling)if(n.nodeType==1)n.nodeIndex=c++;merge[id]=true;}var add=false;if(first==0){if(node.nodeIndex==last)add=true;}else if((node.nodeIndex-last)%first==0&&(node.nodeIndex-last)/first>=0)add=true;if(add^not)tmp.push(node);}r=tmp;}else{var fn=jQuery.expr[m[1]];if(typeof fn=="object")fn=fn[m[2]];if(typeof fn=="string")fn=eval("false||function(a,i){return "+fn+";}");r=jQuery.grep(r,function(elem,i){return fn(elem,i,m,r);},not);}}return{r:r,t:t};},dir:function(elem,dir){var matched=[],cur=elem[dir];while(cur&&cur!=document){if(cur.nodeType==1)matched.push(cur);cur=cur[dir];}return matched;},nth:function(cur,result,dir,elem){result=result||1;var num=0;for(;cur;cur=cur[dir])if(cur.nodeType==1&&++num==result)break;return cur;},sibling:function(n,elem){var r=[];for(;n;n=n.nextSibling){if(n.nodeType==1&&n!=elem)r.push(n);}return r;}});jQuery.event={add:function(elem,types,handler,data){if(elem.nodeType==3||elem.nodeType==8)return;if(jQuery.browser.msie&&elem.setInterval)elem=window;if(!handler.guid)handler.guid=this.guid++;if(data!=undefined){var fn=handler;handler=this.proxy(fn,function(){return fn.apply(this,arguments);});handler.data=data;}var 
events=jQuery.data(elem,"events")||jQuery.data(elem,"events",{}),handle=jQuery.data(elem,"handle")||jQuery.data(elem,"handle",function(){if(typeof jQuery!="undefined"&&!jQuery.event.triggered)return jQuery.event.handle.apply(arguments.callee.elem,arguments);});handle.elem=elem;jQuery.each(types.split(/\s+/),function(index,type){var parts=type.split(".");type=parts[0];handler.type=parts[1];var handlers=events[type];if(!handlers){handlers=events[type]={};if(!jQuery.event.special[type]||jQuery.event.special[type].setup.call(elem)===false){if(elem.addEventListener)elem.addEventListener(type,handle,false);else if(elem.attachEvent)elem.attachEvent("on"+type,handle);}}handlers[handler.guid]=handler;jQuery.event.global[type]=true;});elem=null;},guid:1,global:{},remove:function(elem,types,handler){if(elem.nodeType==3||elem.nodeType==8)return;var events=jQuery.data(elem,"events"),ret,index;if(events){if(types==undefined||(typeof types=="string"&&types.charAt(0)=="."))for(var type in events)this.remove(elem,type+(types||""));else{if(types.type){handler=types.handler;types=types.type;}jQuery.each(types.split(/\s+/),function(index,type){var parts=type.split(".");type=parts[0];if(events[type]){if(handler)delete events[type][handler.guid];else
+for(handler in events[type])if(!parts[1]||events[type][handler].type==parts[1])delete events[type][handler];for(ret in events[type])break;if(!ret){if(!jQuery.event.special[type]||jQuery.event.special[type].teardown.call(elem)===false){if(elem.removeEventListener)elem.removeEventListener(type,jQuery.data(elem,"handle"),false);else if(elem.detachEvent)elem.detachEvent("on"+type,jQuery.data(elem,"handle"));}ret=null;delete events[type];}}});}for(ret in events)break;if(!ret){var handle=jQuery.data(elem,"handle");if(handle)handle.elem=null;jQuery.removeData(elem,"events");jQuery.removeData(elem,"handle");}}},trigger:function(type,data,elem,donative,extra){data=jQuery.makeArray(data);if(type.indexOf("!")>=0){type=type.slice(0,-1);var exclusive=true;}if(!elem){if(this.global[type])jQuery("*").add([window,document]).trigger(type,data);}else{if(elem.nodeType==3||elem.nodeType==8)return undefined;var val,ret,fn=jQuery.isFunction(elem[type]||null),event=!data[0]||!data[0].preventDefault;if(event){data.unshift({type:type,target:elem,preventDefault:function(){},stopPropagation:function(){},timeStamp:now()});data[0][expando]=true;}data[0].type=type;if(exclusive)data[0].exclusive=true;var handle=jQuery.data(elem,"handle");if(handle)val=handle.apply(elem,data);if((!fn||(jQuery.nodeName(elem,'a')&&type=="click"))&&elem["on"+type]&&elem["on"+type].apply(elem,data)===false)val=false;if(event)data.shift();if(extra&&jQuery.isFunction(extra)){ret=extra.apply(elem,val==null?data:data.concat(val));if(ret!==undefined)val=ret;}if(fn&&donative!==false&&val!==false&&!(jQuery.nodeName(elem,'a')&&type=="click")){this.triggered=true;try{elem[type]();}catch(e){}}this.triggered=false;}return val;},handle:function(event){var val,ret,namespace,all,handlers;event=arguments[0]=jQuery.event.fix(event||window.event);namespace=event.type.split(".");event.type=namespace[0];namespace=namespace[1];all=!namespace&&!event.exclusive;handlers=(jQuery.data(this,"events")||{})[event.type];for(var j in 
handlers){var handler=handlers[j];if(all||handler.type==namespace){event.handler=handler;event.data=handler.data;ret=handler.apply(this,arguments);if(val!==false)val=ret;if(ret===false){event.preventDefault();event.stopPropagation();}}}return val;},fix:function(event){if(event[expando]==true)return event;var originalEvent=event;event={originalEvent:originalEvent};var props="altKey attrChange attrName bubbles button cancelable charCode clientX clientY ctrlKey currentTarget data detail eventPhase fromElement handler keyCode metaKey newValue originalTarget pageX pageY prevValue relatedNode relatedTarget screenX screenY shiftKey srcElement target timeStamp toElement type view wheelDelta which".split(" ");for(var i=props.length;i;i--)event[props[i]]=originalEvent[props[i]];event[expando]=true;event.preventDefault=function(){if(originalEvent.preventDefault)originalEvent.preventDefault();originalEvent.returnValue=false;};event.stopPropagation=function(){if(originalEvent.stopPropagation)originalEvent.stopPropagation();originalEvent.cancelBubble=true;};event.timeStamp=event.timeStamp||now();if(!event.target)event.target=event.srcElement||document;if(event.target.nodeType==3)event.target=event.target.parentNode;if(!event.relatedTarget&&event.fromElement)event.relatedTarget=event.fromElement==event.target?event.toElement:event.fromElement;if(event.pageX==null&&event.clientX!=null){var doc=document.documentElement,body=document.body;event.pageX=event.clientX+(doc&&doc.scrollLeft||body&&body.scrollLeft||0)-(doc.clientLeft||0);event.pageY=event.clientY+(doc&&doc.scrollTop||body&&body.scrollTop||0)-(doc.clientTop||0);}if(!event.which&&((event.charCode||event.charCode===0)?event.charCode:event.keyCode))event.which=event.charCode||event.keyCode;if(!event.metaKey&&event.ctrlKey)event.metaKey=event.ctrlKey;if(!event.which&&event.button)event.which=(event.button&1?1:(event.button&2?3:(event.button&4?2:0)));return 
event;},proxy:function(fn,proxy){proxy.guid=fn.guid=fn.guid||proxy.guid||this.guid++;return proxy;},special:{ready:{setup:function(){bindReady();return;},teardown:function(){return;}},mouseenter:{setup:function(){if(jQuery.browser.msie)return false;jQuery(this).bind("mouseover",jQuery.event.special.mouseenter.handler);return true;},teardown:function(){if(jQuery.browser.msie)return false;jQuery(this).unbind("mouseover",jQuery.event.special.mouseenter.handler);return true;},handler:function(event){if(withinElement(event,this))return true;event.type="mouseenter";return jQuery.event.handle.apply(this,arguments);}},mouseleave:{setup:function(){if(jQuery.browser.msie)return false;jQuery(this).bind("mouseout",jQuery.event.special.mouseleave.handler);return true;},teardown:function(){if(jQuery.browser.msie)return false;jQuery(this).unbind("mouseout",jQuery.event.special.mouseleave.handler);return true;},handler:function(event){if(withinElement(event,this))return true;event.type="mouseleave";return jQuery.event.handle.apply(this,arguments);}}}};jQuery.fn.extend({bind:function(type,data,fn){return type=="unload"?this.one(type,data,fn):this.each(function(){jQuery.event.add(this,type,fn||data,fn&&data);});},one:function(type,data,fn){var one=jQuery.event.proxy(fn||data,function(event){jQuery(this).unbind(event,one);return(fn||data).apply(this,arguments);});return this.each(function(){jQuery.event.add(this,type,one,fn&&data);});},unbind:function(type,fn){return this.each(function(){jQuery.event.remove(this,type,fn);});},trigger:function(type,data,fn){return this.each(function(){jQuery.event.trigger(type,data,this,true,fn);});},triggerHandler:function(type,data,fn){return this[0]&&jQuery.event.trigger(type,data,this[0],false,fn);},toggle:function(fn){var args=arguments,i=1;while(i<args.length)jQuery.event.proxy(fn,args[i++]);return this.click(jQuery.event.proxy(fn,function(event){this.lastToggle=(this.lastToggle||0)%i;event.preventDefault();return 
args[this.lastToggle++].apply(this,arguments)||false;}));},hover:function(fnOver,fnOut){return this.bind('mouseenter',fnOver).bind('mouseleave',fnOut);},ready:function(fn){bindReady();if(jQuery.isReady)fn.call(document,jQuery);else
+jQuery.readyList.push(function(){return fn.call(this,jQuery);});return this;}});jQuery.extend({isReady:false,readyList:[],ready:function(){if(!jQuery.isReady){jQuery.isReady=true;if(jQuery.readyList){jQuery.each(jQuery.readyList,function(){this.call(document);});jQuery.readyList=null;}jQuery(document).triggerHandler("ready");}}});var readyBound=false;function bindReady(){if(readyBound)return;readyBound=true;if(document.addEventListener&&!jQuery.browser.opera)document.addEventListener("DOMContentLoaded",jQuery.ready,false);if(jQuery.browser.msie&&window==top)(function(){if(jQuery.isReady)return;try{document.documentElement.doScroll("left");}catch(error){setTimeout(arguments.callee,0);return;}jQuery.ready();})();if(jQuery.browser.opera)document.addEventListener("DOMContentLoaded",function(){if(jQuery.isReady)return;for(var i=0;i<document.styleSheets.length;i++)if(document.styleSheets[i].disabled){setTimeout(arguments.callee,0);return;}jQuery.ready();},false);if(jQuery.browser.safari){var numStyles;(function(){if(jQuery.isReady)return;if(document.readyState!="loaded"&&document.readyState!="complete"){setTimeout(arguments.callee,0);return;}if(numStyles===undefined)numStyles=jQuery("style, link[rel=stylesheet]").length;if(document.styleSheets.length!=numStyles){setTimeout(arguments.callee,0);return;}jQuery.ready();})();}jQuery.event.add(window,"load",jQuery.ready);}jQuery.each(("blur,focus,load,resize,scroll,unload,click,dblclick,"+"mousedown,mouseup,mousemove,mouseover,mouseout,change,select,"+"submit,keydown,keypress,keyup,error").split(","),function(i,name){jQuery.fn[name]=function(fn){return fn?this.bind(name,fn):this.trigger(name);};});var withinElement=function(event,elem){var parent=event.relatedTarget;while(parent&&parent!=elem)try{parent=parent.parentNode;}catch(error){parent=elem;}return 
parent==elem;};jQuery(window).bind("unload",function(){jQuery("*").add(document).unbind();});jQuery.fn.extend({_load:jQuery.fn.load,load:function(url,params,callback){if(typeof url!='string')return this._load(url);var off=url.indexOf(" ");if(off>=0){var selector=url.slice(off,url.length);url=url.slice(0,off);}callback=callback||function(){};var type="GET";if(params)if(jQuery.isFunction(params)){callback=params;params=null;}else{params=jQuery.param(params);type="POST";}var self=this;jQuery.ajax({url:url,type:type,dataType:"html",data:params,complete:function(res,status){if(status=="success"||status=="notmodified")self.html(selector?jQuery("<div/>").append(res.responseText.replace(/<script(.|\s)*?\/script>/g,"")).find(selector):res.responseText);self.each(callback,[res.responseText,status,res]);}});return this;},serialize:function(){return jQuery.param(this.serializeArray());},serializeArray:function(){return this.map(function(){return jQuery.nodeName(this,"form")?jQuery.makeArray(this.elements):this;}).filter(function(){return this.name&&!this.disabled&&(this.checked||/select|textarea/i.test(this.nodeName)||/text|hidden|password/i.test(this.type));}).map(function(i,elem){var val=jQuery(this).val();return val==null?null:val.constructor==Array?jQuery.map(val,function(val,i){return{name:elem.name,value:val};}):{name:elem.name,value:val};}).get();}});jQuery.each("ajaxStart,ajaxStop,ajaxComplete,ajaxError,ajaxSuccess,ajaxSend".split(","),function(i,o){jQuery.fn[o]=function(f){return this.bind(o,f);};});var jsc=now();jQuery.extend({get:function(url,data,callback,type){if(jQuery.isFunction(data)){callback=data;data=null;}return jQuery.ajax({type:"GET",url:url,data:data,success:callback,dataType:type});},getScript:function(url,callback){return jQuery.get(url,null,callback,"script");},getJSON:function(url,data,callback){return jQuery.get(url,data,callback,"json");},post:function(url,data,callback,type){if(jQuery.isFunction(data)){callback=data;data={};}return 
jQuery.ajax({type:"POST",url:url,data:data,success:callback,dataType:type});},ajaxSetup:function(settings){jQuery.extend(jQuery.ajaxSettings,settings);},ajaxSettings:{url:location.href,global:true,type:"GET",timeout:0,contentType:"application/x-www-form-urlencoded",processData:true,async:true,data:null,username:null,password:null,accepts:{xml:"application/xml, text/xml",html:"text/html",script:"text/javascript, application/javascript",json:"application/json, text/javascript",text:"text/plain",_default:"*/*"}},lastModified:{},ajax:function(s){s=jQuery.extend(true,s,jQuery.extend(true,{},jQuery.ajaxSettings,s));var jsonp,jsre=/=\?(&|$)/g,status,data,type=s.type.toUpperCase();if(s.data&&s.processData&&typeof s.data!="string")s.data=jQuery.param(s.data);if(s.dataType=="jsonp"){if(type=="GET"){if(!s.url.match(jsre))s.url+=(s.url.match(/\?/)?"&":"?")+(s.jsonp||"callback")+"=?";}else if(!s.data||!s.data.match(jsre))s.data=(s.data?s.data+"&":"")+(s.jsonp||"callback")+"=?";s.dataType="json";}if(s.dataType=="json"&&(s.data&&s.data.match(jsre)||s.url.match(jsre))){jsonp="jsonp"+jsc++;if(s.data)s.data=(s.data+"").replace(jsre,"="+jsonp+"$1");s.url=s.url.replace(jsre,"="+jsonp+"$1");s.dataType="script";window[jsonp]=function(tmp){data=tmp;success();complete();window[jsonp]=undefined;try{delete window[jsonp];}catch(e){}if(head)head.removeChild(script);};}if(s.dataType=="script"&&s.cache==null)s.cache=false;if(s.cache===false&&type=="GET"){var ts=now();var ret=s.url.replace(/(\?|&)_=.*?(&|$)/,"$1_="+ts+"$2");s.url=ret+((ret==s.url)?(s.url.match(/\?/)?"&":"?")+"_="+ts:"");}if(s.data&&type=="GET"){s.url+=(s.url.match(/\?/)?"&":"?")+s.data;s.data=null;}if(s.global&&!jQuery.active++)jQuery.event.trigger("ajaxStart");var remote=/^(?:\w+:)?\/\/([^\/?#]+)/;if(s.dataType=="script"&&type=="GET"&&remote.test(s.url)&&remote.exec(s.url)[1]!=location.host){var head=document.getElementsByTagName("head")[0];var 
script=document.createElement("script");script.src=s.url;if(s.scriptCharset)script.charset=s.scriptCharset;if(!jsonp){var done=false;script.onload=script.onreadystatechange=function(){if(!done&&(!this.readyState||this.readyState=="loaded"||this.readyState=="complete")){done=true;success();complete();head.removeChild(script);}};}head.appendChild(script);return undefined;}var requestDone=false;var xhr=window.ActiveXObject?new ActiveXObject("Microsoft.XMLHTTP"):new XMLHttpRequest();if(s.username)xhr.open(type,s.url,s.async,s.username,s.password);else
+xhr.open(type,s.url,s.async);try{if(s.data)xhr.setRequestHeader("Content-Type",s.contentType);if(s.ifModified)xhr.setRequestHeader("If-Modified-Since",jQuery.lastModified[s.url]||"Thu, 01 Jan 1970 00:00:00 GMT");xhr.setRequestHeader("X-Requested-With","XMLHttpRequest");xhr.setRequestHeader("Accept",s.dataType&&s.accepts[s.dataType]?s.accepts[s.dataType]+", */*":s.accepts._default);}catch(e){}if(s.beforeSend&&s.beforeSend(xhr,s)===false){s.global&&jQuery.active--;xhr.abort();return false;}if(s.global)jQuery.event.trigger("ajaxSend",[xhr,s]);var onreadystatechange=function(isTimeout){if(!requestDone&&xhr&&(xhr.readyState==4||isTimeout=="timeout")){requestDone=true;if(ival){clearInterval(ival);ival=null;}status=isTimeout=="timeout"&&"timeout"||!jQuery.httpSuccess(xhr)&&"error"||s.ifModified&&jQuery.httpNotModified(xhr,s.url)&&"notmodified"||"success";if(status=="success"){try{data=jQuery.httpData(xhr,s.dataType,s.dataFilter);}catch(e){status="parsererror";}}if(status=="success"){var modRes;try{modRes=xhr.getResponseHeader("Last-Modified");}catch(e){}if(s.ifModified&&modRes)jQuery.lastModified[s.url]=modRes;if(!jsonp)success();}else
+jQuery.handleError(s,xhr,status);complete();if(s.async)xhr=null;}};if(s.async){var ival=setInterval(onreadystatechange,13);if(s.timeout>0)setTimeout(function(){if(xhr){xhr.abort();if(!requestDone)onreadystatechange("timeout");}},s.timeout);}try{xhr.send(s.data);}catch(e){jQuery.handleError(s,xhr,null,e);}if(!s.async)onreadystatechange();function success(){if(s.success)s.success(data,status);if(s.global)jQuery.event.trigger("ajaxSuccess",[xhr,s]);}function complete(){if(s.complete)s.complete(xhr,status);if(s.global)jQuery.event.trigger("ajaxComplete",[xhr,s]);if(s.global&&!--jQuery.active)jQuery.event.trigger("ajaxStop");}return xhr;},handleError:function(s,xhr,status,e){if(s.error)s.error(xhr,status,e);if(s.global)jQuery.event.trigger("ajaxError",[xhr,s,e]);},active:0,httpSuccess:function(xhr){try{return!xhr.status&&location.protocol=="file:"||(xhr.status>=200&&xhr.status<300)||xhr.status==304||xhr.status==1223||jQuery.browser.safari&&xhr.status==undefined;}catch(e){}return false;},httpNotModified:function(xhr,url){try{var xhrRes=xhr.getResponseHeader("Last-Modified");return xhr.status==304||xhrRes==jQuery.lastModified[url]||jQuery.browser.safari&&xhr.status==undefined;}catch(e){}return false;},httpData:function(xhr,type,filter){var ct=xhr.getResponseHeader("content-type"),xml=type=="xml"||!type&&ct&&ct.indexOf("xml")>=0,data=xml?xhr.responseXML:xhr.responseText;if(xml&&data.documentElement.tagName=="parsererror")throw"parsererror";if(filter)data=filter(data,type);if(type=="script")jQuery.globalEval(data);if(type=="json")data=eval("("+data+")");return data;},param:function(a){var s=[];if(a.constructor==Array||a.jquery)jQuery.each(a,function(){s.push(encodeURIComponent(this.name)+"="+encodeURIComponent(this.value));});else
+for(var j in a)if(a[j]&&a[j].constructor==Array)jQuery.each(a[j],function(){s.push(encodeURIComponent(j)+"="+encodeURIComponent(this));});else
+s.push(encodeURIComponent(j)+"="+encodeURIComponent(jQuery.isFunction(a[j])?a[j]():a[j]));return s.join("&").replace(/%20/g,"+");}});jQuery.fn.extend({show:function(speed,callback){return speed?this.animate({height:"show",width:"show",opacity:"show"},speed,callback):this.filter(":hidden").each(function(){this.style.display=this.oldblock||"";if(jQuery.css(this,"display")=="none"){var elem=jQuery("<"+this.tagName+" />").appendTo("body");this.style.display=elem.css("display");if(this.style.display=="none")this.style.display="block";elem.remove();}}).end();},hide:function(speed,callback){return speed?this.animate({height:"hide",width:"hide",opacity:"hide"},speed,callback):this.filter(":visible").each(function(){this.oldblock=this.oldblock||jQuery.css(this,"display");this.style.display="none";}).end();},_toggle:jQuery.fn.toggle,toggle:function(fn,fn2){return jQuery.isFunction(fn)&&jQuery.isFunction(fn2)?this._toggle.apply(this,arguments):fn?this.animate({height:"toggle",width:"toggle",opacity:"toggle"},fn,fn2):this.each(function(){jQuery(this)[jQuery(this).is(":hidden")?"show":"hide"]();});},slideDown:function(speed,callback){return this.animate({height:"show"},speed,callback);},slideUp:function(speed,callback){return this.animate({height:"hide"},speed,callback);},slideToggle:function(speed,callback){return this.animate({height:"toggle"},speed,callback);},fadeIn:function(speed,callback){return this.animate({opacity:"show"},speed,callback);},fadeOut:function(speed,callback){return this.animate({opacity:"hide"},speed,callback);},fadeTo:function(speed,to,callback){return this.animate({opacity:to},speed,callback);},animate:function(prop,speed,easing,callback){var optall=jQuery.speed(speed,easing,callback);return this[optall.queue===false?"each":"queue"](function(){if(this.nodeType!=1)return false;var opt=jQuery.extend({},optall),p,hidden=jQuery(this).is(":hidden"),self=this;for(p in prop){if(prop[p]=="hide"&&hidden||prop[p]=="show"&&!hidden)return 
opt.complete.call(this);if(p=="height"||p=="width"){opt.display=jQuery.css(this,"display");opt.overflow=this.style.overflow;}}if(opt.overflow!=null)this.style.overflow="hidden";opt.curAnim=jQuery.extend({},prop);jQuery.each(prop,function(name,val){var e=new jQuery.fx(self,opt,name);if(/toggle|show|hide/.test(val))e[val=="toggle"?hidden?"show":"hide":val](prop);else{var parts=val.toString().match(/^([+-]=)?([\d+-.]+)(.*)$/),start=e.cur(true)||0;if(parts){var end=parseFloat(parts[2]),unit=parts[3]||"px";if(unit!="px"){self.style[name]=(end||1)+unit;start=((end||1)/e.cur(true))*start;self.style[name]=start+unit;}if(parts[1])end=((parts[1]=="-="?-1:1)*end)+start;e.custom(start,end,unit);}else
+e.custom(start,val,"");}});return true;});},queue:function(type,fn){if(jQuery.isFunction(type)||(type&&type.constructor==Array)){fn=type;type="fx";}if(!type||(typeof type=="string"&&!fn))return queue(this[0],type);return this.each(function(){if(fn.constructor==Array)queue(this,type,fn);else{queue(this,type).push(fn);if(queue(this,type).length==1)fn.call(this);}});},stop:function(clearQueue,gotoEnd){var timers=jQuery.timers;if(clearQueue)this.queue([]);this.each(function(){for(var i=timers.length-1;i>=0;i--)if(timers[i].elem==this){if(gotoEnd)timers[i](true);timers.splice(i,1);}});if(!gotoEnd)this.dequeue();return this;}});var queue=function(elem,type,array){if(elem){type=type||"fx";var q=jQuery.data(elem,type+"queue");if(!q||array)q=jQuery.data(elem,type+"queue",jQuery.makeArray(array));}return q;};jQuery.fn.dequeue=function(type){type=type||"fx";return this.each(function(){var q=queue(this,type);q.shift();if(q.length)q[0].call(this);});};jQuery.extend({speed:function(speed,easing,fn){var opt=speed&&speed.constructor==Object?speed:{complete:fn||!fn&&easing||jQuery.isFunction(speed)&&speed,duration:speed,easing:fn&&easing||easing&&easing.constructor!=Function&&easing};opt.duration=(opt.duration&&opt.duration.constructor==Number?opt.duration:jQuery.fx.speeds[opt.duration])||jQuery.fx.speeds.def;opt.old=opt.complete;opt.complete=function(){if(opt.queue!==false)jQuery(this).dequeue();if(jQuery.isFunction(opt.old))opt.old.call(this);};return opt;},easing:{linear:function(p,n,firstNum,diff){return 
firstNum+diff*p;},swing:function(p,n,firstNum,diff){return((-Math.cos(p*Math.PI)/2)+0.5)*diff+firstNum;}},timers:[],timerId:null,fx:function(elem,options,prop){this.options=options;this.elem=elem;this.prop=prop;if(!options.orig)options.orig={};}});jQuery.fx.prototype={update:function(){if(this.options.step)this.options.step.call(this.elem,this.now,this);(jQuery.fx.step[this.prop]||jQuery.fx.step._default)(this);if(this.prop=="height"||this.prop=="width")this.elem.style.display="block";},cur:function(force){if(this.elem[this.prop]!=null&&this.elem.style[this.prop]==null)return this.elem[this.prop];var r=parseFloat(jQuery.css(this.elem,this.prop,force));return r&&r>-10000?r:parseFloat(jQuery.curCSS(this.elem,this.prop))||0;},custom:function(from,to,unit){this.startTime=now();this.start=from;this.end=to;this.unit=unit||this.unit||"px";this.now=this.start;this.pos=this.state=0;this.update();var self=this;function t(gotoEnd){return self.step(gotoEnd);}t.elem=this.elem;jQuery.timers.push(t);if(jQuery.timerId==null){jQuery.timerId=setInterval(function(){var timers=jQuery.timers;for(var i=0;i<timers.length;i++)if(!timers[i]())timers.splice(i--,1);if(!timers.length){clearInterval(jQuery.timerId);jQuery.timerId=null;}},13);}},show:function(){this.options.orig[this.prop]=jQuery.attr(this.elem.style,this.prop);this.options.show=true;this.custom(0,this.cur());if(this.prop=="width"||this.prop=="height")this.elem.style[this.prop]="1px";jQuery(this.elem).show();},hide:function(){this.options.orig[this.prop]=jQuery.attr(this.elem.style,this.prop);this.options.hide=true;this.custom(this.cur(),0);},step:function(gotoEnd){var t=now();if(gotoEnd||t>this.options.duration+this.startTime){this.now=this.end;this.pos=this.state=1;this.update();this.options.curAnim[this.prop]=true;var done=true;for(var i in 
this.options.curAnim)if(this.options.curAnim[i]!==true)done=false;if(done){if(this.options.display!=null){this.elem.style.overflow=this.options.overflow;this.elem.style.display=this.options.display;if(jQuery.css(this.elem,"display")=="none")this.elem.style.display="block";}if(this.options.hide)this.elem.style.display="none";if(this.options.hide||this.options.show)for(var p in this.options.curAnim)jQuery.attr(this.elem.style,p,this.options.orig[p]);}if(done)this.options.complete.call(this.elem);return false;}else{var n=t-this.startTime;this.state=n/this.options.duration;this.pos=jQuery.easing[this.options.easing||(jQuery.easing.swing?"swing":"linear")](this.state,n,0,1,this.options.duration);this.now=this.start+((this.end-this.start)*this.pos);this.update();}return true;}};jQuery.extend(jQuery.fx,{speeds:{slow:600,fast:200,def:400},step:{scrollLeft:function(fx){fx.elem.scrollLeft=fx.now;},scrollTop:function(fx){fx.elem.scrollTop=fx.now;},opacity:function(fx){jQuery.attr(fx.elem.style,"opacity",fx.now);},_default:function(fx){fx.elem.style[fx.prop]=fx.now+fx.unit;}}});jQuery.fn.offset=function(){var left=0,top=0,elem=this[0],results;if(elem)with(jQuery.browser){var parent=elem.parentNode,offsetChild=elem,offsetParent=elem.offsetParent,doc=elem.ownerDocument,safari2=safari&&parseInt(version)<522&&!/adobeair/i.test(userAgent),css=jQuery.curCSS,fixed=css(elem,"position")=="fixed";if(elem.getBoundingClientRect){var 
box=elem.getBoundingClientRect();add(box.left+Math.max(doc.documentElement.scrollLeft,doc.body.scrollLeft),box.top+Math.max(doc.documentElement.scrollTop,doc.body.scrollTop));add(-doc.documentElement.clientLeft,-doc.documentElement.clientTop);}else{add(elem.offsetLeft,elem.offsetTop);while(offsetParent){add(offsetParent.offsetLeft,offsetParent.offsetTop);if(mozilla&&!/^t(able|d|h)$/i.test(offsetParent.tagName)||safari&&!safari2)border(offsetParent);if(!fixed&&css(offsetParent,"position")=="fixed")fixed=true;offsetChild=/^body$/i.test(offsetParent.tagName)?offsetChild:offsetParent;offsetParent=offsetParent.offsetParent;}while(parent&&parent.tagName&&!/^body|html$/i.test(parent.tagName)){if(!/^inline|table.*$/i.test(css(parent,"display")))add(-parent.scrollLeft,-parent.scrollTop);if(mozilla&&css(parent,"overflow")!="visible")border(parent);parent=parent.parentNode;}if((safari2&&(fixed||css(offsetChild,"position")=="absolute"))||(mozilla&&css(offsetChild,"position")!="absolute"))add(-doc.body.offsetLeft,-doc.body.offsetTop);if(fixed)add(Math.max(doc.documentElement.scrollLeft,doc.body.scrollLeft),Math.max(doc.documentElement.scrollTop,doc.body.scrollTop));}results={top:top,left:left};}function border(elem){add(jQuery.curCSS(elem,"borderLeftWidth",true),jQuery.curCSS(elem,"borderTopWidth",true));}function add(l,t){left+=parseInt(l,10)||0;top+=parseInt(t,10)||0;}return results;};jQuery.fn.extend({position:function(){var left=0,top=0,results;if(this[0]){var offsetParent=this.offsetParent(),offset=this.offset(),parentOffset=/^body|html$/i.test(offsetParent[0].tagName)?{top:0,left:0}:offsetParent.offset();offset.top-=num(this,'marginTop');offset.left-=num(this,'marginLeft');parentOffset.top+=num(offsetParent,'borderTopWidth');parentOffset.left+=num(offsetParent,'borderLeftWidth');results={top:offset.top-parentOffset.top,left:offset.left-parentOffset.left};}return results;},offsetParent:function(){var 
offsetParent=this[0].offsetParent;while(offsetParent&&(!/^body|html$/i.test(offsetParent.tagName)&&jQuery.css(offsetParent,'position')=='static'))offsetParent=offsetParent.offsetParent;return jQuery(offsetParent);}});jQuery.each(['Left','Top'],function(i,name){var method='scroll'+name;jQuery.fn[method]=function(val){if(!this[0])return;return val!=undefined?this.each(function(){this==window||this==document?window.scrollTo(!i?val:jQuery(window).scrollLeft(),i?val:jQuery(window).scrollTop()):this[method]=val;}):this[0]==window||this[0]==document?self[i?'pageYOffset':'pageXOffset']||jQuery.boxModel&&document.documentElement[method]||document.body[method]:this[0][method];};});jQuery.each(["Height","Width"],function(i,name){var tl=i?"Left":"Top",br=i?"Right":"Bottom";jQuery.fn["inner"+name]=function(){return this[name.toLowerCase()]()+num(this,"padding"+tl)+num(this,"padding"+br);};jQuery.fn["outer"+name]=function(margin){return this["inner"+name]()+num(this,"border"+tl+"Width")+num(this,"border"+br+"Width")+(margin?num(this,"margin"+tl)+num(this,"margin"+br):0);};});})();
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/jquery.json.js	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,35 @@
+(function($){function toIntegersAtLease(n)
+{return n<10?'0'+n:n;}
+Date.prototype.toJSON=function(date)
+{return date.getUTCFullYear()+'-'+
+toIntegersAtLease(date.getUTCMonth()+1)+'-'+
+toIntegersAtLease(date.getUTCDate());};var escapeable=/["\\\x00-\x1f\x7f-\x9f]/g;var meta={'\b':'\\b','\t':'\\t','\n':'\\n','\f':'\\f','\r':'\\r','"':'\\"','\\':'\\\\'}
+$.quoteString=function(string)
+{if(escapeable.test(string))
+{return'"'+string.replace(escapeable,function(a)
+{var c=meta[a];if(typeof c==='string'){return c;}
+c=a.charCodeAt();return'\\u00'+Math.floor(c/16).toString(16)+(c%16).toString(16);})+'"'}
+return'"'+string+'"';}
+$.toJSON=function(o)
+{var type=typeof(o);if(type=="undefined")
+return"undefined";else if(type=="number"||type=="boolean")
+return o+"";else if(o===null)
+return"null";if(type=="string")
+{return $.quoteString(o);}
+if(type=="object"&&typeof o.toJSON=="function")
+return o.toJSON();if(type!="function"&&typeof(o.length)=="number")
+{var ret=[];for(var i=0;i<o.length;i++){ret.push($.toJSON(o[i]));}
+return"["+ret.join(", ")+"]";}
+if(type=="function"){throw new TypeError("Unable to convert object of type 'function' to json.");}
+ret=[];for(var k in o){var name;var type=typeof(k);if(type=="number")
+name='"'+k+'"';else if(type=="string")
+name=$.quoteString(k);else
+continue;val=$.toJSON(o[k]);if(typeof(val)!="string"){continue;}
+ret.push(name+": "+val);}
+return"{"+ret.join(", ")+"}";}
+$.evalJSON=function(src)
+{return eval("("+src+")");}
+$.secureEvalJSON=function(src)
+{var filtered=src;filtered=filtered.replace(/\\["\\\/bfnrtu]/g,'@');filtered=filtered.replace(/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g,']');filtered=filtered.replace(/(?:^|:|,)(?:\s*\[)+/g,'');if(/^[\],:{}\s]*$/.test(filtered))
+return eval("("+src+")");else
+throw new SyntaxError("Error parsing JSON, source is not valid.");}})(jQuery);
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/jquery.tablesorter.js	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,874 @@
+/*
+ *
+ * TableSorter 2.0 - Client-side table sorting with ease!
+ * Version 2.0.3
+ * @requires jQuery v1.2.3
+ *
+ * Copyright (c) 2007 Christian Bach
+ * Examples and docs at: http://tablesorter.com
+ * Dual licensed under the MIT and GPL licenses:
+ * http://www.opensource.org/licenses/mit-license.php
+ * http://www.gnu.org/licenses/gpl.html
+ *
+ */
+/**
+ *
+ * @description Create a sortable table with multi-column sorting capabilitys
+ *
+ * @example $('table').tablesorter();
+ * @desc Create a simple tablesorter interface.
+ *
+ * @example $('table').tablesorter({ sortList:[[0,0],[1,0]] });
+ * @desc Create a tablesorter interface and sort on the first and secound column in ascending order.
+ *
+ * @example $('table').tablesorter({ headers: { 0: { sorter: false}, 1: {sorter: false} } });
+ * @desc Create a tablesorter interface and disableing the first and secound column headers.
+ *
+ * @example $('table').tablesorter({ 0: {sorter:"integer"}, 1: {sorter:"currency"} });
+ * @desc Create a tablesorter interface and set a column parser for the first and secound column.
+ *
+ *
+ * @param Object settings An object literal containing key/value pairs to provide optional settings.
+ *
+ * @option String cssHeader (optional) 			A string of the class name to be appended to sortable tr elements in the thead of the table.
+ * 												Default value: "header"
+ *
+ * @option String cssAsc (optional) 			A string of the class name to be appended to sortable tr elements in the thead on a ascending sort.
+ * 												Default value: "headerSortUp"
+ *
+ * @option String cssDesc (optional) 			A string of the class name to be appended to sortable tr elements in the thead on a descending sort.
+ * 												Default value: "headerSortDown"
+ *
+ * @option String sortInitialOrder (optional) 	A string of the inital sorting order can be asc or desc.
+ * 												Default value: "asc"
+ *
+ * @option String sortMultisortKey (optional) 	A string of the multi-column sort key.
+ * 												Default value: "shiftKey"
+ *
+ * @option String textExtraction (optional) 	A string of the text-extraction method to use.
+ * 												For complex html structures inside td cell set this option to "complex",
+ * 												on large tables the complex option can be slow.
+ * 												Default value: "simple"
+ *
+ * @option Object headers (optional) 			An array containing the forces sorting rules.
+ * 												This option let's you specify a default sorting rule.
+ * 												Default value: null
+ *
+ * @option Array sortList (optional) 			An array containing the forces sorting rules.
+ * 												This option let's you specify a default sorting rule.
+ * 												Default value: null
+ *
+ * @option Array sortForce (optional) 			An array containing forced sorting rules.
+ * 												This option let's you specify a default sorting rule, which is prepended to user-selected rules.
+ * 												Default value: null
+ *
+  * @option Array sortAppend (optional) 			An array containing forced sorting rules.
+ * 												This option let's you specify a default sorting rule, which is appended to user-selected rules.
+ * 												Default value: null
+ *
+ * @option Boolean widthFixed (optional) 		Boolean flag indicating if tablesorter should apply fixed widths to the table columns.
+ * 												This is usefull when using the pager companion plugin.
+ * 												This options requires the dimension jquery plugin.
+ * 												Default value: false
+ *
+ * @option Boolean cancelSelection (optional) 	Boolean flag indicating if tablesorter should cancel selection of the table headers text.
+ * 												Default value: true
+ *
+ * @option Boolean debug (optional) 			Boolean flag indicating if tablesorter should display debuging information usefull for development.
+ *
+ * @type jQuery
+ *
+ * @name tablesorter
+ *
+ * @cat Plugins/Tablesorter
+ *
+ * @author Christian Bach/christian.bach@polyester.se
+ */
+
+var Sortable = {};
+
+(function($) {
+	$.extend({
+		tablesorter: new function() {
+			var parsers = [], widgets = [];
+
+			this.defaults = {
+				cssHeader: "header",
+				cssAsc: "headerSortUp",
+				cssDesc: "headerSortDown",
+				sortInitialOrder: "asc",
+				sortMultiSortKey: "shiftKey",
+				sortForce: null,
+				sortAppend: null,
+				textExtraction: "simple",
+				parsers: {},
+				widgets: [],
+				widgetZebra: {css: ["even","odd"]},
+				headers: {},
+				widthFixed: false,
+				cancelSelection: true,
+				sortList: [],
+				headerList: [],
+				dateFormat: "us",
+				decimal: '.',
+				debug: false
+			};
+
+			/* debuging utils */
+			function benchmark(s,d) {
+				log(s + "," + (new Date().getTime() - d.getTime()) + "ms");
+			}
+
+			this.benchmark = benchmark;
+
+			function log(s) {
+				if (typeof console != "undefined" && typeof console.debug != "undefined") {
+					console.log(s);
+				} else {
+					alert(s);
+				}
+			}
+
+			/* parsers utils */
+			function buildParserCache(table,$headers) {
+
+				if(table.config.debug) { var parsersDebug = ""; }
+
+				var rows = table.tBodies[0].rows;
+
+				if(table.tBodies[0].rows[0]) {
+
+					var list = [], cells = rows[0].cells, l = cells.length;
+
+					for (var i=0;i < l; i++) {
+						var p = false;
+
+						if($.metadata && ($($headers[i]).metadata() && $($headers[i]).metadata().sorter)  ) {
+
+							p = getParserById($($headers[i]).metadata().sorter);
+
+						} else if((table.config.headers[i] && table.config.headers[i].sorter)) {
+
+							p = getParserById(table.config.headers[i].sorter);
+						}
+						if(!p) {
+							p = detectParserForColumn(table,cells[i]);
+						}
+
+						if(table.config.debug) { parsersDebug += "column:" + i + " parser:" +p.id + "\n"; }
+
+						list.push(p);
+					}
+				}
+
+				if(table.config.debug) { log(parsersDebug); }
+
+				return list;
+			};
+
+			function detectParserForColumn(table,node) {
+				var l = parsers.length;
+				for(var i=1; i < l; i++) {
+					if(parsers[i].is($.trim(getElementText(table.config,node)),table,node)) {
+						return parsers[i];
+					}
+				}
+				// 0 is always the generic parser (text)
+				return parsers[0];
+			}
+
+			function getParserById(name) {
+				var l = parsers.length;
+				for(var i=0; i < l; i++) {
+					if(parsers[i].id.toLowerCase() == name.toLowerCase()) {
+						return parsers[i];
+					}
+				}
+				return false;
+			}
+
+			/* utils */
+			function buildCache(table) {
+
+				if(table.config.debug) { var cacheTime = new Date(); }
+
+
+				var totalRows = (table.tBodies[0] && table.tBodies[0].rows.length) || 0,
+					totalCells = (table.tBodies[0].rows[0] && table.tBodies[0].rows[0].cells.length) || 0,
+					parsers = table.config.parsers,
+					cache = {row: [], normalized: []};
+
+					for (var i=0;i < totalRows; ++i) {
+
+						/** Add the table data to main data array */
+						var c = table.tBodies[0].rows[i], cols = [];
+
+						cache.row.push($(c));
+
+						for(var j=0; j < totalCells; ++j) {
+							cols.push(parsers[j].format(getElementText(table.config,c.cells[j]),table,c.cells[j]));
+						}
+
+						cols.push(i); // add position for rowCache
+						cache.normalized.push(cols);
+						cols = null;
+					};
+
+				if(table.config.debug) { benchmark("Building cache for " + totalRows + " rows:", cacheTime); }
+
+				return cache;
+			};
+
+			function getElementText(config,node) {
+
+				if(!node) return "";
+
+				var t = "";
+
+				if(config.textExtraction == "simple") {
+					if(node.childNodes[0] && node.childNodes[0].hasChildNodes()) {
+						t = node.childNodes[0].innerHTML;
+					} else {
+						t = node.innerHTML;
+					}
+				} else {
+					if(typeof(config.textExtraction) == "function") {
+						t = config.textExtraction(node);
+					} else {
+						t = $(node).text();
+					}
+				}
+				return t;
+			}
+
+			function appendToTable(table,cache) {
+
+				if(table.config.debug) {var appendTime = new Date()}
+
+				var c = cache,
+					r = c.row,
+					n= c.normalized,
+					totalRows = n.length,
+					checkCell = (n[0].length-1),
+					tableBody = $(table.tBodies[0]),
+					rows = [];
+
+				for (var i=0;i < totalRows; i++) {
+					rows.push(r[n[i][checkCell]]);
+					if(!table.config.appender) {
+
+						var o = r[n[i][checkCell]];
+						var l = o.length;
+						for(var j=0; j < l; j++) {
+
+							tableBody[0].appendChild(o[j]);
+
+						}
+
+						//tableBody.append(r[n[i][checkCell]]);
+					}
+				}
+
+				if(table.config.appender) {
+
+					table.config.appender(table,rows);
+				}
+
+				rows = null;
+
+				if(table.config.debug) { benchmark("Rebuilt table:", appendTime); }
+
+				//apply table widgets
+				applyWidget(table);
+
+				// trigger sortend
+				setTimeout(function() {
+					$(table).trigger("sortEnd");
+				},0);
+
+			};
+
+			function buildHeaders(table) {
+
+				if(table.config.debug) { var time = new Date(); }
+
+				var meta = ($.metadata) ? true : false, tableHeadersRows = [];
+
+				for(var i = 0; i < table.tHead.rows.length; i++) { tableHeadersRows[i]=0; };
+
+				$tableHeaders = $("thead th",table);
+
+				$tableHeaders.each(function(index) {
+
+					this.count = 0;
+					this.column = index;
+					this.order = formatSortingOrder(table.config.sortInitialOrder);
+
+					if(checkHeaderMetadata(this) || checkHeaderOptions(table,index)) this.sortDisabled = true;
+
+					if(!this.sortDisabled) {
+						$(this).addClass(table.config.cssHeader);
+					}
+
+					// add cell to headerList
+					table.config.headerList[index]= this;
+				});
+
+				if(table.config.debug) { benchmark("Built headers:", time); log($tableHeaders); }
+
+				return $tableHeaders;
+
+			};
+
+		   	function checkCellColSpan(table, rows, row) {
+                var arr = [], r = table.tHead.rows, c = r[row].cells;
+
+				for(var i=0; i < c.length; i++) {
+					var cell = c[i];
+
+					if ( cell.colSpan > 1) {
+						arr = arr.concat(checkCellColSpan(table, headerArr,row++));
+					} else  {
+						if(table.tHead.length == 1 || (cell.rowSpan > 1 || !r[row+1])) {
+							arr.push(cell);
+						}
+						//headerArr[row] = (i+row);
+					}
+				}
+				return arr;
+			};
+
+			function checkHeaderMetadata(cell) {
+				if(($.metadata) && ($(cell).metadata().sorter === false)) { return true; };
+				return false;
+			}
+
+			function checkHeaderOptions(table,i) {
+				if((table.config.headers[i]) && (table.config.headers[i].sorter === false)) { return true; };
+				return false;
+			}
+
+			function applyWidget(table) {
+				var c = table.config.widgets;
+				var l = c.length;
+				for(var i=0; i < l; i++) {
+
+					getWidgetById(c[i]).format(table);
+				}
+
+			}
+
+			function getWidgetById(name) {
+				var l = widgets.length;
+				for(var i=0; i < l; i++) {
+					if(widgets[i].id.toLowerCase() == name.toLowerCase() ) {
+						return widgets[i];
+					}
+				}
+			};
+
+			function formatSortingOrder(v) {
+
+				if(typeof(v) != "Number") {
+					i = (v.toLowerCase() == "desc") ? 1 : 0;
+				} else {
+					i = (v == (0 || 1)) ? v : 0;
+				}
+				return i;
+			}
+
+			function isValueInArray(v, a) {
+				var l = a.length;
+				for(var i=0; i < l; i++) {
+					if(a[i][0] == v) {
+						return true;
+					}
+				}
+				return false;
+			}
+
+			function setHeadersCss(table,$headers, list, css) {
+				// remove all header information
+				$headers.removeClass(css[0]).removeClass(css[1]);
+
+				var h = [];
+				$headers.each(function(offset) {
+						if(!this.sortDisabled) {
+							h[this.column] = $(this);
+						}
+				});
+
+				var l = list.length;
+				for(var i=0; i < l; i++) {
+					h[list[i][0]].addClass(css[list[i][1]]);
+				}
+			}
+
+			function fixColumnWidth(table,$headers) {
+				var c = table.config;
+				if(c.widthFixed) {
+					var colgroup = $('<colgroup>');
+					$("tr:first td",table.tBodies[0]).each(function() {
+						colgroup.append($('<col>').css('width',$(this).width()));
+					});
+					$(table).prepend(colgroup);
+				};
+			}
+
+			function updateHeaderSortCount(table,sortList) {
+				var c = table.config, l = sortList.length;
+				for(var i=0; i < l; i++) {
+					var s = sortList[i], o = c.headerList[s[0]];
+					o.count = s[1];
+					o.count++;
+				}
+			}
+
+			/* sorting methods */
+			function multisort(table,sortList,cache) {
+
+				if(table.config.debug) { var sortTime = new Date(); }
+
+				var dynamicExp = "var sortWrapper = function(a,b) {", l = sortList.length;
+
+				for(var i=0; i < l; i++) {
+
+					var c = sortList[i][0];
+					var order = sortList[i][1];
+					var s = (getCachedSortType(table.config.parsers,c) == "text") ? ((order == 0) ? "sortText" : "sortTextDesc") : ((order == 0) ? "sortNumeric" : "sortNumericDesc");
+					var e = "e" + i;
+					dynamicExp += "var " + e + " = " + s + "(a[" + c + "],b[" + c + "]); ";
+					dynamicExp += "if(" + e + ") { return " + e + "; } ";
+					dynamicExp += "else { ";
+				}
+
+				// if value is the same keep original order
+				var orgOrderCol = cache.normalized[0].length - 1;
+				dynamicExp += "return a[" + orgOrderCol + "]-b[" + orgOrderCol + "];";
+
+				for(var i=0; i < l; i++) {
+					dynamicExp += "}; ";
+				}
+
+				dynamicExp += "return 0; ";
+				dynamicExp += "}; ";
+
+				eval(dynamicExp);
+
+				cache.normalized.sort(sortWrapper);
+
+				if(table.config.debug) { benchmark("Sorting on " + sortList.toString() + " and dir " + order+ " time:", sortTime); }
+
+				return cache;
+			};
+
+			function sortText(a,b) {
+				return ((a < b) ? -1 : ((a > b) ? 1 : 0));
+			};
+
+			function sortTextDesc(a,b) {
+				return ((b < a) ? -1 : ((b > a) ? 1 : 0));
+			};
+
+	 		function sortNumeric(a,b) {
+				return a-b;
+			};
+
+			function sortNumericDesc(a,b) {
+				return b-a;
+			};
+
+			function getCachedSortType(parsers,i) {
+				return parsers[i].type;
+			};
+
+			/* public methods */
+			this.construct = function(settings) {
+
+				return this.each(function() {
+
+					if(!this.tHead || !this.tBodies) return;
+
+					var $this, $document,$headers, cache, config, shiftDown = 0, sortOrder;
+
+					this.config = {};
+
+					config = $.extend(this.config, $.tablesorter.defaults, settings);
+
+					// store common expression for speed
+					$this = $(this);
+
+					// build headers
+					$headers = buildHeaders(this);
+
+					// try to auto detect column type, and store in tables config
+					this.config.parsers = buildParserCache(this,$headers);
+
+					// build the cache for the tbody cells
+					cache = buildCache(this);
+
+					// get the css class names, could be done elsewhere.
+					var sortCSS = [config.cssDesc,config.cssAsc];
+
+					// fixate columns if the user supplies the widthFixed option
+					fixColumnWidth(this);
+
+					// apply event handling to headers
+					// this is too big, perhaps break it out?
+					$headers.click(function(e) {
+
+						$this.trigger("sortStart");
+
+						var totalRows = ($this[0].tBodies[0] && $this[0].tBodies[0].rows.length) || 0;
+
+						if(!this.sortDisabled && totalRows > 0) {
+
+
+							// store exp, for speed
+							var $cell = $(this);
+
+							// get current column index
+							var i = this.column;
+
+							// get current column sort order
+							this.order = this.count++ % 2;
+
+							// user only wants to sort on one column
+							if(!e[config.sortMultiSortKey]) {
+
+								// flush the sort list
+								config.sortList = [];
+
+								if(config.sortForce != null) {
+									var a = config.sortForce;
+									for(var j=0; j < a.length; j++) {
+										if(a[j][0] != i) {
+											config.sortList.push(a[j]);
+										}
+									}
+								}
+
+								// add column to sort list
+								config.sortList.push([i,this.order]);
+
+							// multi column sorting
+							} else {
+								// the user has clicked on an already sorted column.
+								if(isValueInArray(i,config.sortList)) {
+
+									// reverse the sorting direction for all tables.
+									for(var j=0; j < config.sortList.length; j++) {
+										var s = config.sortList[j], o = config.headerList[s[0]];
+										if(s[0] == i) {
+											o.count = s[1];
+											o.count++;
+											s[1] = o.count % 2;
+										}
+									}
+								} else {
+									// add column to sort list array
+									config.sortList.push([i,this.order]);
+								}
+							};
+							setTimeout(function() {
+								//set css for headers
+								setHeadersCss($this[0],$headers,config.sortList,sortCSS);
+								appendToTable($this[0],multisort($this[0],config.sortList,cache));
+							},1);
+							// stop normal event by returning false
+							return false;
+						}
+					// cancel selection
+					}).mousedown(function() {
+						if(config.cancelSelection) {
+							this.onselectstart = function() {return false};
+							return false;
+						}
+					});
+
+					// apply easy methods that trigger bound events
+					$this.bind("update",function() {
+
+						// rebuild parsers.
+						this.config.parsers = buildParserCache(this,$headers);
+
+						// rebuild the cache map
+						cache = buildCache(this);
+
+					}).bind("sorton",function(e,list) {
+
+						$(this).trigger("sortStart");
+
+						config.sortList = list;
+
+						// update and store the sortlist
+						var sortList = config.sortList;
+
+						// update header count index
+						updateHeaderSortCount(this,sortList);
+
+						//set css for headers
+						setHeadersCss(this,$headers,sortList,sortCSS);
+
+
+						// sort the table and append it to the dom
+						appendToTable(this,multisort(this,sortList,cache));
+
+					}).bind("appendCache",function() {
+
+						appendToTable(this,cache);
+
+					}).bind("applyWidgetId",function(e,id) {
+
+						getWidgetById(id).format(this);
+
+					}).bind("applyWidgets",function() {
+						// apply widgets
+						applyWidget(this);
+					});
+
+					if($.metadata && ($(this).metadata() && $(this).metadata().sortlist)) {
+						config.sortList = $(this).metadata().sortlist;
+					}
+					// if user has supplied a sort list to constructor.
+					if(config.sortList.length > 0) {
+						$this.trigger("sorton",[config.sortList]);
+					}
+
+					// apply widgets
+					applyWidget(this);
+				});
+			};
+
+			this.addParser = function(parser) {
+				var l = parsers.length, a = true;
+				for(var i=0; i < l; i++) {
+					if(parsers[i].id.toLowerCase() == parser.id.toLowerCase()) {
+						a = false;
+					}
+				}
+				if(a) { parsers.push(parser); };
+			};
+
+			this.addWidget = function(widget) {
+				widgets.push(widget);
+			};
+
+			this.formatFloat = function(s) {
+				var i = parseFloat(s);
+				return (isNaN(i)) ? 0 : i;
+			};
+			this.formatInt = function(s) {
+				var i = parseInt(s);
+				return (isNaN(i)) ? 0 : i;
+			};
+
+			this.isDigit = function(s,config) {
+				var DECIMAL = '\\' + config.decimal;
+				var exp = '/(^[+]?0(' + DECIMAL +'0+)?$)|(^([-+]?[1-9][0-9]*)$)|(^([-+]?((0?|[1-9][0-9]*)' + DECIMAL +'(0*[1-9][0-9]*)))$)|(^[-+]?[1-9]+[0-9]*' + DECIMAL +'0+$)/';
+				return RegExp(exp).test($.trim(s));
+			};
+
+			this.clearTableBody = function(table) {
+				if($.browser.msie) {
+					function empty() {
+						while ( this.firstChild ) this.removeChild( this.firstChild );
+					}
+					empty.apply(table.tBodies[0]);
+				} else {
+					table.tBodies[0].innerHTML = "";
+				}
+			};
+		}
+	});
+
+	// extend plugin scope
+	$.fn.extend({
+        tablesorter: $.tablesorter.construct
+	});
+
+	var ts = $.tablesorter;
+
+	// add default parsers
+	ts.addParser({
+		id: "text",
+		is: function(s) {
+			return true;
+		},
+		format: function(s) {
+			return $.trim(s.toLowerCase());
+		},
+		type: "text"
+	});
+
+
+	ts.addParser({
+	    id: "json",
+	    is: function(s) {
+	        return s.startsWith('json:');
+	    },
+	    format: function(s,table,cell) {
+		return evalJSON(s.slice(5));
+	    },
+	  type: "text"
+	});
+
+     ts.addParser({
+		id: "digit",
+		is: function(s,table) {
+			var c = table.config;
+			return $.tablesorter.isDigit(s,c);
+		},
+		format: function(s) {
+			return $.tablesorter.formatFloat(s);
+		},
+		type: "numeric"
+	});
+
+	ts.addParser({
+		id: "currency",
+		is: function(s) {
+			return /^[£$€?.]/.test(s);
+		},
+		format: function(s) {
+			return $.tablesorter.formatFloat(s.replace(new RegExp(/[^0-9.]/g),""));
+		},
+		type: "numeric"
+	});
+
+	ts.addParser({
+		id: "ipAddress",
+		is: function(s) {
+			return /^\d{2,3}[\.]\d{2,3}[\.]\d{2,3}[\.]\d{2,3}$/.test(s);
+		},
+		format: function(s) {
+			var a = s.split("."), r = "", l = a.length;
+			for(var i = 0; i < l; i++) {
+				var item = a[i];
+			   	if(item.length == 2) {
+					r += "0" + item;
+			   	} else {
+					r += item;
+			   	}
+			}
+			return $.tablesorter.formatFloat(r);
+		},
+		type: "numeric"
+	});
+
+	ts.addParser({
+		id: "url",
+		is: function(s) {
+			return /^(https?|ftp|file):\/\/$/.test(s);
+		},
+		format: function(s) {
+			return jQuery.trim(s.replace(new RegExp(/(https?|ftp|file):\/\//),''));
+		},
+		type: "text"
+	});
+
+	ts.addParser({
+		id: "isoDate",
+		is: function(s) {
+			return /^\d{4}[\/-]\d{1,2}[\/-]\d{1,2}$/.test(s);
+		},
+		format: function(s) {
+			return $.tablesorter.formatFloat((s != "") ? new Date(s.replace(new RegExp(/-/g),"/")).getTime() : "0");
+		},
+		type: "numeric"
+	});
+
+	ts.addParser({
+		id: "percent",
+		is: function(s) {
+			return /\%$/.test($.trim(s));
+		},
+		format: function(s) {
+			return $.tablesorter.formatFloat(s.replace(new RegExp(/%/g),""));
+		},
+		type: "numeric"
+	});
+
+	ts.addParser({
+		id: "usLongDate",
+		is: function(s) {
+			return s.match(new RegExp(/^[A-Za-z]{3,10}\.? [0-9]{1,2}, ([0-9]{4}|'?[0-9]{2}) (([0-2]?[0-9]:[0-5][0-9])|([0-1]?[0-9]:[0-5][0-9]\s(AM|PM)))$/)); //'
+		},
+		format: function(s) {
+			return $.tablesorter.formatFloat(new Date(s).getTime());
+		},
+		type: "numeric"
+	});
+
+	ts.addParser({
+		id: "shortDate",
+		is: function(s) {
+			return /\d{1,2}[\/\-]\d{1,2}[\/\-]\d{2,4}/.test(s);
+		},
+		format: function(s,table) {
+			var c = table.config;
+			s = s.replace(/\-/g,"/");
+			if(c.dateFormat == "us") {
+				// reformat the string in ISO format
+				s = s.replace(/(\d{1,2})[\/\-](\d{1,2})[\/\-](\d{4})/, "$3/$1/$2");
+			} else if(c.dateFormat == "uk") {
+				//reformat the string in ISO format
+				s = s.replace(/(\d{1,2})[\/\-](\d{1,2})[\/\-](\d{4})/, "$3/$2/$1");
+			} else if(c.dateFormat == "dd/mm/yy" || c.dateFormat == "dd-mm-yy") {
+				s = s.replace(/(\d{1,2})[\/\-](\d{1,2})[\/\-](\d{2})/, "$1/$2/$3");
+			}
+			return $.tablesorter.formatFloat(new Date(s).getTime());
+		},
+		type: "numeric"
+	});
+
+	ts.addParser({
+	    id: "time",
+	    is: function(s) {
+	        return /^(([0-2]?[0-9]:[0-5][0-9])|([0-1]?[0-9]:[0-5][0-9]\s(am|pm)))$/.test(s);
+	    },
+	    format: function(s) {
+	        return $.tablesorter.formatFloat(new Date("2000/01/01 " + s).getTime());
+	    },
+	  type: "numeric"
+	});
+
+
+	ts.addParser({
+	    id: "metadata",
+	    is: function(s) {
+	        return false;
+	    },
+	    format: function(s,table,cell) {
+			var c = table.config, p = (!c.parserMetadataName) ? 'sortValue' : c.parserMetadataName;
+	        return $(cell).metadata()[p];
+	    },
+	  type: "numeric"
+	});
+
+
+	// add default widgets
+	ts.addWidget({
+		id: "zebra",
+		format: function(table) {
+			if(table.config.debug) { var time = new Date(); }
+			$("tr:visible",table.tBodies[0])
+	        .filter(':even')
+	        .removeClass(table.config.widgetZebra.css[1]).addClass(table.config.widgetZebra.css[0])
+	        .end().filter(':odd')
+	        .removeClass(table.config.widgetZebra.css[0]).addClass(table.config.widgetZebra.css[1]);
+			if(table.config.debug) { $.tablesorter.benchmark("Applying Zebra widget", time); }
+		}
+	});
+})(jQuery);
+
+
+function cubicwebSortValueExtraction(node){
+    return jQuery(node).attr('cubicweb:sortvalue');
+}
+
+Sortable.sortTables = function() {
+   jQuery("table.listing").tablesorter({textExtraction: cubicwebSortValueExtraction});
+}
+
+jQuery(document).ready(Sortable.sortTables);
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/jquery.treeview.css	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,71 @@
+.treeview, .treeview ul { 
+  padding: 0;
+  margin: 0;
+  list-style: none;
+}
+
+.treeview ul {
+  background-color: white;
+  margin-top: 4px;
+}
+
+.treeview .hitarea {
+  background: url(treeview-default.gif) -64px -25px no-repeat;
+  height: 16px;
+  width: 16px;
+  margin-left: -16px; 
+  float: left;
+  cursor: pointer;
+}
+/* fix for IE6 */
+* html .hitarea {
+  display: inline;
+  float:none;
+}
+
+.treeview li { 
+  margin: 0;
+  padding: 3px 0pt 3px 16px;
+}
+
+.treeview a.selected {
+  background-color: #eee;
+}
+
+#treecontrol { margin: 1em 0; display: none; }
+
+.treeview .hover { color: red; cursor: pointer; }
+
+.treeview li { background: url(treeview-default-line.gif) 0 0 no-repeat; }
+.treeview li.collapsable, .treeview li.expandable { background-position: 0 -176px; }
+
+.treeview .expandable-hitarea { background-position: -80px -3px; }
+
+.treeview li.last { background-position: 0 -1766px }
+.treeview li.lastCollapsable, .treeview li.lastExpandable { background-image: url(treeview-default.gif); }  
+.treeview li.lastCollapsable { background-position: 0 -111px }
+.treeview li.lastExpandable { background-position: -32px -67px }
+
+.treeview div.lastCollapsable-hitarea, .treeview div.lastExpandable-hitarea { background-position: 0; }
+
+.treeview-red li { background-image: url(treeview-red-line.gif); }
+.treeview-red .hitarea, .treeview-red li.lastCollapsable, .treeview-red li.lastExpandable { background-image: url(treeview-red.gif); } 
+
+.treeview-black li { background-image: url(treeview-black-line.gif); }
+.treeview-black .hitarea, .treeview-black li.lastCollapsable, .treeview-black li.lastExpandable { background-image: url(treeview-black.gif); }  
+
+.treeview-gray li { background-image: url(treeview-gray-line.gif); }
+.treeview-gray .hitarea, .treeview-gray li.lastCollapsable, .treeview-gray li.lastExpandable { background-image: url(treeview-gray.gif); } 
+
+.treeview-famfamfam li { background-image: url(treeview-famfamfam-line.gif); }
+.treeview-famfamfam .hitarea, .treeview-famfamfam li.lastCollapsable, .treeview-famfamfam li.lastExpandable { background-image: url(treeview-famfamfam.gif); } 
+
+
+.filetree li { padding: 3px 0 2px 16px; }
+.filetree span.folder, .filetree span.file { padding: 1px 0 1px 16px; display: block; }
+.filetree span.folder { background: url(folder.gif) 0 0 no-repeat; }
+.filetree li.expandable span.folder { background: url(folder-closed.gif) 0 0 no-repeat; }
+.filetree span.file { background: url(file.gif) 0 0 no-repeat; }
+
+/* added by adim */
+ul.placeholder { display: none; }
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/jquery.treeview.js	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,15 @@
+/*
+ * Treeview 1.4 - jQuery plugin to hide and show branches of a tree
+ * 
+ * http://bassistance.de/jquery-plugins/jquery-plugin-treeview/
+ * http://docs.jquery.com/Plugins/Treeview
+ *
+ * Copyright (c) 2007 Jörn Zaefferer
+ *
+ * Dual licensed under the MIT and GPL licenses:
+ *   http://www.opensource.org/licenses/mit-license.php
+ *   http://www.gnu.org/licenses/gpl.html
+ *
+ * Revision: $Id: jquery.treeview.js 4684 2008-02-07 19:08:06Z joern.zaefferer $
+ *
+ */;(function($){$.extend($.fn,{swapClass:function(c1,c2){var c1Elements=this.filter('.'+c1);this.filter('.'+c2).removeClass(c2).addClass(c1);c1Elements.removeClass(c1).addClass(c2);return this;},replaceClass:function(c1,c2){return this.filter('.'+c1).removeClass(c1).addClass(c2).end();},hoverClass:function(className){className=className||"hover";return this.hover(function(){$(this).addClass(className);},function(){$(this).removeClass(className);});},heightToggle:function(animated,callback){animated?this.animate({height:"toggle"},animated,callback):this.each(function(){jQuery(this)[jQuery(this).is(":hidden")?"show":"hide"]();if(callback)callback.apply(this,arguments);});},heightHide:function(animated,callback){if(animated){this.animate({height:"hide"},animated,callback);}else{this.hide();if(callback)this.each(callback);}},prepareBranches:function(settings){if(!settings.prerendered){this.filter(":last-child:not(ul)").addClass(CLASSES.last);this.filter((settings.collapsed?"":"."+CLASSES.closed)+":not(."+CLASSES.open+")").find(">ul").hide();}return this.filter(":has(>ul)");},applyClasses:function(settings,toggler){this.filter(":has(>ul):not(:has(>a))").find(">span").click(function(event){toggler.apply($(this).next());}).add($("a",this)).hoverClass();if(!settings.prerendered){this.filter(":has(>ul:hidden)").addClass(CLASSES.expandable).replaceClass(CLASSES.last,CLASSES.lastExpandable);this.not(":has(>ul:hidden)").addClass(CLASSES.collapsable).replaceClass(CLASSES.last,CLASSES.lastCollapsable);this.prepend("<div class=\""+CLASSES.hitarea+"\"/>").find("div."+CLASSES.hitarea).each(function(){var classes="";$.each($(this).parent().attr("class").split(" "),function(){classes+=this+"-hitarea ";});$(this).addClass(classes);});}this.find("div."+CLASSES.hitarea).click(toggler);},treeview:function(settings){settings=$.extend({cookieId:"treeview"},settings);if(settings.add){return this.trigger("add",[settings.add]);}if(settings.toggle){var 
callback=settings.toggle;settings.toggle=function(){return callback.apply($(this).parent()[0],arguments);};}function treeController(tree,control){function handler(filter){return function(){toggler.apply($("div."+CLASSES.hitarea,tree).filter(function(){return filter?$(this).parent("."+filter).length:true;}));return false;};}$("a:eq(0)",control).click(handler(CLASSES.collapsable));$("a:eq(1)",control).click(handler(CLASSES.expandable));$("a:eq(2)",control).click(handler());}function toggler(){$(this).parent().find(">.hitarea").swapClass(CLASSES.collapsableHitarea,CLASSES.expandableHitarea).swapClass(CLASSES.lastCollapsableHitarea,CLASSES.lastExpandableHitarea).end().swapClass(CLASSES.collapsable,CLASSES.expandable).swapClass(CLASSES.lastCollapsable,CLASSES.lastExpandable).find(">ul").heightToggle(settings.animated,settings.toggle);if(settings.unique){$(this).parent().siblings().find(">.hitarea").replaceClass(CLASSES.collapsableHitarea,CLASSES.expandableHitarea).replaceClass(CLASSES.lastCollapsableHitarea,CLASSES.lastExpandableHitarea).end().replaceClass(CLASSES.collapsable,CLASSES.expandable).replaceClass(CLASSES.lastCollapsable,CLASSES.lastExpandable).find(">ul").heightHide(settings.animated,settings.toggle);}}function serialize(){function binary(arg){return arg?1:0;}var data=[];branches.each(function(i,e){data[i]=$(e).is(":has(>ul:visible)")?1:0;});$.cookie(settings.cookieId,data.join(""));}function deserialize(){var stored=$.cookie(settings.cookieId);if(stored){var data=stored.split("");branches.each(function(i,e){$(e).find(">ul")[parseInt(data[i])?"show":"hide"]();});}}this.addClass("treeview");var branches=this.find("li").prepareBranches(settings);switch(settings.persist){case"cookie":var toggleCallback=settings.toggle;settings.toggle=function(){serialize();if(toggleCallback){toggleCallback.apply(this,arguments);}};deserialize();break;case"location":var current=this.find("a").filter(function(){return 
this.href.toLowerCase()==location.href.toLowerCase();});if(current.length){current.addClass("selected").parents("ul, li").add(current.next()).show();}break;}branches.applyClasses(settings,toggler);if(settings.control){treeController(this,settings.control);$(settings.control).show();}return this.bind("add",function(event,branches){$(branches).prev().removeClass(CLASSES.last).removeClass(CLASSES.lastCollapsable).removeClass(CLASSES.lastExpandable).find(">.hitarea").removeClass(CLASSES.lastCollapsableHitarea).removeClass(CLASSES.lastExpandableHitarea);$(branches).find("li").andSelf().prepareBranches(settings).applyClasses(settings,toggler);});}});var CLASSES=$.fn.treeview.classes={open:"open",closed:"closed",expandable:"expandable",expandableHitarea:"expandable-hitarea",lastExpandableHitarea:"lastExpandable-hitarea",collapsable:"collapsable",collapsableHitarea:"collapsable-hitarea",lastCollapsableHitarea:"lastCollapsable-hitarea",lastCollapsable:"lastCollapsable",lastExpandable:"lastExpandable",last:"last",hitarea:"hitarea"};$.fn.Treeview=$.fn.treeview;})(jQuery);
\ No newline at end of file
Binary file web/data/liveclipboard-icon.png has changed
Binary file web/data/loading.gif has changed
Binary file web/data/logo.png has changed
Binary file web/data/logo.xcf has changed
Binary file web/data/mail.gif has changed
Binary file web/data/microformats-button.png has changed
Binary file web/data/microformats-icon.png has changed
Binary file web/data/minus.gif has changed
Binary file web/data/no-check-no-border.png has changed
Binary file web/data/nomail.gif has changed
Binary file web/data/nomail.xcf has changed
Binary file web/data/plus.gif has changed
Binary file web/data/puce.png has changed
Binary file web/data/puce_down.png has changed
Binary file web/data/puce_down_black.png has changed
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/pygments.css	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,61 @@
+/* generated using HtmlFormatter.get_style_defs() */
+
+.c { color: #408080; font-style: italic } /* Comment */
+.err { border: 1px solid #FF0000 } /* Error */
+.k { color: #008000; font-weight: bold } /* Keyword */
+.o { color: #666666 } /* Operator */
+.cm { color: #408080; font-style: italic } /* Comment.Multiline */
+.cp { color: #BC7A00 } /* Comment.Preproc */
+.c1 { color: #408080; font-style: italic } /* Comment.Single */
+.cs { color: #408080; font-style: italic } /* Comment.Special */
+.gd { color: #A00000 } /* Generic.Deleted */
+.ge { font-style: italic } /* Generic.Emph */
+.gr { color: #FF0000 } /* Generic.Error */
+.gh { color: #000080; font-weight: bold } /* Generic.Heading */
+.gi { color: #00A000 } /* Generic.Inserted */
+.go { color: #808080 } /* Generic.Output */
+.gp { color: #000080; font-weight: bold } /* Generic.Prompt */
+.gs { font-weight: bold } /* Generic.Strong */
+.gu { color: #800080; font-weight: bold } /* Generic.Subheading */
+.gt { color: #0040D0 } /* Generic.Traceback */
+.kc { color: #008000; font-weight: bold } /* Keyword.Constant */
+.kd { color: #008000; font-weight: bold } /* Keyword.Declaration */
+.kp { color: #008000 } /* Keyword.Pseudo */
+.kr { color: #008000; font-weight: bold } /* Keyword.Reserved */
+.kt { color: #B00040 } /* Keyword.Type */
+.m { color: #666666 } /* Literal.Number */
+.s { color: #BA2121 } /* Literal.String */
+.na { color: #7D9029 } /* Name.Attribute */
+.nb { color: #008000 } /* Name.Builtin */
+.nc { color: #0000FF; font-weight: bold } /* Name.Class */
+.no { color: #880000 } /* Name.Constant */
+.nd { color: #AA22FF } /* Name.Decorator */
+.ni { color: #999999; font-weight: bold } /* Name.Entity */
+.ne { color: #D2413A; font-weight: bold } /* Name.Exception */
+.nf { color: #0000FF } /* Name.Function */
+.nl { color: #A0A000 } /* Name.Label */
+.nn { color: #0000FF; font-weight: bold } /* Name.Namespace */
+.nt { color: #008000; font-weight: bold } /* Name.Tag */
+.nv { color: #19177C } /* Name.Variable */
+.ow { color: #AA22FF; font-weight: bold } /* Operator.Word */
+.w { color: #bbbbbb } /* Text.Whitespace */
+.mf { color: #666666 } /* Literal.Number.Float */
+.mh { color: #666666 } /* Literal.Number.Hex */
+.mi { color: #666666 } /* Literal.Number.Integer */
+.mo { color: #666666 } /* Literal.Number.Oct */
+.sb { color: #BA2121 } /* Literal.String.Backtick */
+.sc { color: #BA2121 } /* Literal.String.Char */
+.sd { color: #BA2121; font-style: italic } /* Literal.String.Doc */
+.s2 { color: #BA2121 } /* Literal.String.Double */
+.se { color: #BB6622; font-weight: bold } /* Literal.String.Escape */
+.sh { color: #BA2121 } /* Literal.String.Heredoc */
+.si { color: #BB6688; font-weight: bold } /* Literal.String.Interpol */
+.sx { color: #008000 } /* Literal.String.Other */
+.sr { color: #BB6688 } /* Literal.String.Regex */
+.s1 { color: #BA2121 } /* Literal.String.Single */
+.ss { color: #19177C } /* Literal.String.Symbol */
+.bp { color: #008000 } /* Name.Builtin.Pseudo */
+.vc { color: #19177C } /* Name.Variable.Class */
+.vg { color: #19177C } /* Name.Variable.Global */
+.vi { color: #19177C } /* Name.Variable.Instance */
+.il { color: #666666 } /* Literal.Number.Integer.Long */
Binary file web/data/required.png has changed
Binary file web/data/rss-button.png has changed
Binary file web/data/rss.png has changed
Binary file web/data/search.png has changed
Binary file web/data/sendcancel.png has changed
Binary file web/data/sendok.png has changed
Binary file web/data/shadow.gif has changed
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/timeline-bundle.css	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,232 @@
+
+
+/*------------------- Horizontal / Vertical lines ----------------*/
+
+/* style for ethers */
+.timeline-ether-lines{border-color:#666; border-style:dotted; position:absolute;}
+
+.timeline-horizontal .timeline-ether-lines{border-width:0 0 0 1px; height:100%; top: 0; width: 1px;}
+
+.timeline-vertical .timeline-ether-lines{border-width:1px 0 0; height:1px; left: 0; width: 100%;}
+
+
+
+
+/*---------------- Weekends ---------------------------*/
+
+.timeline-ether-weekends{
+	position:absolute;
+	background-color:#FFFFE0;
+}
+
+.timeline-vertical .timeline-ether-weekends{left:0;width:100%;}
+
+.timeline-horizontal .timeline-ether-weekends{top:0; height:100%;}
+
+
+
+/*-------------------------- HIGHLIGHT DECORATORS -------------------*/
+.timeline-highlight-decorator,
+.timeline-highlight-point-decorator{
+	position:absolute;
+	overflow:hidden;
+}
+.timeline-horizontal .timeline-highlight-point-decorator,
+.timeline-horizontal .timeline-highlight-decorator{
+	width:10px;
+	top:0;
+    height:100%;
+}
+
+.timeline-vertical .timeline-highlight-point-decorator,
+.timeline-vertical .timeline-highlight-decorator{
+	height:10px;
+	width:100%;
+	left:0;
+}
+
+.timeline-highlight-decorator{background-color:#FFC080;}
+.timeline-highlight-point-decorator{background-color:#ff5;}
+
+
+
+/*---------------------------- LABELS -------------------------*/
+.timeline-highlight-label{position:absolute;overflow:hidden;font-size:200%;font-weight:bold;color:#999;}
+
+
+/*---------------- VERTICAL LABEL -------------------*/
+.timeline-horizontal .timeline-highlight-label{top:0;height:100%;}
+.timeline-horizontal .timeline-highlight-label td{vertical-align:middle;}
+.timeline-horizontal .timeline-highlight-label-start{text-align:right;}
+.timeline-horizontal .timeline-highlight-label-end{text-align:left;}
+
+
+/*---------------- HORIZONTAL LABEL -------------------*/
+.timeline-vertical .timeline-highlight-label{left:0;width:100%;}
+.timeline-vertical .timeline-highlight-label td{vertical-align:top;}
+.timeline-vertical .timeline-highlight-label-start{text-align:center;}
+.timeline-vertical .timeline-highlight-label-end{text-align:center;}
+
+
+
+/*-------------------------------- DATE LABELS --------------------------------*/
+.timeline-date-label{position:absolute; border:solid #aaa; color:#aaa;	width:5em; height:1.5em;}
+.timeline-date-label-em{color:#000;}
+
+/* horizontal */
+.timeline-horizontal .timeline-date-label{padding-left:2px;}
+.timeline-horizontal .timeline-date-label{border-width:0 0 0 1px;}
+.timeline-horizontal .timeline-date-label-em{height:2em}
+
+/* vertical */
+.timeline-vertical .timeline-date-label{padding-top:2px;}
+.timeline-vertical .timeline-date-label{border-width:1px 0 0;}
+.timeline-vertical .timeline-date-label-em{width:7em}
+
+/*------------------------------- Ether.highlight -------------------------*/
+.timeline-ether-highlight{position:absolute; background-color:#fff;}
+.timeline-horizontal .timeline-ether-highlight{top:2px;}
+.timeline-vertical .timeline-ether-highlight{left:2px;}
+
+
+
+/*------------------------------ EVENTS ------------------------------------*/
+.timeline-event-icon, .timeline-event-label,.timeline-event-tape{
+	position:absolute;
+	cursor:pointer;
+}
+
+.timeline-event-tape,
+.timeline-small-event-tape,
+.timeline-small-event-icon{
+	background-color:#58A0DC;
+	overflow:hidden;
+}
+
+.timeline-small-event-tape,
+.timeline-small-event-icon{
+	position:absolute;
+}
+
+.timeline-event-tape{height:4px;}
+
+.timeline-small-event-tape{height:2px;}
+.timeline-small-event-icon{width:1px; height:6px;}
+ 
+ 
+
+/*--------------------------------- TIMELINE-------------------------*/
+.timeline-ether-bg{width:100%; height:100%;}
+.timeline-band-0 .timeline-ether-bg{background-color:#eee}
+.timeline-band-1 .timeline-ether-bg{background-color:#ddd}
+.timeline-band-2 .timeline-ether-bg{background-color:#ccc}
+.timeline-band-3 .timeline-ether-bg{background-color:#aaa}
+.timeline-duration-event {
+    position: absolute;
+    overflow: hidden;
+    border: 1px solid blue;
+}
+
+.timeline-instant-event2 {
+    position: absolute;
+    overflow: hidden;
+    border-left: 1px solid blue;
+    padding-left: 2px;
+}
+
+.timeline-instant-event {
+    position: absolute;
+    overflow: hidden;
+}
+
+.timeline-event-bubble-title {
+    font-weight: bold;
+    border-bottom: 1px solid #888;
+    margin-bottom: 0.5em;
+}
+
+.timeline-event-bubble-body {
+}
+
+.timeline-event-bubble-wiki {
+    margin:     0.5em;
+    text-align: right;
+    color:      #A0A040;
+}
+.timeline-event-bubble-wiki a {
+    color:      #A0A040;
+}
+
+.timeline-event-bubble-time {
+    color: #aaa;
+}
+
+.timeline-event-bubble-image {
+    float: right;
+    padding-left: 5px;
+    padding-bottom: 5px;
+}.timeline-container {
+    position: relative;
+    overflow: hidden;
+}
+
+.timeline-copyright {
+    position: absolute;
+    bottom: 0px;
+    left: 0px;
+    z-index: 1000;
+    cursor: pointer;
+}
+
+.timeline-message-container {
+    position:   absolute;
+    top:        30%;
+    left:       35%;
+    right:      35%;
+    z-index:    1000;
+    display:    none;
+}
+.timeline-message {
+    font-size:      120%;
+    font-weight:    bold;
+    text-align:     center;
+}
+.timeline-message img {
+    vertical-align: middle;
+}
+
+.timeline-band {
+    position:   absolute;
+    background: #eee;
+    z-index:    10;
+}
+
+.timeline-band-inner {
+    position: relative;
+    width: 100%;
+    height: 100%;
+}
+
+.timeline-band-input {
+    position:   absolute;
+    width:      1em;
+    height:     1em;
+    overflow:   hidden;
+    z-index:    0;
+}
+.timeline-band-input input{
+    width:      0;
+}
+
+.timeline-band-layer {
+    position:   absolute;
+    width:      100%;
+    height:     100%;
+}
+
+.timeline-band-layer-inner {
+    position:   relative;
+    width:      100%;
+    height:     100%;
+}
+
Binary file web/data/timeline/blue-circle.png has changed
Binary file web/data/timeline/bubble-arrows.png has changed
Binary file web/data/timeline/bubble-body-and-arrows.png has changed
Binary file web/data/timeline/bubble-body.png has changed
Binary file web/data/timeline/bubble-bottom-arrow.png has changed
Binary file web/data/timeline/bubble-bottom-left.png has changed
Binary file web/data/timeline/bubble-bottom-right.png has changed
Binary file web/data/timeline/bubble-bottom.png has changed
Binary file web/data/timeline/bubble-left-arrow.png has changed
Binary file web/data/timeline/bubble-left.png has changed
Binary file web/data/timeline/bubble-right-arrow.png has changed
Binary file web/data/timeline/bubble-right.png has changed
Binary file web/data/timeline/bubble-top-arrow.png has changed
Binary file web/data/timeline/bubble-top-left.png has changed
Binary file web/data/timeline/bubble-top-right.png has changed
Binary file web/data/timeline/bubble-top.png has changed
Binary file web/data/timeline/close-button.png has changed
Binary file web/data/timeline/copyright-vertical.png has changed
Binary file web/data/timeline/copyright.png has changed
Binary file web/data/timeline/dark-blue-circle.png has changed
Binary file web/data/timeline/dark-green-circle.png has changed
Binary file web/data/timeline/dark-red-circle.png has changed
Binary file web/data/timeline/dull-blue-circle.png has changed
Binary file web/data/timeline/dull-green-circle.png has changed
Binary file web/data/timeline/dull-red-circle.png has changed
Binary file web/data/timeline/gray-circle.png has changed
Binary file web/data/timeline/green-circle.png has changed
Binary file web/data/timeline/message-bottom-left.png has changed
Binary file web/data/timeline/message-bottom-right.png has changed
Binary file web/data/timeline/message-left.png has changed
Binary file web/data/timeline/message-right.png has changed
Binary file web/data/timeline/message-top-left.png has changed
Binary file web/data/timeline/message-top-right.png has changed
Binary file web/data/timeline/message.png has changed
Binary file web/data/timeline/progress-running.gif has changed
Binary file web/data/timeline/red-circle.png has changed
Binary file web/data/timeline/sundial.png has changed
Binary file web/data/timeline/top-bubble.png has changed
Binary file web/data/treeview-black-line.gif has changed
Binary file web/data/treeview-black.gif has changed
Binary file web/data/treeview-default-line.gif has changed
Binary file web/data/treeview-default.gif has changed
Binary file web/data/treeview-famfamfam-line.gif has changed
Binary file web/data/treeview-famfamfam.gif has changed
Binary file web/data/treeview-gray-line.gif has changed
Binary file web/data/treeview-gray.gif has changed
Binary file web/data/treeview-red-line.gif has changed
Binary file web/data/treeview-red.gif has changed
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/facet.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,563 @@
+"""contains utility functions and some visual component to restrict results of
+a search
+
+:organization: Logilab
+:copyright: 2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from itertools import chain
+from copy import deepcopy
+
+from logilab.mtconverter import html_escape
+
+from logilab.common.graph import has_path
+from logilab.common.decorators import cached
+from logilab.common.compat import all
+
+from rql import parse, nodes
+
+from cubicweb import Unauthorized, typed_eid
+from cubicweb.common.selectors import contextprop_selector, one_has_relation_selector
+from cubicweb.common.registerers import priority_registerer
+from cubicweb.common.appobject import AppRsetObject
+from cubicweb.common.utils import AcceptMixIn
+from cubicweb.web.htmlwidgets import HTMLWidget
+
+## rqlst manipulation functions used by facets ################################
+
def prepare_facets_rqlst(rqlst, args=None):
    """prepare a syntax tree to generate facet filters

    * remove ORDERBY clause
    * cleanup selection (remove everything)
    * undefine unnecessary variables
    * set DISTINCT
    * unset LIMIT/OFFSET

    :param rqlst: the rql syntax tree (union) of the query being filtered
    :param args: optional query arguments, substituted when serializing baserql
    :return: a 2-uple (filtered variable, base rql string)
    """
    if len(rqlst.children) > 1:
        raise NotImplementedError('FIXME: union not yet supported')
    select = rqlst.children[0]
    mainvar = filtered_variable(select)
    select.set_limit(None)
    select.set_offset(None)
    # NOTE: baserql must be serialized *after* LIMIT/OFFSET removal but
    # *before* the sort terms and selection are stripped below — it is the
    # query string handed back to the client for later facet filtering
    baserql = select.as_string(kwargs=args)
    # cleanup sort terms
    select.remove_sort_terms()
    # selection: only vocabulary entity
    for term in select.selection[:]:
        select.remove_selected(term)
    # remove unbound variables which only have some type restriction
    for dvar in select.defined_vars.values():
        if not (dvar is mainvar or dvar.stinfo['relations']):
            select.undefine_variable(dvar)
    # global tree config: DISTINCT, LIMIT, OFFSET
    select.set_distinct(True)
    return mainvar, baserql
+
def filtered_variable(rqlst):
    """return the variable standing for the filtered entities, i.e. the
    variable behind the first selected term of the given select node
    """
    first_vref = rqlst.selection[0].iget_nodes(nodes.VariableRef).next()
    return first_vref.variable
+
+
def get_facet(req, facetid, rqlst, mainvar):
    """instantiate and return the facet registered under `facetid`, bound to
    the given syntax tree and filtered variable
    """
    registry = req.vreg
    return registry.object_by_id('facets', facetid, req, rqlst=rqlst,
                                 filtered_variable=mainvar)
+    
+
def filter_hiddens(w, **kwargs):
    """write an html hidden input for each given name/value pair, value
    being html-escaped
    """
    template = u'<input type="hidden" name="%s" value="%s" />'
    for name, value in kwargs.items():
        w(template % (name, html_escape(value)))
+
+
def _may_be_removed(rel, schema, mainvar):
    """if the given relation may be removed from the tree, return the variable
    on the other side of `mainvar`, else return None
    Conditions:
    * the relation is an attribute selection of the main variable
    * the relation is optional relation linked to the main variable
    * the relation is a mandatory relation linked to the main variable
      without any restriction on the other variable
    """
    lhs, rhs = rel.get_variable_parts()
    rschema = schema.rschema(rel.r_type)
    if lhs.variable is mainvar:
        # mainvar is the subject of the relation
        try:
            ovar = rhs.variable
        except AttributeError:
            # constant restriction
            # XXX: X title LOWER(T) if it makes sense?
            return None
        if rschema.is_final():
            if len(ovar.stinfo['relations']) == 1:
                # attribute selection
                return ovar
            return None
        # for subject relations, the subject cardinality is the relevant one
        opt = 'right'
        cardidx = 0
    elif getattr(rhs, 'variable', None) is mainvar:
        # mainvar is the object of the relation (getattr: rhs may be a constant)
        ovar = lhs.variable
        opt = 'left'
        cardidx = 1
    else:
        # not directly linked to the main variable
        return None
    if rel.optional in (opt, 'both'):
        # optional relation
        return ovar
    if all(rschema.rproperty(s, o, 'cardinality')[cardidx] in '1+'
           for s,o in rschema.iter_rdefs()):
        # mandatory relation without any restriction on the other variable:
        # recursively check every other relation of `ovar` is removable too
        for orel in ovar.stinfo['relations']:
            if rel is orel:
                continue
            if _may_be_removed(orel, schema, ovar) is None:
                return None
        return ovar
    return None
+
+def _add_rtype_relation(rqlst, mainvar, rtype, role):
+    newvar = rqlst.make_variable()
+    if role == 'object':
+        rel = rqlst.add_relation(newvar, rtype, mainvar)
+    else:
+        rel = rqlst.add_relation(mainvar, rtype, newvar)
+    return newvar, rel
+
def _prepare_vocabulary_rqlst(rqlst, mainvar, rtype, role):
    """prepare a syntax tree to generate a filter vocabulary rql using the given
    relation:
    * create a variable to filter on this relation
    * add the relation
    * add the new variable to GROUPBY clause if necessary
    * add the new variable to the selection

    :return: a 2-uple (new variable, new relation node)
    """
    newvar, rel = _add_rtype_relation(rqlst, mainvar, rtype, role)
    # grouped query: the newly selected variable must also appear in GROUPBY
    if rqlst.groupby:
        rqlst.add_group_var(newvar)
    rqlst.add_selected(newvar)
    return newvar, rel
+        
def _remove_relation(rqlst, rel, var):
    """remove a constraint relation from the syntax tree

    `rel` itself is removed, as well as every other relation where `var`
    appears as the subject; relations removed this way whose rhs is *not* a
    constant are returned so the caller may deal with them.
    """
    # remove the relation
    rqlst.remove_node(rel)
    # remove relations where the filtered variable appears on the
    # lhs and rhs is a constant restriction
    extra = []
    for vrel in var.stinfo['relations']:
        if vrel is rel:
            continue
        if vrel.children[0].variable is var:
            # NOTE(review): relations are removed whether or not the rhs is a
            # constant; only non-constant ones are reported in `extra` — the
            # comment above reads narrower than the code, confirm intent
            if not vrel.children[1].get_nodes(nodes.Constant):
                extra.append(vrel)
            rqlst.remove_node(vrel)
    return extra
+
def _set_orderby(rqlst, newvar, sortasc, sortfuncname):
    """add an ORDERBY clause on `newvar`, ascending or descending according
    to `sortasc`; when `sortfuncname` is given, sort on the result of that
    function applied to the variable instead of the raw value
    """
    if sortfuncname is None:
        # plain sort on the variable itself
        rqlst.add_sort_var(newvar, sortasc)
        return
    # sort on sortfuncname(newvar)
    varref = nodes.variable_ref(newvar)
    varref.register_reference()
    func = nodes.Function(sortfuncname)
    func.append(varref)
    rqlst.add_sort_term(nodes.SortTerm(func, sortasc))
+
def insert_attr_select_relation(rqlst, mainvar, rtype, role, attrname,
                                sortfuncname=None, sortasc=True):
    """modify a syntax tree to retrieve only the `attrname` attribute of the
    entities linked to `mainvar` through the `rtype` relation (`mainvar`
    playing the given `role`), results sorted on that attribute

    :return: the variable standing for the relation's target entities
    """
    _cleanup_rqlst(rqlst, mainvar)
    var, mainrel = _prepare_vocabulary_rqlst(rqlst, mainvar, rtype, role)
    # create a variable to select the attribute
    attrvar = rqlst.make_variable()
    attrrel = rqlst.add_relation(var, attrname, attrvar)
    # if query is grouped, we have to add the attribute variable
    # (idiom fix: `attrvar not in` instead of `not attrvar in`)
    if rqlst.groupby and attrvar not in rqlst.groupby:
        rqlst.add_group_var(attrvar)
    _set_orderby(rqlst, attrvar, sortasc, sortfuncname)
    # add attribute variable to selection
    rqlst.add_selected(attrvar)
    # add "is" type restriction if necessary, using the solutions computed
    # for the original query
    if not mainvar.stinfo['typerels']:
        etypes = frozenset(sol[mainvar.name] for sol in rqlst.solutions)
        rqlst.add_type_restriction(mainvar, etypes)
    return var
+
def _cleanup_rqlst(rqlst, mainvar):
    """cleanup tree from unnecessary restriction:
    * attribute selection
    * optional relations linked to the main variable
    * mandatory relations linked to the main variable
    """
    if rqlst.where is None:
        # no restriction at all, nothing to clean up
        return
    schema = rqlst.root.schema
    toremove = set()
    vargraph = deepcopy(rqlst.vargraph) # graph representing links between variable
    # first pass: collect variables that are directly removable
    for rel in rqlst.where.get_nodes(nodes.Relation):
        ovar = _may_be_removed(rel, schema, mainvar)
        if ovar is not None:
            toremove.add(ovar)
    # worklist loop: removing a variable may disconnect its neighbours from
    # the main variable and make them removable in turn
    removed = set()
    while toremove:
        trvar = toremove.pop()
        trvarname = trvar.name
        # remove paths using this variable from the graph
        linkedvars = vargraph.pop(trvarname)
        for ovarname in linkedvars:
            vargraph[ovarname].remove(trvarname)
        # remove relation using this variable
        for rel in chain(trvar.stinfo['relations'], trvar.stinfo['typerels']):
            if rel in removed:
                # already removed
                continue
            rqlst.remove_node(rel)
            removed.add(rel)
        # cleanup groupby clause
        if rqlst.groupby:
            for vref in rqlst.groupby[:]:
                if vref.name == trvarname:
                    rqlst.remove_group_var(vref)
        # we can also remove all variables which are linked to this variable
        # and have no path to the main variable
        for ovarname in linkedvars:
            if not has_path(vargraph, ovarname, mainvar.name):
                toremove.add(rqlst.defined_vars[ovarname])
+
+        
+        
+## base facet classes #########################################################
class AbstractFacet(AcceptMixIn, AppRsetObject):
    """base class for facets: application objects able to restrict the result
    set of a search according to some criterion
    """
    __registerer__ = priority_registerer
    __abstract__ = True
    __registry__ = 'facets'
    # user-configurable properties; NOTE(review): `_` is presumably the i18n
    # marker made available by the framework at import time — confirm
    property_defs = {
        _('visible'): dict(type='Boolean', default=True,
                           help=_('display the box or not')),
        _('order'):   dict(type='Int', default=99,
                           help=_('display order of the box')),
        _('context'): dict(type='String', default=None,
                           # None <-> both
                           vocabulary=(_('tablefilter'), _('facetbox'), None),
                           help=_('context where this box should be displayed')),
        }
    # default values for the visible/context properties above
    visible = True
    context = None
    # whether the facet's vocabulary must be refreshed when the filter changes
    needs_update = False
    # whether the facet body is initially displayed unfolded
    start_unfolded = True

    @classmethod
    def selected(cls, req, rset=None, rqlst=None, context=None,
                 filtered_variable=None):
        """instantiate the facet, either from a result set (regular selection)
        or from a syntax tree (ajax call); `filtered_variable` is the tree
        variable standing for the entities being filtered
        """
        assert rset is not None or rqlst is not None
        assert filtered_variable
        instance = super(AbstractFacet, cls).selected(req, rset)
        #instance = AppRsetObject.selected(req, rset)
        #instance.__class__ = cls
        # facet retrieved using `object_by_id` from an ajax call
        if rset is None:
            instance.init_from_form(rqlst=rqlst)
        # facet retrieved from `select` using the result set to filter
        else:
            instance.init_from_rset()
        instance.filtered_variable = filtered_variable
        return instance

    def init_from_rset(self):
        # XXX union unsupported: only consider the first select of the tree
        self.rqlst = self.rset.syntax_tree().children[0]

    def init_from_form(self, rqlst):
        self.rqlst = rqlst

    @property
    def operator(self):
        """operator used to combine multiple selected values"""
        # OR between selected values by default
        return self.req.form.get(self.id + '_andor', 'OR')

    def get_widget(self):
        """return the widget instance to use to display this facet
        """
        raise NotImplementedError

    def add_rql_restrictions(self):
        """add restriction for this facet into the rql syntax tree"""
        raise NotImplementedError
+    
+
class VocabularyFacet(AbstractFacet):
    """a facet whose content is a finite set of (label, value) pairs"""
    needs_update = True

    def get_widget(self):
        """return the widget instance to use to display this facet

        default implementation expects a .vocabulary method on the facet and
        returns a combobox displaying this vocabulary
        """
        vocab = self.vocabulary()
        if len(vocab) <= 1:
            # nothing worth filtering on
            return None
        widget = FacetVocabularyWidget(self)
        selected = frozenset(typed_eid(eid)
                             for eid in self.req.list_form_param(self.id))
        for label, value in vocab:
            if value is None:
                widget.append(FacetSeparator(label))
            else:
                widget.append(FacetItem(label, value, value in selected))
        return widget

    def vocabulary(self):
        """return vocabulary for this facet, eg a list of 2-uple (label, value)
        """
        raise NotImplementedError

    def possible_values(self):
        """return a list of possible values (as string since it's used to
        compare to a form value in javascript) for this facet
        """
        raise NotImplementedError

    def support_and(self):
        """tell whether AND-ing several selected values makes sense"""
        return False

    def rqlexec(self, rql, args=None, cachekey=None):
        """execute the given rql, returning an empty list on Unauthorized"""
        try:
            return self.req.execute(rql, args, cachekey)
        except Unauthorized:
            return []
+        
+
class RelationFacet(VocabularyFacet):
    """facet filtering entities linked to the filtered variable through the
    relation given by the `rtype`/`role` class attributes; vocabulary values
    are the target entities' eids
    """
    __selectors__ = (one_has_relation_selector, contextprop_selector)
    # class attributes to configure the relation facet
    rtype = None
    role = 'subject'
    target_attr = 'eid'
    # set this to a stored procedure name if you want to sort on the result of
    # this function's result instead of direct value
    sortfunc = None
    # ascendant/descendant sorting
    sortasc = True

    @property
    def title(self):
        # NOTE(review): `display_name` is not imported in this module —
        # presumably injected into builtins by the framework, confirm
        return display_name(self.req, self.rtype, form=self.role)

    def vocabulary(self):
        """return vocabulary for this facet, eg a list of 2-uple (label, value)
        """
        rqlst = self.rqlst
        # snapshot the tree so the vocabulary query doesn't leak into later
        # uses of the same syntax tree (restored in the finally clause)
        rqlst.save_state()
        try:
            mainvar = self.filtered_variable
            insert_attr_select_relation(rqlst, mainvar, self.rtype, self.role,
                                        self.target_attr, self.sortfunc, self.sortasc)
            rset = self.rqlexec(rqlst.as_string(), self.rset.args, self.rset.cachekey)
        finally:
            rqlst.recover()
        return self.rset_vocabulary(rset)

    def possible_values(self):
        """return a list of possible values (as string since it's used to
        compare to a form value in javascript) for this facet
        """
        rqlst = self.rqlst
        rqlst.save_state()
        try:
            _cleanup_rqlst(rqlst, self.filtered_variable)
            _prepare_vocabulary_rqlst(rqlst, self.filtered_variable, self.rtype, self.role)
            return [str(x) for x, in self.rqlexec(rqlst.as_string())]
        finally:
            rqlst.recover()

    def rset_vocabulary(self, rset):
        """turn an (eid, label) result set into a list of
        (translated label, eid) 2-uples
        """
        _ = self.req._
        return [(_(label), eid) for eid, label in rset]

    @cached
    def support_and(self):
        """true if the relation may link several targets to one entity, in
        which case AND-ing selected values makes sense
        """
        rschema = self.schema.rschema(self.rtype)
        if self.role == 'subject':
            cardidx = 0
        else:
            cardidx = 1
        # XXX when called via ajax, no rset to compute possible types
        possibletypes = self.rset and self.rset.column_types(0)
        for subjtype, objtype in rschema.iter_rdefs():
            if possibletypes is not None:
                # restrict the check to relation definitions reachable from
                # the result set's entity types
                if self.role == 'subject':
                    if not subjtype in possibletypes:
                        continue
                elif not objtype in possibletypes:
                    continue
            if rschema.rproperty(subjtype, objtype, 'cardinality')[cardidx] in '+*':
                return True
        return False

    def add_rql_restrictions(self):
        """add restriction for this facet into the rql syntax tree"""
        value = self.req.form.get(self.id)
        if not value:
            # nothing was selected for this facet
            return
        mainvar = self.filtered_variable
        restrvar = _add_rtype_relation(self.rqlst, mainvar, self.rtype, self.role)[0]
        if isinstance(value, basestring):
            # only one value selected
            self.rqlst.add_eid_restriction(restrvar, value)
        elif self.operator == 'OR':
            #  multiple values with OR operator
            # set_distinct only if rtype cardinality is > 1
            if self.support_and():
                self.rqlst.set_distinct(True)
            self.rqlst.add_eid_restriction(restrvar, value)
        else:
            # multiple values with AND operator: one relation per value
            self.rqlst.add_eid_restriction(restrvar, value.pop())
            while value:
                restrvar = _add_rtype_relation(self.rqlst, mainvar, self.rtype, self.role)[0]
                self.rqlst.add_eid_restriction(restrvar, value.pop())
+
+
class AttributeFacet(RelationFacet):
    """facet filtering on the value of an entity attribute"""
    # attribute type
    attrtype = 'String'

    def vocabulary(self):
        """return vocabulary for this facet, eg a list of 2-uple (label, value)
        """
        rqlst = self.rqlst
        rqlst.save_state()
        try:
            mainvar = self.filtered_variable
            _cleanup_rqlst(rqlst, mainvar)
            newvar = _prepare_vocabulary_rqlst(rqlst, mainvar, self.rtype,
                                               self.role)[0]
            _set_orderby(rqlst, newvar, self.sortasc, self.sortfunc)
            rset = self.rqlexec(rqlst.as_string(), self.rset.args,
                                self.rset.cachekey)
        finally:
            # always restore the tree, whether or not the query succeeded
            rqlst.recover()
        return self.rset_vocabulary(rset)

    def rset_vocabulary(self, rset):
        """turn an attribute-values result set into (translated value, value)
        2-uples
        """
        _ = self.req._
        return [(_(value), value) for value, in rset]

    def support_and(self):
        """an entity has a single value for an attribute: AND makes no sense"""
        return False

    def add_rql_restrictions(self):
        """add restriction for this facet into the rql syntax tree"""
        value = self.req.form.get(self.id)
        if not value:
            # nothing was selected for this facet
            return
        self.rqlst.add_constant_restriction(self.filtered_variable, self.rtype,
                                            value, self.attrtype)
+
+
+        
class FilterRQLBuilder(object):
    """called by javascript to get a rql string from filter form"""

    def __init__(self, req):
        self.req = req

    def build_rql(self):#, tablefilter=False):
        """apply each requested facet's restrictions to the base rql and
        return a 2-uple (new rql string, list of facet ids needing an update)
        """
        form = self.req.form
        select = parse(form['baserql']).children[0] # XXX Union unsupported yet
        mainvar = filtered_variable(select)
        toupdate = []
        for facetid in form['facets'].split(','):
            facet = get_facet(self.req, facetid, select, mainvar)
            facet.add_rql_restrictions()
            if facet.needs_update:
                toupdate.append(facetid)
        return select.as_string(), toupdate
+
+        
+## html widgets ###############################################################
+
class FacetVocabularyWidget(HTMLWidget):
    """html widget rendering a facet as a list of selectable items, with an
    optional AND/OR operator selector when the facet supports it
    """

    def __init__(self, facet):
        self.facet = facet
        self.items = []

    def append(self, item):
        """add an item widget (FacetItem / FacetSeparator) to the body"""
        self.items.append(item)

    def _render(self):
        title = html_escape(self.facet.title)
        facetid = html_escape(self.facet.id)
        self.w(u'<div id="%s" class="facet">\n' % facetid)
        # bugfix: facetid is already escaped above, don't escape it twice
        self.w(u'<div class="facetTitle" cubicweb:facetName="%s">%s</div>\n' %
               (facetid, title))
        if self.facet.support_and():
            _ = self.facet.req._
            self.w(u'''<select name="%s" class="radio facetOperator" title="%s">
  <option value="OR">%s</option>
  <option value="AND">%s</option>
</select>''' % (facetid + '_andor', _('and/or between different values'),
                _('OR'), _('AND')))
        if self.facet.start_unfolded:
            cssclass = ''
        else:
            cssclass = ' hidden'
        self.w(u'<div class="facetBody%s">\n' % cssclass)
        for item in self.items:
            item.render(self.w)
        self.w(u'</div>\n')
        self.w(u'</div>\n')
+
+        
class FacetStringWidget(HTMLWidget):
    """html widget rendering a facet as a free-text input"""

    def __init__(self, facet):
        self.facet = facet
        self.value = None

    def _render(self):
        title = html_escape(self.facet.title)
        facetid = html_escape(self.facet.id)
        self.w(u'<div id="%s" class="facet">\n' % facetid)
        self.w(u'<div class="facetTitle" cubicweb:facetName="%s">%s</div>\n' %
               (facetid, title))
        # bugfix: escape the current value before injecting it into the
        # value="..." attribute — it may come from user input (quotes would
        # otherwise break the markup and allow html injection)
        if self.value:
            value = html_escape(self.value)
        else:
            value = u''
        self.w(u'<input name="%s" type="text" value="%s" />\n' % (facetid, value))
        self.w(u'</div>\n')
+
+
class FacetItem(HTMLWidget):
    """html widget for a single selectable facet value, rendered as a
    checkbox-like image followed by the value's label
    """

    selected_img = "http://static.simile.mit.edu/exhibit/api-2.0/images/black-check.png"
    unselected_img = "http://static.simile.mit.edu/exhibit/api-2.0/images/no-check-no-border.png"

    def __init__(self, label, value, selected=False):
        self.label = label
        self.value = value
        self.selected = selected

    def _render(self):
        if self.selected:
            cssclass, imgsrc = ' facetValueSelected', self.selected_img
        else:
            cssclass, imgsrc = '', self.unselected_img
        self.w(u'<div class="facetValue facetCheckBox%s" cubicweb:value="%s">\n'
               % (cssclass, html_escape(unicode(self.value))))
        self.w(u'<img src="%s" />&nbsp;' % imgsrc)
        self.w(u'<a href="javascript: {}">%s</a>' % html_escape(self.label))
        self.w(u'</div>')
+
+
class FacetSeparator(HTMLWidget):
    """dummy widget used to separate groups of values in a facet's body;
    renders nothing by itself
    """
    def __init__(self, label=None):
        self.label = label or u'&nbsp;'

    def _render(self):
        # separators produce no html of their own
        pass
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/form.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,253 @@
+"""abstract form classes for CubicWeb web client
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from simplejson import dumps
+
+from logilab.mtconverter import html_escape
+
+from cubicweb import typed_eid
+from cubicweb.common.selectors import req_form_params_selector
+from cubicweb.common.registerers import accepts_registerer
+from cubicweb.common.view import NOINDEX, NOFOLLOW, View, EntityView, AnyRsetView
+from cubicweb.web import stdmsgs
+from cubicweb.web.httpcache import NoHTTPCacheManager
+from cubicweb.web.controller import redirect_params
+
+
def relation_id(eid, rtype, target, reid):
    """Build the DOM identifier 'subject:rtype:object' for the relation
    `rtype` linking `eid` and `reid`; `target` tells on which side of the
    relation `eid` stands ('subject' or anything else for object).
    """
    if target == 'subject':
        subj, obj = eid, reid
    else:
        subj, obj = reid, eid
    return u'%s:%s:%s' % (subj, rtype, obj)
+
+
class FormMixIn(object):
    """abstract form mix-in

    Provides the machinery shared by all forms: restoring values and
    validation errors saved in session data, error message rendering,
    pending relation insert/delete handling and the standard form buttons.
    """
    category = 'form'
    controller = 'edit'
    # DOM id of the generated <form> element
    domid = 'entityForm'

    # forms should never be cached client side
    http_cache_manager = NoHTTPCacheManager
    add_to_breadcrumbs = False
    # relation schemas that should not appear in the generic relations table
    skip_relations = set()

    def __init__(self, req, rset):
        super(FormMixIn, self).__init__(req, rset)
        self.maxrelitems = self.req.property_value('navigation.related-limit')
        self.maxcomboitems = self.req.property_value('navigation.combobox-limit')
        # double negation coerces the form value to a boolean
        self.force_display = not not req.form.get('__force_display')
        # get validation session data which may have been previously set.
        # deleting validation errors here breaks form reloading (errors are
        # no more available), they have to be deleted by application's publish
        # method on successful commit
        formurl = req.url()
        forminfo = req.get_session_data(formurl)
        if forminfo:
            req.data['formvalues'] = forminfo['values']
            req.data['formerrors'] = errex = forminfo['errors']
            req.data['displayederrors'] = set()
            # if some validation error occured on entity creation, we have to
            # get the original variable name from its attributed eid
            foreid = errex.entity
            for var, eid in forminfo['eidmap'].items():
                if foreid == eid:
                    errex.eid = var
                    break
            else:
                # no mapping found: keep the eid itself
                errex.eid = foreid

    def html_headers(self):
        """return a list of html headers (eg something to be inserted between
        <head> and </head> of the returned page

        by default forms are neither indexed nor followed
        """
        return [NOINDEX, NOFOLLOW]

    def linkable(self):
        """override since forms are usually linked by an action,
        so we don't want them to be listed by appli.possible_views
        """
        return False

    @property
    def limit(self):
        # no limit when the user explicitly asked to see everything
        if self.force_display:
            return None
        # one item more than the display limit so callers can detect truncation
        return self.maxrelitems + 1

    def need_multipart(self, entity, categories=('primary', 'secondary')):
        """return a boolean indicating if form's enctype should be multipart
        """
        for rschema, _, x in entity.relations_by_category(categories):
            if entity.get_widget(rschema, x).need_multipart:
                return True
        # let's find if any of our inlined entities needs multipart
        for rschema, targettypes, x in entity.relations_by_category('inlineview'):
            assert len(targettypes) == 1, \
                   "I'm not able to deal with several targets and inlineview"
            ttype = targettypes[0]
            # instantiate a blank entity of the inlined type to inspect its widgets
            inlined_entity = self.vreg.etype_class(ttype)(self.req, None, None)
            for irschema, _, x in inlined_entity.relations_by_category(categories):
                if inlined_entity.get_widget(irschema, x).need_multipart:
                    return True
        return False

    def error_message(self):
        """return formatted error message

        This method should be called once inlined field errors has been consumed
        """
        errex = self.req.data.get('formerrors')
        # get extra errors
        if errex is not None:
            errormsg = self.req._('please correct the following errors:')
            displayed = self.req.data['displayederrors']
            # only report errors not already displayed next to their field
            errors = sorted((field, err) for field, err in errex.errors.items()
                            if not field in displayed)
            if errors:
                if len(errors) > 1:
                    templstr = '<li>%s</li>\n'
                else:
                    templstr = '&nbsp;%s\n'
                for field, err in errors:
                    if field is None:
                        errormsg += templstr % err
                    else:
                        # chained % formatting: the template is filled with the
                        # 'field: error' string built by the inner expression
                        errormsg += templstr % '%s: %s' % (self.req._(field), err)
                if len(errors) > 1:
                    errormsg = '<ul>%s</ul>' % errormsg
            return u'<div class="errorMessage">%s</div>' % errormsg
        return u''

    def restore_pending_inserts(self, entity, cell=False):
        """used to restore edition page as it was before clicking on
        'search for <some entity type>'
        """
        # NOTE(review): display_name (used below) is not among this module's
        # visible imports -- verify it is actually in scope
        eid = entity.eid
        cell = cell and "div_insert_" or "tr"
        pending_inserts = set(self.req.get_pending_inserts(eid))
        for pendingid in pending_inserts:
            eidfrom, rtype, eidto = pendingid.split(':')
            if typed_eid(eidfrom) == entity.eid: # subject
                label = display_name(self.req, rtype, 'subject')
                reid = eidto
            else:
                label = display_name(self.req, rtype, 'object')
                reid = eidfrom
            jscall = "javascript: cancelPendingInsert('%s', '%s', null, %s);" \
                     % (pendingid, cell, eid)
            rset = self.req.eid_rset(reid)
            eview = self.view('text', rset, row=0)
            # XXX find a clean way to handle baskets
            if rset.description[0][0] == 'Basket':
                eview = '%s (%s)' % (eview, display_name(self.req, 'Basket'))
            yield rtype, pendingid, jscall, label, reid, eview

    def force_display_link(self):
        """invisible link appending __force_display=1 to the current url"""
        return (u'<span class="invisible">'
                u'[<a href="javascript: window.location.href+=\'&amp;__force_display=1\'">%s</a>]'
                u'</span>' % self.req._('view all'))

    def relations_table(self, entity):
        """yields 3-tuples (rtype, target, related_list)
        where <related_list> itself a list of :
          - node_id (will be the entity element's DOM id)
          - appropriate javascript's togglePendingDelete() function call
          - status 'pendingdelete' or ''
          - oneline view of related entity
        """
        eid = entity.eid
        pending_deletes = self.req.get_pending_deletes(eid)
        # XXX (adim) : quick fix to get Folder relations
        for label, rschema, target in entity.srelations_by_category(('generic', 'metadata'), 'add'):
            if rschema in self.skip_relations:
                continue
            relatedrset = entity.related(rschema, target, limit=self.limit)
            toggable_rel_link = self.toggable_relation_link_func(rschema)
            related = []
            for row in xrange(relatedrset.rowcount):
                nodeid = relation_id(eid, rschema, target, relatedrset[row][0])
                # NOTE(review): `label` below shadows the relation label
                # unpacked in the outer loop; consider renaming one of them
                if nodeid in pending_deletes:
                    status = u'pendingDelete'
                    label = '+'
                else:
                    status = u''
                    label = 'x'
                dellink = toggable_rel_link(eid, nodeid, label)
                eview = self.view('oneline', relatedrset, row=row)
                related.append((nodeid, dellink, status, eview))
            yield (rschema, target, related)

    def toggable_relation_link_func(self, rschema):
        """return the link-building function for relation deletion, or a
        function producing an empty string when the user lacks the 'delete'
        permission on `rschema`"""
        if not rschema.has_perm(self.req, 'delete'):
            return lambda x, y, z: u''
        return toggable_relation_link


    def redirect_url(self, entity=None):
        """return a url to use as next direction if there are some information
        specified in current form params, else return the result the reset_url
        method which should be defined in concrete classes
        """
        rparams = redirect_params(self.req.form)
        if rparams:
            return self.build_url('view', **rparams)
        return self.reset_url(entity)

    def reset_url(self, entity):
        raise NotImplementedError('implement me in concrete classes')

    # templates for plain submit buttons and javascript-action buttons
    BUTTON_STR = u'<input class="validateButton" type="submit" name="%s" value="%s" tabindex="%s"/>'
    ACTION_SUBMIT_STR = u'<input class="validateButton" type="button" onclick="postForm(\'%s\', \'%s\', \'%s\')" value="%s" tabindex="%s"/>'

    def button_ok(self, label=None, tabindex=None):
        """standard 'ok' submit button"""
        label = self.req._(label or stdmsgs.BUTTON_OK).capitalize()
        return self.BUTTON_STR % ('defaultsubmit', label, tabindex or 2)

    def button_apply(self, label=None, tabindex=None):
        """'apply' button posting the form with the __action_apply action"""
        label = self.req._(label or stdmsgs.BUTTON_APPLY).capitalize()
        return self.ACTION_SUBMIT_STR % ('__action_apply', label, self.domid, label, tabindex or 3)

    def button_delete(self, label=None, tabindex=None):
        """'delete' button posting the form with the __action_delete action"""
        label = self.req._(label or stdmsgs.BUTTON_DELETE).capitalize()
        return self.ACTION_SUBMIT_STR % ('__action_delete', label, self.domid, label, tabindex or 3)

    def button_cancel(self, label=None, tabindex=None):
        """'cancel' button posting the form with the __action_cancel action"""
        label = self.req._(label or stdmsgs.BUTTON_CANCEL).capitalize()
        return self.ACTION_SUBMIT_STR % ('__action_cancel', label, self.domid, label, tabindex or 4)

    def button_reset(self, label=None, tabindex=None):
        # NOTE(review): reuses the BUTTON_CANCEL label like button_cancel
        # above; a dedicated BUTTON_RESET message may be intended -- confirm
        label = self.req._(label or stdmsgs.BUTTON_CANCEL).capitalize()
        return u'<input class="validateButton" type="reset" value="%s" tabindex="%s"/>' % (
            label, tabindex or 4)
+        
def toggable_relation_link(eid, nodeid, label='x'):
    """Return the html link toggling the 'pending delete' state of the
    relation node `nodeid` for entity `eid`.
    """
    jscall = u"javascript: togglePendingDelete('%s', %s);" % (
        nodeid, html_escape(dumps(eid)))
    return u'[<a class="handle" href="%s" id="handle%s">%s</a>]' % (
        jscall, nodeid, label)
+
+
class Form(FormMixIn, View):
    """base class for forms. Apply by default according to request form
    parameters specified using the `form_params` class attribute which
    should list necessary parameters in the form to be accepted.
    """
    __registerer__ = accepts_registerer
    __select__ = classmethod(req_form_params_selector)

    # names of request parameters that must be present for this form
    # to be selected (used by req_form_params_selector)
    form_params = ()
+
class EntityForm(FormMixIn, EntityView):
    """base class for forms applying on an entity (i.e. uniform result set)
    """
    # rendering helpers come from FormMixIn, entity handling from EntityView
+
class AnyRsetForm(FormMixIn, AnyRsetView):
    """base class for forms applying on any result set (possibly empty)
    """
    # rendering helpers come from FormMixIn, result set handling from AnyRsetView
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/htmlwidgets.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,314 @@
+"""html widgets
+
+those are in cubicweb.common since we need to know available widgets at schema
+serialization time
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+
+from logilab.mtconverter import html_escape
+
+from cubicweb.common.utils import UStringIO
+from cubicweb.common.uilib import toggle_action
+
class HTMLWidget(object):
    """Base class for html widgets.

    Subclasses implement `_render` and emit markup through `self.w`;
    `render` either writes to a caller-supplied stream or accumulates the
    output in an internal buffer and returns it as a string.
    """

    def _initialize_stream(self, w=None):
        # use the caller's write callable, else buffer output internally
        if not w:
            self._stream = UStringIO()
            w = self._stream.write
        self.w = w

    def _render(self):
        raise NotImplementedError

    def render(self, w=None):
        self._initialize_stream(w)
        self._render()
        # only return the accumulated markup when we own the buffer
        if w is None:
            return self._stream.getvalue()

    def is_empty(self):
        return False
+
+
class BoxWidget(HTMLWidget):
    """Generic box widget: an optional title bar followed by a list (or a
    free-form div) of item widgets.

    :param title: the box title, escaped unless `escape` is false
    :param id: optional DOM id of the outermost div
    :param items: initial list of item widgets
    :param _class: css class of the outermost div
    :param islist: when true, items are wrapped in a <ul> element
    :param shadow: when true, a shadow div is appended after the content
    :param escape: whether to html-escape the title
    """
    def __init__(self, title, id, items=None, _class="boxFrame",
                 islist=True, shadow=True, escape=True):
        self.title = title
        self.id = id
        self.items = items or []
        self._class = _class
        self.islist = islist
        self.shadow = shadow
        self.escape = escape

    def __len__(self):
        return len(self.items)

    def is_empty(self):
        # a box without items renders no content div at all
        return len(self) == 0

    def append(self, item):
        self.items.append(item)

    main_div_class = 'boxContent'
    listing_class = 'boxListing'

    def box_begin_content(self):
        """open the content div (and the item list when `islist`)"""
        self.w(u'<div class="%s">\n' % self.main_div_class)
        if self.islist:
            self.w(u'<ul class="%s">' % self.listing_class)

    def box_end_content(self):
        """close the content div, optionally appending the shadow div"""
        if self.islist:
            self.w(u'</ul>\n')
        self.w(u'</div>\n')
        if self.shadow:
            self.w(u'<div class="shadow">&nbsp;</div>')

    def _render(self):
        if self.id:
            self.w(u'<div class="%s" id="%s">' % (self._class, self.id))
        else:
            self.w(u'<div class="%s">' % self._class)
        if self.title:
            if self.escape:
                title = '<span>%s</span>' % html_escape(self.title)
            else:
                title = '<span>%s</span>' % self.title
            self.w(u'<div class="boxTitle">%s</div>' % title)
        if self.items:
            # content markup is only emitted when there is something to show
            self.box_begin_content()
            for item in self.items:
                item.render(self.w)
            self.box_end_content()
        self.w(u'</div>')
+
+
class SideBoxWidget(BoxWidget):
    """CubicWeb's standard sidebox: an unshadowed box using the 'sideBox'
    css class"""
    main_div_class = u'sideBoxBody'
    listing_class = ''

    def __init__(self, title, id=None):
        super(SideBoxWidget, self).__init__(title, id=id,
                                            _class='sideBox', shadow=False)
+
+                                            
class MenuWidget(BoxWidget):
    """box variant used to render menus; unlike BoxWidget it never draws
    a shadow after the content"""
    main_div_class = 'menuContent'
    listing_class = 'menuListing'

    def box_end_content(self):
        # same closing markup as BoxWidget, minus the shadow div
        if self.islist:
            self.w(u'</ul>\n')
        self.w(u'</div>\n')
+    
+
class RawBoxItem(HTMLWidget):
    """a simple box item displaying raw data"""
    def __init__(self, label, liclass=None):
        self.label = label
        self.liclass = liclass

    def _start_li(self):
        """return the opening <li> tag, carrying the item's css class if any"""
        if self.liclass is None:
            return u'<li>'
        # removed unreachable 'return self.label' that followed this branch
        return u'<li class="%s">' % self.liclass

    def _render(self):
        self.w(u'%s%s</li>' % (self._start_li(), self.label))
+
+
class BoxMenu(RawBoxItem):
    """a menu in a box: a toggle link followed by an (initially hidden)
    list of sub-items"""
    link_class = 'boxMenu'

    def __init__(self, label, items=None, isitem=True, liclass=None, ident=None,
                 link_class=None):
        super(BoxMenu, self).__init__(label, liclass)
        self.items = items or []
        self.isitem = isitem
        # DOM id used by the javascript toggle action; derived from the
        # label when not explicitly provided
        self.ident = ident or u'boxmenu_%s' % label.replace(' ', '_').replace("'", '')
        if link_class:
            self.link_class = link_class

    def append(self, item):
        self.items.append(item)

    def _begin_menu(self, ident):
        # the submenu starts hidden; the toggle link reveals it
        self.w(u'<ul id="%s" class="hidden">' % ident)

    def _end_menu(self):
        self.w(u'</ul>')

    def _render(self):
        if self.isitem:
            self.w(self._start_li())
        ident = self.ident
        self.w(u'<a href="%s" class="%s">%s</a>' % (
            toggle_action(ident), self.link_class, self.label))
        self._begin_menu(ident)
        for item in self.items:
            item.render(self.w)
        self._end_menu()
        if self.isitem:
            self.w(u'</li>')
+
+
class PopupBoxMenu(BoxMenu):
    """BoxMenu variant wrapped in divs with popup-specific css classes so
    it behaves like a popup menu"""
    link_class = 'popupMenu'

    def _begin_menu(self, menuid):
        self.w(u'<div class="popupWrapper"><div id="%s" class="hidden popup"><ul>' % menuid)

    def _end_menu(self):
        self.w(u'</ul></div></div>')
+
+
class BoxField(HTMLWidget):
    """a label/value pair displayed as a box item

    NOTE(review): neither label nor value is escaped here; callers are
    expected to pass safe html -- confirm at call sites.
    """
    def __init__(self, label, value):
        self.label = label
        self.value = value

    def _render(self):
        markup = (u'<li><div><span class="label">%s</span>&nbsp;'
                  u'<span class="value">%s</span></div></li>'
                  % (self.label, self.value))
        self.w(markup)
+
class BoxSeparator(HTMLWidget):
    """horizontal rule separating two groups of box items"""

    def _render(self):
        # close the current list, draw the rule, reopen a list
        self.w(u'</ul><hr class="boxSeparator"/><ul>')
+
+
class BoxLink(HTMLWidget):
    """hyperlink rendered as a box list item"""

    def __init__(self, href, label, _class='', title='', ident='', escape=False):
        self.href = href
        if escape:
            label = html_escape(label)
        self.label = label
        self._class = _class or ''
        self.title = title
        self.ident = ident

    def _render(self):
        # href and title are always escaped; label escaping is decided
        # at construction time
        anchor = u'<a href="%s" title="%s">%s</a>' % (
            html_escape(self.href), html_escape(self.title), self.label)
        if self.ident:
            self.w(u'<li id="%s" class="%s">%s</li>\n'
                   % (self.ident, self._class, anchor))
        else:
            self.w(u'<li class="%s">%s</li>\n' % (self._class, anchor))
+
+
class BoxHtml(HTMLWidget):
    """box item wrapping an arbitrary chunk of raw html"""

    def __init__(self, rawhtml):
        self.rawhtml = rawhtml

    def _render(self):
        # emitted verbatim, no escaping
        self.w(self.rawhtml)
+
+
class TableColumn(object):
    """Description of one column of a TableWidget: header name, cell
    renderers and extra html attributes applied to its cells.
    """

    def __init__(self, name, rset_sortcol):
        """
        :param name: the column's name
        :param rset_sortcol: the model's column used to sort this column view
        """
        self.name = name
        self.rset_sortcol = rset_sortcol
        self.cellrenderers = []
        self.cell_attrs = {}

    def append_renderer(self, cellvid, colindex):
        # XXX (adim) : why do we need colindex here ?
        self.cellrenderers.append((cellvid, colindex))

    def add_attr(self, attr, value):
        self.cell_attrs[attr] = value
+
+
class TableWidget(HTMLWidget):
    """Render a sortable html table from a model object.

    NOTE(review): the model is expected to provide `get_rows()`,
    `render(cellvid, rowindex, colindex, w)` and `sortvalue(rowindex, col)`
    -- inferred from the calls below, confirm against the model class.
    """

    # javascript handlers toggling the row highlight css class
    highlight = "onmouseover=\"addElementClass(this, 'highlighted');\" " \
                "onmouseout=\"removeElementClass(this, 'highlighted');\""

    def __init__(self, model):
        self.model = model
        self.columns = []

    def append_column(self, column):
        """
        :type column: TableColumn
        """
        self.columns.append(column)

    def _render(self):
        self.w(u'<table class="listing">')
        self.w(u'<thead>')
        self.w(u'<tr class="header">')
        for column in self.columns:
            attrs = ('%s="%s"' % (name, value) for name, value in column.cell_attrs.iteritems())
            self.w(u'<th %s>%s</th>' % (' '.join(attrs), column.name))
        self.w(u'</tr>')
        self.w(u'</thead><tbody>')
        for rowindex, row in enumerate(self.model.get_rows()):
            # alternating odd/even row styling
            klass = (rowindex%2==1) and 'odd' or 'even'
            self.w(u'<tr class="%s" %s>' % (klass, self.highlight))
            for column, sortvalue in self.itercols(rowindex):
                attrs = dict(column.cell_attrs)
                # client-side sorting hook, consumed by the javascript table code
                attrs["cubicweb:sortvalue"] = 'json:' + sortvalue
                attrs = ('%s="%s"' % (name, value) for name, value in attrs.iteritems())
                self.w(u'<td %s>' % (' '.join(attrs)))
                for cellvid, colindex in column.cellrenderers:
                    self.model.render(cellvid, rowindex, colindex, w=self.w)
                self.w(u'</td>')
            self.w(u'</tr>')
        self.w(u'</tbody>')
        self.w(u'</table>')

    def itercols(self, rowindex):
        """yield (column, sortvalue) pairs for the given row"""
        for column in self.columns:
            yield column, self.model.sortvalue(rowindex, column.rset_sortcol)
+
+
class ProgressBarWidget(HTMLWidget):
    """display a progress bar widget built from done/todo/total counters"""

    def __init__(self, done, todo, total):
        self.done = done
        self.todo = todo
        self.total = total

    def _render(self):
        try:
            raw_percent = self.done * 100. / self.total
        except ZeroDivisionError:
            raw_percent = 0
        # the bar width is clamped to [0, 100] while the tooltip keeps
        # showing the raw percentage
        width = raw_percent
        if width > 100:
            color = 'done'
            width = 100
        elif self.todo + self.done > self.total:
            color = 'overpassed'
        else:
            color = 'inprogress'
        if width < 0:
            width = 0
        self.w(u'<div class="progressbarback" title="%i %%">' % raw_percent)
        self.w(u'<div class="progressbar %s" style="width: %spx; align: left;" ></div>' % (color, width))
        self.w(u'</div>')
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/httpcache.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,131 @@
+"""HTTP cache managers
+
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from mx.DateTime import DateTimeFromTicks, now, gmtime
+
+# time delta usable to convert localized time to GMT time
+GMTOFFSET = - (now() - gmtime())
+
class NoHTTPCacheManager(object):
    """default cache manager: disable client-side caching by sending a
    'no-cache' cache control policy"""

    def __init__(self, view):
        self.view = view
        self.req = view.req
        self.rset = view.rset

    def set_headers(self):
        """emit the response headers for this caching policy"""
        self.req.set_header('Cache-control', 'no-cache')
+
class MaxAgeHTTPCacheManager(NoHTTPCacheManager):
    """max-age cache manager: set max-age cache control policy, with max-age
    specified with the `cache_max_age` attribute of the view
    """
    def set_headers(self):
        policy = 'max-age=%s' % self.view.cache_max_age
        self.req.set_header('Cache-control', policy)
+
class EtagHTTPCacheManager(NoHTTPCacheManager):
    """etag based cache manager for startup views

    * etag is generated using the view name and the user's groups
    * set policy to 'must-revalidate' and expires to the current time to force
      revalidation on each request
    """
    # GMT time required
    date_format = "%a, %d %b %Y %H:%M:%S GMT"

    def etag(self):
        # groups are sorted to produce a stable etag for a given user
        return self.view.id + '/' + ','.join(sorted(self.req.user.groups))

    def max_age(self):
        # 0 to actually force revalidation
        return 0

    def last_modified(self):
        return self.view.last_modified()

    def set_headers(self):
        req = self.req
        try:
            req.set_header('Etag', '"%s"' % self.etag())
        except NoEtag:
            # no etag could be computed, fall back to the no-cache policy
            self.req.set_header('Cache-control', 'no-cache')
            return
        req.set_header('Cache-control',
                       'must-revalidate;max-age=%s' % self.max_age())
        mdate = self.last_modified()
        # mdate is expected to be GMT (see last_modified docstring)
        req.set_header('Last-modified', mdate.strftime(self.date_format))
+
class EntityHTTPCacheManager(EtagHTTPCacheManager):
    """etag based cache manager for view displaying a single entity

    * etag is generated using entity's eid, the view name and the user's groups
    * get last modified time from the entity definition (this may not be the
      entity's modification time since a view may include some related entities
      with a modification time to consider) using the `last_modified` method
    """
    def etag(self):
        if self.rset is None or len(self.rset) == 0: # entity startup view for instance
            return super(EntityHTTPCacheManager, self).etag()
        if len(self.rset) > 1:
            # several entities displayed: no single-entity etag can be computed
            raise NoEtag()
        etag = super(EntityHTTPCacheManager, self).etag()
        eid = self.rset[0][0]
        # ownership may change what the user sees, so it is part of the etag
        if self.req.user.owns(eid):
            etag += ',owners'
        return str(eid) + '/' + etag
+
+
class NoEtag(Exception):
    """raised by cache managers when an etag can't be generated for the
    current view (e.g. a multi-row result set)"""
+
+__all__ = ('GMTOFFSET',
+           'NoHTTPCacheManager', 'MaxAgeHTTPCacheManager',
+           'EtagHTTPCacheManager', 'EntityHTTPCacheManager')
+
+# monkey patching, so view doesn't depends on this module and we have all
+# http cache related logic here
+
+from cubicweb.common import view
+
def set_http_cache_headers(self):
    """delegate http cache header generation to the view's cache manager"""
    self.http_cache_manager(self).set_headers()
view.View.set_http_cache_headers = set_http_cache_headers
+
def last_modified(self):
    """return the date/time where this view should be considered as
    modified. Take care of possible related objects modifications.

    /!\ must return GMT time /!\
    """
    # XXX check view module's file modification time in dev mod ?
    ctime = gmtime()
    if self.cache_max_age:
        mtime = self.req.header_if_modified_since()
        if mtime:
            # NOTE(review): assumes the mx.DateTime delta's `seconds`
            # attribute holds the whole difference -- confirm for deltas
            # longer than a day
            if (ctime - mtime).seconds > self.cache_max_age:
                mtime = ctime
        else:
            mtime = ctime
    else:
        mtime = ctime
    # mtime = ctime will force page rerendering
    return mtime
view.View.last_modified = last_modified
+
+# configure default caching
+view.View.http_cache_manager = NoHTTPCacheManager
+# max-age=0 to actually force revalidation when needed
+view.View.cache_max_age = 0
+
+
+view.EntityView.http_cache_manager = EntityHTTPCacheManager
+
+view.StartupView.http_cache_manager = MaxAgeHTTPCacheManager
+view.StartupView.cache_max_age = 60*60*2 # stay in http cache for 2 hours by default 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/request.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,648 @@
+"""abstract class for http request
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+import Cookie
+import sha
+import time
+import random
+import base64
+from urlparse import urlsplit
+from itertools import count
+
+from rql.utils import rqlvar_maker
+
+from logilab.common.decorators import cached
+
+# XXX move _MARKER here once AppObject.external_resource has been removed
+from cubicweb.dbapi import DBAPIRequest
+from cubicweb.common.appobject import _MARKER 
+from cubicweb.common.mail import header
+from cubicweb.common.uilib import remove_html_tags
+from cubicweb.common.utils import SizeConstrainedList, HTMLHead
+from cubicweb.web import (INTERNAL_FIELD_VALUE, LOGGER, NothingToEdit, RequestError,
+                       StatusResponse)
+
+
+def list_form_param(form, param, pop=False):
+    """get param from form parameters and return its value as a list,
+    skipping internal markers if any
+
+    * if the parameter isn't defined, return an empty list
+    * if the parameter is a single (unicode) value, return a list
+      containing that value
+    * if the parameter is already a list or tuple, just skip internal
+      markers
+
+    if pop is True, the parameter is removed from the form dictionnary
+    """
+    if pop:
+        try:
+            value = form.pop(param)
+        except KeyError:
+            return []
+    else:
+        value = form.get(param, ())
+    if value is None:
+        value = ()
+    elif not isinstance(value, (list, tuple)):
+        value = [value]
+    return [v for v in value if v != INTERNAL_FIELD_VALUE]
+
+
+
+class CubicWebRequestBase(DBAPIRequest):
+    """abstract HTTP request, should be extended according to the HTTP backend"""    
+    
+    def __init__(self, vreg, https, form=None):
+        """initialize the request from the vobjects registry, the https flag
+        and an optional dictionary of form parameters"""
+        super(CubicWebRequestBase, self).__init__(vreg)
+        self.message = None
+        self.authmode = vreg.config['auth-mode']
+        self.https = https
+        # raw html headers that can be added from any view
+        self.html_headers = HTMLHead()
+        # form parameters
+        self.setup_params(form)
+        # dictionary that may be used to store request data that has to be
+        # shared among various components used to publish the request (views,
+        # controller, application...)
+        self.data = {}
+        # search state: 'normal' or 'linksearch' (eg searching for an object
+        # to create a relation with another)
+        self.search_state = ('normal',) 
+        # tabindex generator
+        self.tabindexgen = count()
+        self.next_tabindex = self.tabindexgen.next
+        # page id, set by htmlheader template
+        self.pageid = None
+        self.varmaker = rqlvar_maker()
+        self.datadir_url = self._datadir_url()
+
+    def set_connection(self, cnx, user=None):
+        """method called by the session handler when the user is authenticated
+        or an anonymous connection is open
+        """
+        super(CubicWebRequestBase, self).set_connection(cnx, user)
+        # get request language:
+        vreg = self.vreg
+        if self.user:
+            try:
+                # 1. user specified language
+                lang = vreg.typed_value('ui.language',
+                                        self.user.properties['ui.language'])
+                self.set_language(lang)
+                return
+            except KeyError, ex:
+                pass
+        if vreg.config['language-negociation']:
+            # 2. http negociated language
+            for lang in self.header_accept_language():
+                if lang in self.translations:
+                    self.set_language(lang)
+                    return
+        # 3. default language
+        self.set_default_language(vreg)
+            
+    def set_language(self, lang):
+        self._ = self.__ = self.translations[lang]
+        self.lang = lang
+        self.debug('request language: %s', lang)
+        
+    # input form parameters management ########################################
+    
+    # common form parameters which should be protected against html values
+    # XXX can't add 'eid' for instance since it may be multivalued
+    # dont put rql as well, if query contains < and > it will be corrupted!
+    no_script_form_params = set(('vid', 
+                                 'etype', 
+                                 'vtitle', 'title',
+                                 '__message',
+                                 '__redirectvid', '__redirectrql'))
+        
+    def setup_params(self, params):
+        """store form parameters on the request, unicode-decoding raw strings
+        and html-cleaning parameters listed in `no_script_form_params`
+
+        WARNING: we're intentionally leaving INTERNAL_FIELD_VALUE here
+
+        subclasses should override to parse `params` according to the web
+        front-end's native form representation
+        """
+        if params is None:
+            params = {}
+        self.form = params
+        encoding = self.encoding
+        for k, v in params.items():
+            if isinstance(v, (tuple, list)):
+                v = [unicode(x, encoding) for x in v]
+                if len(v) == 1:
+                    v = v[0]
+            if k in self.no_script_form_params:
+                v = self.no_script_form_param(k, value=v)
+            if isinstance(v, str):
+                v = unicode(v, encoding)
+            if k == '__message':
+                # messages are displayed once then dropped from the form
+                self.set_message(v)
+                del self.form[k]
+            else:
+                self.form[k] = v
+    
+    def no_script_form_param(self, param, default=None, value=None):
+        """ensure there is no script in a user form param
+
+        by default return a cleaned string instead of raising a security
+        exception
+
+        this method should be called on every user input (form at least) fields
+        that are at some point inserted in a generated html page to protect
+        against script kiddies
+        """
+        if value is None:
+            value = self.form.get(param, default)
+        if not value is default and value:
+            # safety belt for strange urls like http://...?vtitle=yo&vtitle=yo
+            if isinstance(value, (list, tuple)):
+                self.error('no_script_form_param got a list (%s). Who generated the URL ?',
+                           repr(value))
+                value = value[0]
+            return remove_html_tags(value)
+        return value
+        
+    def list_form_param(self, param, form=None, pop=False):
+        """get param from form parameters and return its value as a list,
+        skipping internal markers if any
+        
+        * if the parameter isn't defined, return an empty list
+        * if the parameter is a single (unicode) value, return a list
+          containing that value
+        * if the parameter is already a list or tuple, just skip internal
+          markers
+
+        if pop is True, the parameter is removed from the form dictionnary
+        """
+        if form is None:
+            form = self.form
+        return list_form_param(form, param, pop)            
+    
+
+    def reset_headers(self):
+        """used by AutomaticWebTest to clear html headers between tests on
+        the same resultset
+        """
+        self.html_headers = HTMLHead()
+        return self
+
+    # web state helpers #######################################################
+    
+    def set_message(self, msg):
+        """set a (unicode) message to be displayed in the next rendered page"""
+        assert isinstance(msg, unicode)
+        self.message = msg
+    
+    def update_search_state(self):
+        """update the current search state from the '__mode' form parameter,
+        falling back to the value stored in session data"""
+        searchstate = self.form.get('__mode')
+        if not searchstate:
+            searchstate = self.get_session_data('search_state', 'normal')
+        self.set_search_state(searchstate)
+
+    def set_search_state(self, searchstate):
+        """set a new search state and remember it in the session
+
+        `searchstate` is either None / 'normal', or a 4-part colon-separated
+        string describing a link search
+        """
+        if searchstate is None or searchstate == 'normal':
+            self.search_state = (searchstate or 'normal',)
+        else:
+            self.search_state = ('linksearch', searchstate.split(':'))
+            assert len(self.search_state[-1]) == 4
+        self.set_session_data('search_state', searchstate)
+
+    def update_breadcrumbs(self):
+        """stores the last visited page in session data (only while in
+        'normal' search state)"""
+        searchstate = self.get_session_data('search_state')
+        if searchstate == 'normal':
+            breadcrumbs = self.get_session_data('breadcrumbs', None)
+            if breadcrumbs is None:
+                # keep at most the 10 most recently visited pages
+                breadcrumbs = SizeConstrainedList(10)
+                self.set_session_data('breadcrumbs', breadcrumbs)
+            breadcrumbs.append(self.url())
+
+    def last_visited_page(self):
+        """return (and consume) the latest breadcrumb, defaulting to the
+        application's base url"""
+        breadcrumbs = self.get_session_data('breadcrumbs', None)
+        if breadcrumbs:
+            return breadcrumbs.pop()
+        return self.base_url()
+
+    def register_onetime_callback(self, func, *args):
+        """store `func` in page data under a unique generated name and return
+        that name; the stored callback unregisters itself after its first
+        invocation
+        """
+        # NOTE(review): the `sha` module is deprecated in favor of hashlib
+        # (python >= 2.5) -- confirm the minimal supported python version
+        cbname = 'cb_%s' % (
+            sha.sha('%s%s%s%s' % (time.time(), func.__name__,
+                                  random.random(), 
+                                  self.user.login)).hexdigest())
+        def _cb(req):
+            try:
+                ret = func(req, *args)
+            except TypeError:
+                # backward compat for callbacks not taking the request
+                from warnings import warn
+                warn('user callback should now take request as argument')
+                ret = func(*args)            
+            self.unregister_callback(self.pageid, cbname)
+            return ret
+        self.set_page_data(cbname, _cb)
+        return cbname
+    
+    def unregister_callback(self, pageid, cbname):
+        """remove the `cbname` callback from the page data"""
+        assert pageid is not None
+        assert cbname.startswith('cb_')
+        self.info('unregistering callback %s for pageid %s', cbname, pageid)
+        self.del_page_data(cbname)
+
+    def clear_user_callbacks(self):
+        """drop every registered callback ('cb_' keys) from session data"""
+        if self.cnx is not None:
+            sessdata = self.session_data()
+            callbacks = [key for key in sessdata if key.startswith('cb_')]
+            for callback in callbacks:
+                self.del_session_data(callback)
+    
+    # web edition helpers #####################################################
+    
+    @cached # so it's written only once
+    def fckeditor_config(self):
+        """inject fckeditor configuration variables in the html headers"""
+        self.html_headers.define_var('fcklang', self.lang)
+        self.html_headers.define_var('fckconfigpath',
+                                     self.build_url('data/fckcwconfig.js'))
+
+    def edited_eids(self, withtype=False):
+        """return an iterator on the eids of the entities being edited, as
+        given by the 'eid' form parameter; when `withtype` is true, yield
+        (eid, etype) couples instead, using the '__type:<eid>' parameters
+
+        raise `NothingToEdit` when no entity is selected
+        """
+        yielded = False
+        # NOTE(review): an earlier comment advised using .keys() because the
+        # caller may change `form`, but the code reads form['eid'] directly --
+        # confirm callers don't mutate the 'eid' entry while iterating
+        form = self.form
+        try:
+            eids = form['eid']
+        except KeyError:
+            raise NothingToEdit(None, {None: self._('no selected entities')})
+        if isinstance(eids, basestring):
+            eids = (eids,)
+        for peid in eids:
+            if withtype:
+                typekey = '__type:%s' % peid
+                assert typekey in form, 'no entity type specified'
+                yield peid, form[typekey]
+            else:
+                yield peid
+            yielded = True
+        if not yielded:
+            raise NothingToEdit(None, {None: self._('no selected entities')})
+
+    # minparams=3 by default: at least eid, __type, and some params to change
+    def extract_entity_params(self, eid, minparams=3):
+        """extract form parameters relative to the given eid"""
+        params = {}
+        eid = str(eid)
+        form = self.form
+        for param in form:
+            try:
+                name, peid = param.split(':', 1)
+            except ValueError:
+                if not param.startswith('__') and param != "eid":
+                    self.warning('param %s mis-formatted', param)
+                continue
+            if peid == eid:
+                value = form[param]
+                if value == INTERNAL_FIELD_VALUE:
+                    value = None
+                params[name] = value
+        params['eid'] = eid
+        if len(params) < minparams:
+            print eid, params
+            raise RequestError(self._('missing parameters for entity %s') % eid)
+        return params
+    
+    def get_pending_operations(self, entity, relname, role):
+        operations = {'insert' : [], 'delete' : []}
+        for optype in ('insert', 'delete'):
+            data = self.get_session_data('pending_%s' % optype) or ()
+            for eidfrom, rel, eidto in data:
+                if relname == rel:
+                    if role == 'subject' and entity.eid == eidfrom:
+                        operations[optype].append(eidto)
+                    if role == 'object' and entity.eid == eidto:
+                        operations[optype].append(eidfrom)
+        return operations
+    
+    def get_pending_inserts(self, eid=None):
+        """shortcut to access req's pending_insert entry
+
+        This is where are stored relations being added while editing
+        an entity. This used to be stored in a temporary cookie.
+        """
+        pending = self.get_session_data('pending_insert') or ()
+        return ['%s:%s:%s' % (subj, rel, obj) for subj, rel, obj in pending
+                if eid is None or eid in (subj, obj)]
+
+    def get_pending_deletes(self, eid=None):
+        """shortcut to access req's pending_delete entry
+
+        This is where are stored relations being removed while editing
+        an entity. This used to be stored in a temporary cookie.
+        """
+        pending = self.get_session_data('pending_delete') or ()
+        return ['%s:%s:%s' % (subj, rel, obj) for subj, rel, obj in pending
+                if eid is None or eid in (subj, obj)]
+
+    def remove_pending_operations(self):
+        """shortcut to clear req's pending_{delete,insert} entries
+
+        This is needed when the edition is completed (whether it's validated
+        or cancelled)
+        """
+        self.del_session_data('pending_insert')
+        self.del_session_data('pending_delete')
+
+    def cancel_edition(self, errorurl):
+        """remove pending relation operations and `errorurl`'s specific
+        stored data
+        """
+        self.del_session_data(errorurl)
+        self.remove_pending_operations()
+    
+    # high level methods for HTTP headers management ##########################
+
+    # must be cached since login/password are popped from the form dictionary
+    # and this method may be called multiple times during authentication
+    @cached
+    def get_authorization(self):
+        """Parse and return the Authorization header as a (user, password)
+        couple, (None, None) when no credentials are given"""
+        if self.authmode == "cookie":
+            try:
+                user = self.form.pop("__login")
+                passwd = self.form.pop("__password", '')
+                return user, passwd.encode('UTF8')
+            except KeyError:
+                self.debug('no login/password in form params')
+                return None, None
+        else:
+            return self.header_authorization()
+    
+    def get_cookie(self):
+        """retrieve request cookies, returns an empty cookie if not found"""
+        try:
+            return Cookie.SimpleCookie(self.get_header('Cookie'))
+        except KeyError:
+            return Cookie.SimpleCookie()
+
+    def set_cookie(self, cookie, key, maxage=300):
+        """set / update a cookie key
+
+        by default, cookie will be available for the next 5 minutes.
+        Give maxage = None to have a "session" cookie expiring when the
+        client close its browser
+        """
+        morsel = cookie[key]
+        if maxage is not None:
+            morsel['Max-Age'] = maxage
+        # make sure cookie is set on the correct path
+        morsel['path'] = self.base_url_path()
+        self.add_header('Set-Cookie', morsel.OutputString())
+
+    def remove_cookie(self, cookie, key):
+        """remove a cookie by expiring it"""
+        morsel = cookie[key]
+        morsel['Max-Age'] = 0
+        # Old IE versions don't honor Max-Age; the only way to expire the
+        # cookie there is the legacy "expires" syntax below.
+        # ===> Do _NOT_ comment this line :
+        morsel['expires'] = 'Thu, 01-Jan-1970 00:00:00 GMT'
+        self.add_header('Set-Cookie', morsel.OutputString())
+
+    def set_content_type(self, content_type, filename=None, encoding=None):
+        """set output content type for this request. An optional filename
+        may be given; text content types get a charset parameter appended
+        """
+        if content_type.startswith('text/'):
+            content_type += ';charset=' + (encoding or self.encoding)
+        self.set_header('content-type', content_type)
+        if filename:
+            if isinstance(filename, unicode):
+                # encode the filename as a MIME header word
+                filename = header(filename).encode()
+            self.set_header('content-disposition', 'inline; filename=%s'
+                            % filename)
+
+    # high level methods for HTML headers management ##########################
+
+    def add_js(self, jsfiles, localfile=True):
+        """specify a list of JS files to include in the HTML headers
+        :param jsfiles: a JS filename or a list of JS filenames
+        :param localfile: if True, the default data dir prefix is added to the
+                          JS filename
+        """
+        if isinstance(jsfiles, basestring):
+            jsfiles = (jsfiles,)
+        for jsfile in jsfiles:
+            if localfile:
+                jsfile = self.datadir_url + jsfile
+            self.html_headers.add_js(jsfile)
+
+    def add_css(self, cssfiles, media=u'all', localfile=True, ieonly=False):
+        """specify a CSS file to include in the HTML headers
+        :param cssfiles: a CSS filename or a list of CSS filenames
+        :param media: the CSS's media if necessary
+        :param localfile: if True, the default data dir prefix is added to the
+                          CSS filename
+        :param ieonly: if True, the CSS is only added for MSIE browsers
+        """
+        if isinstance(cssfiles, basestring):
+            cssfiles = (cssfiles,)
+        if ieonly:
+            if self.ie_browser():
+                add_css = self.html_headers.add_ie_css
+            else:
+                return # no need to do anything on non IE browsers
+        else:
+            add_css = self.html_headers.add_css
+        for cssfile in cssfiles:
+            if localfile:
+                cssfile = self.datadir_url + cssfile
+            add_css(cssfile, media)
+    
+    # urls/path management ####################################################
+    
+    def url(self, includeparams=True):
+        """return currently accessed url"""
+        return self.base_url() + self.relative_path(includeparams)
+
+    def _datadir_url(self):
+        """return url of the application's data directory, versioned with
+        the instance's md5 version"""
+        return self.base_url() + 'data%s/' % self.vreg.config.instance_md5_version()
+    
+    def selected(self, url):
+        """return True if the url is equivalent to currently accessed url"""
+        reqpath = self.relative_path().lower()
+        baselen = len(self.base_url())
+        return (reqpath == url[baselen:].lower())
+
+    def base_url_prepend_host(self, hostname):
+        """return the base url with `hostname` prepended as a subdomain
+        (any leading 'www.' is dropped first)"""
+        protocol, roothost = urlsplit(self.base_url())[:2]
+        if roothost.startswith('www.'):
+            roothost = roothost[4:]
+        return '%s://%s.%s' % (protocol, hostname, roothost)
+
+    def base_url_path(self):
+        """returns the absolute path of the base url"""
+        return urlsplit(self.base_url())[2]
+        
+    @cached
+    def from_controller(self):
+        """return the id (string) of the controller issuing the request,
+        defaulting to 'view' when the path's first segment is not a
+        registered controller"""
+        controller = self.relative_path(False).split('/', 1)[0]
+        registered_controllers = (ctrl.id for ctrl in
+                                  self.vreg.registry_objects('controllers'))
+        if controller in registered_controllers:
+            return controller
+        return 'view'
+    
+    def external_resource(self, rid, default=_MARKER):
+        """return a path to an external resource, using its identifier
+
+        raise KeyError if the resource is not defined and no default is
+        given; 'DATADIR' occurrences are replaced by the data directory url
+        """
+        try:
+            value = self.vreg.config.ext_resources[rid]
+        except KeyError:
+            if default is _MARKER:
+                raise
+            return default
+        if value is None:
+            return None
+        baseurl = self.datadir_url[:-1] # remove trailing /
+        if isinstance(value, list):
+            return [v.replace('DATADIR', baseurl) for v in value]
+        return value.replace('DATADIR', baseurl)
+    external_resource = cached(external_resource, keyarg=1)
+
+    def validate_cache(self):
+        """raise a `DirectResponse` exception if a cached page along the way
+        exists and is still usable.
+
+        calls the client-dependant implementation of `_validate_cache`
+        """
+        self._validate_cache()
+        if self.http_method() == 'HEAD':
+            # no body needed for HEAD requests once the cache is validated
+            raise StatusResponse(200, '')
+        
+    # abstract methods to override according to the web front-end #############
+        
+    def http_method(self):
+        """returns 'POST', 'GET', 'HEAD', etc."""
+        raise NotImplementedError()
+
+    def _validate_cache(self):
+        """raise a `DirectResponse` exception if a cached page along the way
+        exists and is still usable
+        """
+        raise NotImplementedError()
+        
+    def relative_path(self, includeparams=True):
+        """return the normalized path of the request (ie at least relative
+        to the application's root, but some other normalization may be needed
+        so that the returned path may be used to compare to generated urls
+
+        :param includeparams:
+           boolean indicating if GET form parameters should be kept in the path
+        """
+        raise NotImplementedError()
+
+    def get_header(self, header, default=None):
+        """return the value associated with the given input HTTP header,
+        raise KeyError if the header is not set
+        """
+        raise NotImplementedError()
+
+    def set_header(self, header, value):
+        """set an output HTTP header"""
+        raise NotImplementedError()
+
+    def add_header(self, header, value):
+        """add an output HTTP header"""
+        raise NotImplementedError()
+    
+    def remove_header(self, header):
+        """remove an output HTTP header"""
+        raise NotImplementedError()
+        
+    def header_authorization(self):
+        """returns a couple (user, password) parsed from the Authorization
+        header, (None, None) when it is missing or unparsable; only the
+        'basic' scheme is supported"""
+        auth = self.get_header("Authorization", None)
+        if auth:
+            scheme, rest = auth.split(' ', 1)
+            scheme = scheme.lower()
+            try:
+                assert scheme == "basic"
+                user, passwd = base64.decodestring(rest).split(":", 1)
+                # XXX HTTP header encoding: use email.Header?
+                return user.decode('UTF8'), passwd
+            except Exception, ex:
+                self.debug('bad authorization %s (%s: %s)',
+                           auth, ex.__class__.__name__, ex)
+        return None, None
+
+    def header_accept_language(self):
+        """returns an ordered (best quality first) generator of preferred
+        languages parsed from the Accept-Language header"""
+        acceptedlangs = self.get_header('Accept-Language', '')
+        langs = []
+        for langinfo in acceptedlangs.split(','):
+            try:
+                lang, score = langinfo.split(';')
+                score = float(score[2:]) # remove 'q='
+            except ValueError:
+                # no explicit quality given: defaults to 1.0
+                lang = langinfo
+                score = 1.0
+            lang = lang.split('-')[0]
+            langs.append( (score, lang) )
+        langs.sort(reverse=True)
+        return (lang for (score, lang) in langs)
+
+    def header_if_modified_since(self):
+        """If the HTTP header If-modified-since is set, return the equivalent
+        mx date time value (GMT), else return None
+        """
+        raise NotImplementedError()
+    
+    # page data management ####################################################
+
+    def get_page_data(self, key, default=None):
+        """return value associated to `key` in current page data"""
+        page_data = self.cnx.get_session_data(self.pageid, {})
+        return page_data.get(key, default)
+        
+    def set_page_data(self, key, value):
+        """set value associated to `key` in current page data"""
+        self.html_headers.add_unload_pagedata()
+        page_data = self.cnx.get_session_data(self.pageid, {})
+        page_data[key] = value
+        return self.cnx.set_session_data(self.pageid, page_data)
+        
+    def del_page_data(self, key=None):
+        """remove value associated to `key` in current page data
+        if `key` is None, all page data will be cleared
+        """
+        if key is None:
+            self.cnx.del_session_data(self.pageid)
+        else:
+            # missing keys are silently ignored
+            page_data = self.cnx.get_session_data(self.pageid, {})
+            page_data.pop(key, None)
+            self.cnx.set_session_data(self.pageid, page_data)
+
+    # user-agent detection ####################################################
+
+    @cached
+    def useragent(self):
+        """return the User-Agent header value, None if not set"""
+        return self.get_header('User-Agent', None)
+
+    def ie_browser(self):
+        """return true if the client browser advertises itself as MSIE"""
+        useragent = self.useragent()
+        return useragent and 'MSIE' in useragent
+    
+    def xhtml_browser(self):
+        """return False for MSIE and KHTML-based browsers, True otherwise
+        (presumably whether the browser handles xhtml content -- the
+        caller's use should be confirmed)"""
+        useragent = self.useragent()
+        if useragent and ('MSIE' in useragent or 'KHTML' in useragent):
+            return False
+        return True
+
+from cubicweb import set_log_methods
+set_log_methods(CubicWebRequestBase, LOGGER)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/test/data/bootstrap_packages	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+efile, eblog, eclasstags
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/test/data/schema/Personne.sql	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,13 @@
+nom    ivarchar(64) NOT NULL
+prenom ivarchar(64)
+sexe   char(1) DEFAULT 'M' 
+promo  choice('bon','pasbon')
+titre  ivarchar(128)
+ass    varchar(128)
+web    varchar(128)
+tel    integer
+fax    integer
+datenaiss datetime
+test   boolean 
+description text
+salary float
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/test/data/schema/Societe.sql	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,10 @@
+nom  ivarchar(64)
+web varchar(128)
+tel  integer
+fax  integer
+rncs varchar(32)
+ad1  varchar(128)
+ad2  varchar(128)
+ad3  varchar(128)
+cp   varchar(12)
+ville varchar(32)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/test/data/schema/relations.rel	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,2 @@
+Personne travaille Societe
+EUser connait Personne
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/test/data/schema/testschema.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,17 @@
+class Salesterm(EntityType):
+    described_by_test = SubjectRelation('File', cardinality='1*', composite='subject')
+    # amount is constrained to the [0, 100] interval
+    amount = Int(constraints=[IntervalBoundConstraint(0, 100)])
+    
+class tags(RelationDefinition):
+    subject = 'Tag'
+    object = ('BlogEntry', 'EUser')
+
+class checked_by(RelationType):
+    subject = 'BlogEntry'
+    object = 'EUser'
+    cardinality = '?*'
+    # only managers may add or delete this relation
+    permissions = {
+        'add': ('managers',),
+        'read': ('managers', 'users'),
+        'delete': ('managers',),
+        }
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/test/data/views.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,26 @@
+from cubicweb.web import Redirect
+from cubicweb.web.application import CubicWebPublisher
+
+# proof of concept : monkey patch publish method so that if we are in an
+# anonymous session and __fblogin is found in req.form, the user with the
+# given login is created if necessary and then a session is opened for that
+# user
+# NOTE: this requires "cookie" authentication mode
+def auto_login_publish(self, path, req):
+    """publish wrapper auto-registering and logging in the user named by
+    the '__fblogin' form parameter (test helper, see note above)"""
+    if (req.cnx is None or req.cnx.anonymous_connection) and req.form.get('__fblogin'):
+        login = password = req.form.pop('__fblogin')
+        self.repo.register_user(login, password)
+        req.form['__login'] = login
+        req.form['__password'] = password
+        if req.cnx:
+            req.cnx.close()
+        req.cnx = None
+        try:
+            self.session_handler.set_session(req)
+        except Redirect:
+            pass
+        assert req.user.login == login
+    return orig_publish(self, path, req)
+    
+orig_publish = CubicWebPublisher.main_publish
+CubicWebPublisher.main_publish = auto_login_publish
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/test/jstest_python.jst	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,47 @@
+
+crosscheck.addTest({
+
+    setup: function() {
+        crosscheck.load("testutils.js");
+        crosscheck.load("../data/jquery.js");
+        crosscheck.load("../data/compat.js");
+        crosscheck.load("../data/python.js");
+    },
+
+    test_basic_number_parsing: function () {
+	var d = strptime('2008/08/08', '%Y/%m/%d');
+	assertArrayEquals(datetuple(d), [2008, 8, 8, 0, 0])
+	d = strptime('2008/8/8', '%Y/%m/%d');
+	assertArrayEquals(datetuple(d), [2008, 8, 8, 0, 0])
+	d = strptime('8/8/8', '%Y/%m/%d');
+	assertArrayEquals(datetuple(d), [8, 8, 8, 0, 0])
+	d = strptime('0/8/8', '%Y/%m/%d');
+	assertArrayEquals(datetuple(d), [0, 8, 8, 0, 0])
+	d = strptime('-10/8/8', '%Y/%m/%d');
+	assertArrayEquals(datetuple(d), [-10, 8, 8, 0, 0])
+	d = strptime('-35000', '%Y');
+	assertArrayEquals(datetuple(d), [-35000, 1, 1, 0, 0])
+    },
+
+    test_custom_format_parsing: function () {
+	var d = strptime('2008-08-08', '%Y-%m-%d');
+	assertArrayEquals(datetuple(d), [2008, 8, 8, 0, 0])
+ 	d = strptime('2008 - !  08: 08', '%Y - !  %m: %d');
+ 	assertArrayEquals(datetuple(d), [2008, 8, 8, 0, 0])
+ 	d = strptime('2008-08-08 12:14', '%Y-%m-%d %H:%M');
+ 	assertArrayEquals(datetuple(d), [2008, 8, 8, 12, 14])
+ 	d = strptime('2008-08-08 1:14', '%Y-%m-%d %H:%M');
+ 	assertArrayEquals(datetuple(d), [2008, 8, 8, 1, 14])
+ 	d = strptime('2008-08-08 01:14', '%Y-%m-%d %H:%M');
+ 	assertArrayEquals(datetuple(d), [2008, 8, 8, 1, 14])
+   }
+//,
+//
+//  test_gregorian_parsing: function() {
+//     var d = parseGregorianDateTime("May 28 0100 09:00:00 GMT");
+//     assertArrayEquals(datetuple(d), [100, 5, 28, 10, 0]);
+//     d = parseGregorianDateTime("May 28 0099 09:00:00 GMT");
+//     assertArrayEquals(datetuple(d), [99, 5, 28, 10, 0]);
+//   }
+
+})
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/test/runtests.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,10 @@
+# -*- coding: ISO-8859-1 -*-
+"""Script used to fire all tests"""
+
+__revision__ = '$Id: runtests.py,v 1.1 2005-06-17 14:09:18 adim Exp $'
+
+from logilab.common.testlib import main
+
+if __name__ == '__main__':
+    import sys, os
+    # run all test modules found next to this script
+    main(os.path.dirname(sys.argv[0]) or '.')
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/test/test_views.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,75 @@
+"""automatic tests"""
+
+from mx.DateTime import now
+
+from cubicweb.devtools.testlib import WebTest, AutomaticWebTest
+from cubicweb.common.view import AnyRsetView
+
+AutomaticWebTest.application_rql = [
+    'Any L,F WHERE E is EUser, E login L, E firstname F',
+    'Any L,F,E WHERE E is EUser, E login L, E firstname F',
+    'Any COUNT(X) WHERE X is EUser',
+    ]
+
+class ComposityCopy(WebTest):
+
+    def test_regr_copy_view(self):
+        """regression test: make sure we can ask a copy of a
+        composite entity
+        """
+        rset = self.execute('EUser X WHERE X login "admin"')
+        self.view('copy', rset)
+
+
+
+class SomeView(AnyRsetView):
+    id = 'someview'
+    
+    def call(self):
+        self.req.add_js('spam.js')
+        self.req.add_js('spam.js')
+
+
+class ManualWebTests(WebTest):
+    def setup_database(self):
+        self.auto_populate(10)
+
+    def test_manual_tests(self):
+        rset = self.execute('Any P,F,S WHERE P is EUser, P firstname F, P surname S')
+        self.view('table', rset, template=None, displayfilter=True, displaycols=[0,2])
+        rset = self.execute('Any P,F,S WHERE P is EUser, P firstname F, P surname S LIMIT 1')
+        rset.req.form['rtype'] = 'firstname'
+        self.view('editrelation', rset, template=None, htmlcheck=False)
+        rset.req.form['rtype'] = 'use_email'
+        self.view('editrelation', rset, template=None, htmlcheck=False)
+        
+
+    def test_sortable_js_added(self):
+        rset = self.execute('EUser X')
+        # sortable.js should not be included by default
+        self.failIf('jquery.tablesorter.js' in self.view('oneline', rset))
+        # but should be included by the tableview
+        rset = self.execute('Any P,F,S WHERE P is EUser, P firstname F, P surname S LIMIT 1')
+        self.failUnless('jquery.tablesorter.js' in self.view('table', rset))
+
+    def test_js_added_only_once(self):
+        self.vreg.register_vobject_class(SomeView)
+        rset = self.execute('EUser X')
+        source = self.view('someview', rset).source
+        self.assertEquals(source.count('spam.js'), 1)
+
+
+
+class ExplicitViewsTest(WebTest):
+    
+    def test_unrelateddivs(self):
+        rset = self.execute('Any X WHERE X is EUser, X login "admin"')
+        group = self.add_entity('EGroup', name=u'R&D')
+        req = self.request(relation='in_group_subject')
+        self.view('unrelateddivs', rset, req)
+        
+        
+
+if __name__ == '__main__':
+    from logilab.common.testlib import unittest_main
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/test/testutils.js	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,29 @@
+function datetuple(d) {
+    return [d.getFullYear(), d.getMonth()+1, d.getDate(), 
+	    d.getHours(), d.getMinutes()];
+}
+    
+function pprint(obj) {
+    print('{');
+    for(k in obj) {
+	print('  ' + k + ' = ' + obj[k]);
+    }
+    print('}');
+}
+
+function arrayrepr(array) {
+    return '[' + array.join(', ') + ']';
+}
+    
+function assertArrayEquals(array1, array2) {
+    if (array1.length != array2.length) {
+	throw new crosscheck.AssertionFailure(array1.join(', ') + ' != ' + array2.join(', '));
+    }
+    for (var i=0; i<array1.length; i++) {
+	if (array1[i] != array2[i]) {
+	    
+	    throw new crosscheck.AssertionFailure(arrayrepr(array1) + ' and ' + arrayrepr(array2)
+						 + ' differs at index ' + i);
+	}
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/test/unittest_application.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,391 @@
+# -*- coding: iso-8859-1 -*-
+"""unit tests for cubicweb.web.application"""
+
+from logilab.common.testlib import TestCase, unittest_main
+import base64, Cookie
+
+import sys
+from urllib import unquote
+from logilab.common.decorators import clear_cache
+
+from cubicweb.web import Redirect, AuthenticationError, ExplicitLogin, INTERNAL_FIELD_VALUE
+from cubicweb.web.views.basecontrollers import ViewController
+from cubicweb.devtools._apptest import FakeRequest
+
+class FakeMapping:
+    """emulates a mapping module"""
+    def __init__(self):
+        self.ENTITIES_MAP = {}
+        self.ATTRIBUTES_MAP = {}
+        self.RELATIONS_MAP = {}
+
+class MockCursor:
+    def __init__(self):
+        self.executed = []
+    def execute(self, rql, args=None, cachekey=None):
+        args = args or {}
+        self.executed.append(rql % args)
+
+
+class FakeController(ViewController):
+
+    def __init__(self, form=None):
+        self.req = FakeRequest()
+        self.req.form = form or {}
+        self._cursor = self.req.cursor = MockCursor()
+
+    def new_cursor(self):
+        self._cursor = self.req.cursor = MockCursor()
+
+    def set_form(self, form):
+        self.req.form = form
+
+
+class RequestBaseTC(TestCase):
+    def setUp(self):
+        self.req = FakeRequest()
+        
+
+    def test_list_arg(self):
+        """tests the list_arg() function"""
+        list_arg = self.req.list_form_param
+        self.assertEquals(list_arg('arg3', {}), [])
+        d = {'arg1' : "value1",
+             'arg2' : ('foo', INTERNAL_FIELD_VALUE,),
+             'arg3' : ['bar']}
+        self.assertEquals(list_arg('arg1', d, True), ['value1'])
+        self.assertEquals(d, {'arg2' : ('foo', INTERNAL_FIELD_VALUE), 'arg3' : ['bar'],})
+        self.assertEquals(list_arg('arg2', d, True), ['foo'])
+        self.assertEquals({'arg3' : ['bar'],}, d)
+        self.assertEquals(list_arg('arg3', d), ['bar',])
+        self.assertEquals({'arg3' : ['bar'],}, d)
+
+
+    def test_from_controller(self):
+        self.assertEquals(self.req.from_controller(), 'view')
+        req = FakeRequest(url='project?vid=list')
+        # this assertion is just to make sure that relative_path can be
+        # correctly computed as it is used in from_controller()
+        self.assertEquals(req.relative_path(False), 'project')
+        self.assertEquals(req.from_controller(), 'view')
+        # test on a valid non-view controller
+        req = FakeRequest(url='login?x=1&y=2')
+        self.assertEquals(req.relative_path(False), 'login')
+        self.assertEquals(req.from_controller(), 'login')
+
+        
+class UtilsTC(TestCase):
+    """test suite for misc application utilities"""
+
+    def setUp(self):
+        self.ctrl = FakeController()
+    
+    #def test_which_mapping(self):
+    #    """tests which mapping is used (application or core)"""
+    #    init_mapping()
+    #    from cubicweb.common import mapping
+    #    self.assertEquals(mapping.MAPPING_USED, 'core')
+    #    sys.modules['mapping'] = FakeMapping()
+    #    init_mapping()
+    #    self.assertEquals(mapping.MAPPING_USED, 'application')
+    #    del sys.modules['mapping']
+
+    def test_execute_linkto(self):
+        """tests the execute_linkto() function"""
+        self.assertEquals(self.ctrl.execute_linkto(), None)
+        self.assertEquals(self.ctrl._cursor.executed,
+                          [])
+        
+        self.ctrl.set_form({'__linkto' : 'works_for:12_13_14:object',
+                              'eid': 8})
+        self.ctrl.execute_linkto()
+        self.assertEquals(self.ctrl._cursor.executed,
+                          ['SET Y works_for X WHERE X eid 8, Y eid %s' % i
+                           for i in (12, 13, 14)])
+
+        self.ctrl.new_cursor()
+        self.ctrl.set_form({'__linkto' : 'works_for:12_13_14:subject',
+                              'eid': 8})
+        self.ctrl.execute_linkto()
+        self.assertEquals(self.ctrl._cursor.executed,
+                          ['SET X works_for Y WHERE X eid 8, Y eid %s' % i
+                           for i in (12, 13, 14)])
+        
+
+        self.ctrl.new_cursor()
+        self.ctrl.req.form = {'__linkto' : 'works_for:12_13_14:object'}
+        self.ctrl.execute_linkto(eid=8)
+        self.assertEquals(self.ctrl._cursor.executed,
+                          ['SET Y works_for X WHERE X eid 8, Y eid %s' % i
+                           for i in (12, 13, 14)])
+
+        self.ctrl.new_cursor()
+        self.ctrl.set_form({'__linkto' : 'works_for:12_13_14:subject'})
+        self.ctrl.execute_linkto(eid=8)
+        self.assertEquals(self.ctrl._cursor.executed,
+                          ['SET X works_for Y WHERE X eid 8, Y eid %s' % i
+                           for i in (12, 13, 14)])
+
+
+from cubicweb.devtools.apptest import EnvBasedTC
+
+
+class ApplicationTC(EnvBasedTC):
+
+    def publish(self, req, path='view'):
+        return self.app.publish(path, req)
+
+    def expect_redirect(self, callback, req):
+        try:
+            res = callback(req)
+            print res
+        except Redirect, ex:
+            try:
+                path, params = ex.location.split('?', 1)
+            except ValueError:
+                path = ex.location
+                params = {}
+            else:
+                cleanup = lambda p: (p[0], unquote(p[1]))
+                params = dict(cleanup(p.split('=', 1)) for p in params.split('&') if p)
+            path = path[len(req.base_url()):]
+            return path, params
+        else:
+            self.fail('expected a Redirect exception')
+        
+    def expect_redirect_publish(self, req, path='view'):
+        return self.expect_redirect(lambda x: self.publish(x, path), req)
+    
+    def test_cnx_user_groups_sync(self):
+        user = self.user()
+        self.assertEquals(user.groups, set(('managers',)))
+        self.execute('SET X in_group G WHERE X eid %s, G name "guests"' % user.eid)
+        user = self.user()
+        self.assertEquals(user.groups, set(('managers',)))
+        self.commit()
+        user = self.user()
+        self.assertEquals(user.groups, set(('managers', 'guests')))
+        # cleanup
+        self.execute('DELETE X in_group G WHERE X eid %s, G name "guests"' % user.eid)
+        self.commit()
+    
+    def test_nonregr_publish1(self):
+        req = self.request(u'EEType X WHERE X final FALSE, X meta FALSE')
+        self.app.publish('view', req)
+        
+    def test_nonregr_publish2(self):
+        req = self.request(u'Any count(N) WHERE N todo_by U, N is Note, U eid %s'
+                           % self.user().eid)
+        self.app.publish('view', req)
+        
+    def test_publish_validation_error(self):
+        req = self.request()
+        user = self.user()
+        req.form = {
+            'eid':       `user.eid`,
+            '__type:'+`user.eid`:    'EUser',
+            'login:'+`user.eid`:     '', # ERROR: no login specified
+            'edits-login:'+`user.eid`: unicode(user.login),
+             # just a sample, missing some necessary information for real life
+            '__errorurl': 'view?vid=edition...'
+            }
+        path, params = self.expect_redirect_publish(req, 'edit')
+        forminfo = req.get_session_data('view?vid=edition...')
+        eidmap = forminfo['eidmap']
+        self.assertEquals(eidmap, {})
+        values = forminfo['values']
+        self.assertEquals(values['login:'+`user.eid`], '')
+        self.assertEquals(values['edits-login:'+`user.eid`], user.login)
+        self.assertEquals(values['eid'], `user.eid`)
+        errors = forminfo['errors']
+        self.assertEquals(errors.entity, user.eid)
+        self.assertEquals(errors.errors['login'], 'required attribute')
+
+
+    def test_validation_error_dont_loose_subentity_data(self):
+        """test creation of two linked entities
+        """        
+        req = self.request()
+        form = {'eid': ['X', 'Y'],
+                '__type:X': 'EUser',
+                # missing required field
+                'login:X': u'', 'edits-login:X': '', 
+                'surname:X': u'Mr Ouaoua', 'edits-surname:X': '',
+                '__type:Y': 'EmailAddress',
+                # but email address is set
+                'address:Y': u'bougloup@logilab.fr', 'edits-address:Y': '',
+                'alias:Y': u'', 'edits-alias:Y': '',
+                'use_email:X': 'Y', 'edits-use_email:X': INTERNAL_FIELD_VALUE,
+                # necessary to get validation error handling
+                '__errorurl': 'view?vid=edition...',
+                }
+        req.form = form
+        # monkey patch edited_eids to ensure both entities are edited, not only X
+        req.edited_eids = lambda : ('Y', 'X')
+        path, params = self.expect_redirect_publish(req, 'edit')
+        forminfo = req.get_session_data('view?vid=edition...')
+        self.assertUnorderedIterableEquals(forminfo['eidmap'].keys(), ['X', 'Y'])
+        self.assertEquals(forminfo['errors'].entity, forminfo['eidmap']['X'])
+        self.assertEquals(forminfo['errors'].errors, {'login': 'required attribute',
+                                                      'upassword': 'required attribute'})
+        self.assertEquals(forminfo['values'], form)
+        
+    def _test_cleaned(self, kwargs, injected, cleaned):
+        req = self.request(**kwargs)
+        page = self.app.publish('view', req)
+        self.failIf(injected in page, (kwargs, injected))
+        self.failUnless(cleaned in page, (kwargs, cleaned))
+        
+    def test_nonregr_script_kiddies(self):
+        """test against current script injection"""
+        injected = '<i>toto</i>'
+        cleaned = 'toto'
+        for kwargs in ({'__message': injected},
+                       {'vid': injected},
+                       {'vtitle': injected},
+                       ):
+            yield self._test_cleaned, kwargs, injected, cleaned
+        
+    def test_site_wide_eproperties_sync(self):
+        # XXX works in the all-in-one configuration but not in twisted, for
+        # instance, in which case we need a kind of repo -> http server notification
+        # protocol
+        vreg = self.app.vreg
+        # default value
+        self.assertEquals(vreg.property_value('ui.language'), 'en')
+        self.execute('INSERT EProperty X: X value "fr", X pkey "ui.language"')
+        self.assertEquals(vreg.property_value('ui.language'), 'en')
+        self.commit()
+        self.assertEquals(vreg.property_value('ui.language'), 'fr')
+        self.execute('SET X value "de" WHERE X pkey "ui.language"')
+        self.assertEquals(vreg.property_value('ui.language'), 'fr')
+        self.commit()
+        self.assertEquals(vreg.property_value('ui.language'), 'de')
+        self.execute('DELETE EProperty X WHERE X pkey "ui.language"')
+        self.assertEquals(vreg.property_value('ui.language'), 'de')
+        self.commit()
+        self.assertEquals(vreg.property_value('ui.language'), 'en')
+
+    def test_fb_login_concept(self):
+        """see data/views.py"""
+        self.set_option('auth-mode', 'cookie')
+        self.set_option('anonymous-user', 'anon')
+        self.login('anon')
+        req = self.request()
+        origcnx = req.cnx
+        req.form['__fblogin'] = u'turlututu'
+        page = self.publish(req)
+        self.failIf(req.cnx is origcnx)
+        self.assertEquals(req.user.login, 'turlututu')
+        self.failUnless('turlututu' in page, page)
+        
+    # authentication tests ####################################################
+
+    def _init_auth(self, authmode, anonuser=None):
+        self.set_option('auth-mode', authmode)
+        self.set_option('anonymous-user', anonuser)
+        req = self.request()
+        origcnx = req.cnx
+        req.cnx = None
+        sh = self.app.session_handler
+        # not properly cleaned between tests
+        self.open_sessions = sh.session_manager._sessions = {} 
+        return req, origcnx
+
+    def _test_auth_succeed(self, req, origcnx):
+        sh = self.app.session_handler
+        path, params = self.expect_redirect(lambda x: self.app.connect(x), req)
+        cnx = req.cnx
+        self.assertEquals(len(self.open_sessions), 1, self.open_sessions)
+        self.assertEquals(cnx.login, origcnx.login)
+        self.assertEquals(cnx.password, origcnx.password)
+        self.assertEquals(cnx.anonymous_connection, False) 
+        self.assertEquals(path, 'view')
+        self.assertEquals(params, {'__message': 'welcome %s !' % origcnx.login})
+    
+    def _test_auth_fail(self, req):
+        self.assertRaises(AuthenticationError, self.app.connect, req)
+        self.assertEquals(req.cnx, None)
+        self.assertEquals(len(self.open_sessions), 0) 
+        clear_cache(req, 'get_authorization')
+        
+    def test_http_auth_no_anon(self):
+        req, origcnx = self._init_auth('http')
+        self._test_auth_fail(req)
+        self.assertRaises(ExplicitLogin, self.publish, req, 'login')
+        self.assertEquals(req.cnx, None)
+        authstr = base64.encodestring('%s:%s' % (origcnx.login, origcnx.password))
+        req._headers['Authorization'] = 'basic %s' % authstr
+        self._test_auth_succeed(req, origcnx)
+        self.assertRaises(AuthenticationError, self.publish, req, 'logout')
+        self.assertEquals(len(self.open_sessions), 0) 
+
+    def test_cookie_auth_no_anon(self):
+        req, origcnx = self._init_auth('cookie')
+        self._test_auth_fail(req)
+        form = self.publish(req, 'login')
+        self.failUnless('__login' in form)
+        self.failUnless('__password' in form)
+        self.assertEquals(req.cnx, None)
+        req.form['__login'] = origcnx.login
+        req.form['__password'] = origcnx.password
+        self._test_auth_succeed(req, origcnx)
+        self.assertRaises(AuthenticationError, self.publish, req, 'logout')
+        self.assertEquals(len(self.open_sessions), 0) 
+
+    def _test_auth_anon(self, req):
+        self.app.connect(req)
+        acnx = req.cnx
+        self.assertEquals(len(self.open_sessions), 1)
+        self.assertEquals(acnx.login, 'anon') 
+        self.assertEquals(acnx.password, 'anon') 
+        self.failUnless(acnx.anonymous_connection)
+        self._reset_cookie(req)
+        
+    def _reset_cookie(self, req):
+        # prepare the continuation of the test:
+        # set session id in cookie
+        cookie = Cookie.SimpleCookie()
+        cookie['__session'] = req.cnx.sessionid
+        req._headers['Cookie'] = cookie['__session'].OutputString()
+        clear_cache(req, 'get_authorization')
+        # reset cnx as if it was a new incoming request
+        req.cnx = None
+        
+    def _test_anon_auth_fail(self, req):
+        self.assertEquals(len(self.open_sessions), 1) 
+        self.app.connect(req)
+        self.assertEquals(req.message, 'authentication failure')
+        self.assertEquals(req.cnx.anonymous_connection, True)
+        self.assertEquals(len(self.open_sessions), 1) 
+        self._reset_cookie(req)
+        
+    def test_http_auth_anon_allowed(self):
+        req, origcnx = self._init_auth('http', 'anon')
+        self._test_auth_anon(req)
+        authstr = base64.encodestring('toto:pouet')
+        req._headers['Authorization'] = 'basic %s' % authstr
+        self._test_anon_auth_fail(req)
+        authstr = base64.encodestring('%s:%s' % (origcnx.login, origcnx.password))
+        req._headers['Authorization'] = 'basic %s' % authstr
+        self._test_auth_succeed(req, origcnx)
+        self.assertRaises(AuthenticationError, self.publish, req, 'logout')
+        self.assertEquals(len(self.open_sessions), 0) 
+        
+    def test_cookie_auth_anon_allowed(self):
+        req, origcnx = self._init_auth('cookie', 'anon')
+        self._test_auth_anon(req)
+        req.form['__login'] = 'toto'
+        req.form['__password'] = 'pouet'
+        self._test_anon_auth_fail(req)
+        req.form['__login'] = origcnx.login
+        req.form['__password'] = origcnx.password
+        self._test_auth_succeed(req, origcnx)
+        self.assertRaises(AuthenticationError, self.publish, req, 'logout')
+        self.assertEquals(len(self.open_sessions), 0) 
+
+    
+
+        
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/test/unittest_controller.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,33 @@
+"""cubicweb.web.controller unit tests
+
+"""
+
+from mx.DateTime import DateTimeType, DateTimeDeltaType
+
+from logilab.common.testlib import unittest_main
+
+from cubicweb.devtools import apptest
+
+class BaseControllerTC(apptest.ControllerTC):
+    def test_parse_datetime(self):
+        self.assertIsInstance(self.ctrl.parse_datetime('2006/06/24 12:18'), DateTimeType)
+        self.assertIsInstance(self.ctrl.parse_datetime('2006/06/24'), DateTimeType)
+        self.assertIsInstance(self.ctrl.parse_datetime('2006/06/24 12:18', 'Datetime'), DateTimeType)
+        self.assertIsInstance(self.ctrl.parse_datetime('2006/06/24', 'Datetime'), DateTimeType)
+        self.assertIsInstance(self.ctrl.parse_datetime('2006/06/24', 'Date'), DateTimeType)
+        self.assertIsInstance(self.ctrl.parse_datetime('12:18', 'Time'), DateTimeDeltaType)
+        self.assertRaises(ValueError,
+                          self.ctrl.parse_datetime, '2006/06/24 12:188', 'Datetime')
+        self.assertRaises(ValueError,
+                          self.ctrl.parse_datetime, '2006/06/240', 'Datetime')
+        self.assertRaises(ValueError,
+                          self.ctrl.parse_datetime, '2006/06/24 12:18', 'Date')
+        self.assertRaises(ValueError,
+                          self.ctrl.parse_datetime, '2006/24/06', 'Date')
+        self.assertRaises(ValueError,
+                          self.ctrl.parse_datetime, '2006/06/240', 'Date')
+        self.assertRaises(ValueError,
+                          self.ctrl.parse_datetime, '12:188', 'Time')
+        
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/test/unittest_magicsearch.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,224 @@
+# -*- coding: utf-8 -*-
+"""Unit tests for magic_search service"""
+
+import sys
+
+from logilab.common.testlib import TestCase, unittest_main
+
+from rql import BadRQLQuery, RQLSyntaxError
+
+from cubicweb.devtools.apptest import EnvBasedTC, TestEnvironment
+
+
+translations = {
+    u'EUser' : u"Utilisateur",
+#    u'Workcase' : u"Affaire",
+    u'EmailAddress' : u"Adresse",
+#    u'Division' : u"Division",
+#    u'Comment' : u"Commentaire",
+    u'name' : u"nom",
+    u'alias' : u"nom",
+    u'surname' : u"nom",
+    u'firstname' : u"prénom",
+    u'state' : u"état",
+#    u'subject' : u"sujet",
+    u'address' : u"adresse",
+    u'use_email' : u"adel",
+    }
+def _translate(msgid):
+    return translations.get(msgid, msgid)
+
+
+from cubicweb.web.views.magicsearch import translate_rql_tree, QSPreProcessor, QueryTranslator
+
+class QueryTranslatorTC(EnvBasedTC):
+    """test suite for QueryTranslatorTC"""
+    
+    def setUp(self):
+        super(QueryTranslatorTC, self).setUp()
+        self.req = self.env.create_request()
+        self.vreg.config.translations = {'en': _translate}
+        proc = self.vreg.select_component('magicsearch', self.req)
+        self.proc = [p for p in proc.processors if isinstance(p, QueryTranslator)][0]
+
+    def test_basic_translations(self):
+        """tests basic translations (no ambiguities)"""
+        rql = "Any C WHERE C is Adresse, P adel C, C adresse 'Logilab'"
+        rql, = self.proc.preprocess_query(rql, self.req)
+        self.assertEquals(rql, "Any C WHERE C is EmailAddress, P use_email C, C address 'Logilab'")
+
+    def test_ambiguous_translations(self):
+        """tests possibly ambiguous translations"""
+        rql = "Any P WHERE P adel C, C is EmailAddress, C nom 'Logilab'"
+        rql, = self.proc.preprocess_query(rql, self.req)
+        self.assertEquals(rql, "Any P WHERE P use_email C, C is EmailAddress, C alias 'Logilab'")
+        rql = "Any P WHERE P is Utilisateur, P adel C, P nom 'Smith'"
+        rql, = self.proc.preprocess_query(rql, self.req)
+        self.assertEquals(rql, "Any P WHERE P is EUser, P use_email C, P surname 'Smith'")
+
+
+class QSPreProcessorTC(EnvBasedTC):
+    """test suite for QSPreProcessor"""
+    def setUp(self):
+        super(QSPreProcessorTC, self).setUp()
+        self.vreg.config.translations = {'en': _translate}
+        self.req = self.request()
+        proc = self.vreg.select_component('magicsearch', self.req)
+        self.proc = [p for p in proc.processors if isinstance(p, QSPreProcessor)][0]
+        self.proc.req = self.req
+
+    def test_entity_translation(self):
+        """tests QSPreProcessor._get_entity_name()"""
+        translate = self.proc._get_entity_type
+        self.assertEquals(translate(u'EmailAddress'), "EmailAddress")
+        self.assertEquals(translate(u'emailaddress'), "EmailAddress")
+        self.assertEquals(translate(u'Adresse'), "EmailAddress")
+        self.assertEquals(translate(u'adresse'), "EmailAddress")
+        self.assertRaises(BadRQLQuery, translate, 'whatever')
+
+    def test_attribute_translation(self):
+        """tests QSPreProcessor._get_attribute_name"""
+        translate = self.proc._get_attribute_name
+        eschema = self.schema.eschema('EUser')
+        self.assertEquals(translate(u'prénom', eschema), "firstname")
+        self.assertEquals(translate(u'nom', eschema), 'surname')
+        #self.assert_(translate(u'nom') in ('name', 'surname'))
+        eschema = self.schema.eschema('EmailAddress')        
+        self.assertEquals(translate(u'adresse', eschema), "address")
+        self.assertEquals(translate(u'nom', eschema), 'alias')
+        # should fail if the name is not an attribute for the given entity schema
+        self.assertRaises(BadRQLQuery, translate, 'whatever', eschema)
+        self.assertRaises(BadRQLQuery, translate, 'prénom', eschema)
+
+    def test_one_word_query(self):
+        """tests the 'one word shortcut queries'"""
+        transform = self.proc._one_word_query
+        self.assertEquals(transform('123'),
+                          ('Any X WHERE X eid %(x)s', {'x': 123}, 'x'))
+        self.assertEquals(transform('EUser'),
+                          ('EUser E',))
+        self.assertEquals(transform('Utilisateur'),
+                          ('EUser E',))
+        self.assertEquals(transform('Adresse'),
+                          ('EmailAddress E',))
+        self.assertEquals(transform('adresse'),
+                          ('EmailAddress E',))
+        self.assertRaises(BadRQLQuery, transform, 'Workcases')
+
+    def test_two_words_query(self):
+        """tests the 'two words shortcut queries'"""
+        transform = self.proc._two_words_query
+        self.assertEquals(transform('EUser', 'E'),
+                          ("EUser E",))
+        self.assertEquals(transform('EUser', 'Smith'),
+                          ('EUser E WHERE E has_text %(text)s', {'text': 'Smith'}))
+        self.assertEquals(transform('utilisateur', 'Smith'),
+                          ('EUser E WHERE E has_text %(text)s', {'text': 'Smith'}))
+        self.assertEquals(transform(u'adresse', 'Logilab'),
+                          ('EmailAddress E WHERE E has_text %(text)s', {'text': 'Logilab'}))
+        self.assertEquals(transform(u'adresse', 'Logi%'),
+                          ('EmailAddress E WHERE E alias LIKE %(text)s', {'text': 'Logi%'}))
+        self.assertRaises(BadRQLQuery, transform, "pers", "taratata")
+        #self.assertEquals(transform('EUser', '%mi'), 'EUser E WHERE P surname LIKE "%mi"')
+
+    def test_three_words_query(self):
+        """tests the 'three words shortcut queries'"""
+        transform = self.proc._three_words_query
+        self.assertEquals(transform('utilisateur', u'prénom', 'cubicweb'),
+                          ('EUser E WHERE E firstname %(text)s', {'text': 'cubicweb'}))
+        self.assertEquals(transform('utilisateur', 'nom', 'cubicweb'),
+                          ('EUser E WHERE E surname %(text)s', {'text': 'cubicweb'}))
+        self.assertEquals(transform(u'adresse', 'nom', 'cubicweb'),
+                          ('EmailAddress E WHERE E alias %(text)s', {'text': 'cubicweb'}))
+        self.assertEquals(transform('EmailAddress', 'nom', 'cubicweb'),
+                          ('EmailAddress E WHERE E alias %(text)s', {'text': 'cubicweb'})) 
+        self.assertEquals(transform('utilisateur', u'prénom', 'cubicweb%'),
+                          ('EUser E WHERE E firstname LIKE %(text)s', {'text': 'cubicweb%'}))
+        # expanded shortcuts
+        self.assertEquals(transform('EUser', 'use_email', 'Logilab'),
+                          ('EUser E WHERE E use_email E1, E1 has_text %(text)s', {'text': 'Logilab'}))
+        self.assertEquals(transform('EUser', 'use_email', '%Logilab'),
+                          ('EUser E WHERE E use_email E1, E1 alias LIKE %(text)s', {'text': '%Logilab'}))
+        self.assertRaises(BadRQLQuery, transform, 'word1', 'word2', 'word3')
+        
+    def test_multiple_words_query(self):
+        """tests multiple_words_query()"""
+        self.assertEquals(self.proc._multiple_words_query(['a', 'b', 'c', 'd', 'e']),
+                          ('a b c d e',))
+
+    def test_quoted_queries(self):
+        """tests how quoted queries are handled"""
+        queries = [
+            (u'Adresse "My own EmailAddress"', ('EmailAddress E WHERE E has_text %(text)s', {'text': u'My own EmailAddress'})),
+            (u'Utilisateur prénom "Jean Paul"', ('EUser E WHERE E firstname %(text)s', {'text': 'Jean Paul'})),
+            (u'Utilisateur firstname "Jean Paul"', ('EUser E WHERE E firstname %(text)s', {'text': 'Jean Paul'})),
+            (u'EUser firstname "Jean Paul"', ('EUser E WHERE E firstname %(text)s', {'text': 'Jean Paul'})),
+            ]
+        transform = self.proc._quoted_words_query
+        for query, expected in queries:
+            self.assertEquals(transform(query), expected)
+        self.assertRaises(BadRQLQuery, transform, "unquoted rql")
+        self.assertRaises(BadRQLQuery, transform, 'pers "Jean Paul"')
+        self.assertRaises(BadRQLQuery, transform, 'EUser firstname other "Jean Paul"')
+    
+    def test_process_query(self):
+        """tests how queries are processed"""
+        queries = [
+            (u'Utilisateur', (u"EUser E",)),
+            (u'Utilisateur P', (u"EUser P",)),
+            (u'Utilisateur cubicweb', (u'EUser E WHERE E has_text %(text)s', {'text': u'cubicweb'})),
+            (u'EUser prénom cubicweb', (u'EUser E WHERE E firstname %(text)s', {'text': 'cubicweb'},)),
+            (u'Any X WHERE X is Something', (u"Any X WHERE X is Something",)),
+            ]
+        for query, expected in queries:
+            self.assertEquals(self.proc.preprocess_query(query, self.req), expected)
+        
+
+
+## Processor Chains tests ############################################
+        
+
+class ProcessorChainTC(EnvBasedTC):
+    """test suite for magic_search's processor chains"""
+
+    def setUp(self):
+        super(ProcessorChainTC, self).setUp()
+        self.vreg.config.translations = {'en': _translate}
+        self.req = self.request()
+        self.proc = self.vreg.select_component('magicsearch', self.req)
+
+    def test_main_preprocessor_chain(self):
+        """tests QUERY_PROCESSOR"""
+        queries = [
+            (u'foo',
+             ("Any X WHERE X has_text %(text)s", {'text': u'foo'})),
+            # XXX this sounds like a language translator test...
+            # and it fails
+            (u'Utilisateur Smith',
+             ('EUser E WHERE E has_text %(text)s', {'text': u'Smith'})),
+            (u'utilisateur nom Smith',
+             ('EUser E WHERE E surname %(text)s', {'text': u'Smith'})),
+            (u'Any P WHERE P is Utilisateur, P nom "Smith"',
+             ('Any P WHERE P is EUser, P surname "Smith"', None)),
+            ]
+        for query, expected in queries:
+            rset = self.proc.process_query(query, self.req)
+            self.assertEquals((rset.rql, rset.args), expected)
+
+    def test_iso88591_fulltext(self):
+        """we must be able to type accentuated characters in the search field"""
+        rset = self.proc.process_query(u'écrire', self.req)
+        self.assertEquals(rset.rql, "Any X WHERE X has_text %(text)s")
+        self.assertEquals(rset.args, {'text': u'écrire'})
+
+    def test_explicit_component(self):
+        self.assertRaises(RQLSyntaxError,
+                          self.proc.process_query, u'rql: EUser E WHERE E noattr "Smith",', self.req)
+        self.assertRaises(BadRQLQuery,
+                          self.proc.process_query, u'rql: EUser E WHERE E noattr "Smith"', self.req)
+        rset = self.proc.process_query(u'text: utilisateur Smith', self.req)
+        self.assertEquals(rset.rql, 'Any X WHERE X has_text %(text)s')
+        self.assertEquals(rset.args, {'text': u'utilisateur Smith'})
+                          
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/test/unittest_urlpublisher.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,113 @@
+# -*- coding: utf-8 -*-
+"""Unit tests for url publishing service"""
+
+import re
+
+from logilab.common.testlib import unittest_main
+
+from cubicweb.devtools.apptest import EnvBasedTC
+from cubicweb.devtools._apptest import FakeRequest
+
+from cubicweb.rset import ResultSet
+from cubicweb.web import NotFound, Redirect
+from cubicweb.web.views.urlrewrite import SimpleReqRewriter
+
+
+class URLPublisherTC(EnvBasedTC):
+    """test suite for the URL publishing / resolution service"""
+
+    def setup_database(self):
+        """populate the db: a user with a non-ascii login, a blog entry and a tag linked to it"""
+        self.create_user(u'ÿsaÿe')
+        b = self.add_entity('BlogEntry', title=u'hell\'o', content=u'blabla')
+        c = self.add_entity('Tag', name=u'yo') # take care: Tag's name normalized to lower case
+        self.execute('SET C tags B WHERE C eid %(c)s, B eid %(b)s', {'c':c.eid, 'b':b.eid}, 'b')
+                          
+    def process(self, url):
+        """resolve `url` through the application's url resolver, returning (controller id, rset)"""
+        req = self.req = self.request()
+        return self.env.app.url_resolver.process(req, url)
+        
+    def test_raw_path(self):
+        """tests raw path resolution"""
+        self.assertEquals(self.process('view'), ('view', None))
+        self.assertEquals(self.process('edit'), ('edit', None))
+        self.assertRaises(NotFound, self.process, 'whatever')
+
+    def test_eid_path(self):
+        """tests eid path resolution"""
+        self.assertIsInstance(self.process('123')[1], ResultSet)
+        self.assertEquals(len(self.process('123')[1]), 1)
+        self.assertRaises(NotFound, self.process, '123/345')
+        self.assertRaises(NotFound, self.process, 'not_eid')
+
+    def test_rest_path(self):
+        """tests the rest path resolution"""
+        ctrl, rset = self.process('EUser')
+        self.assertEquals(ctrl, 'view')
+        self.assertEquals(rset.description[0][0], 'EUser')
+        self.assertEquals(rset.printable_rql(),
+                          "Any X,AA,AB,AC,AD ORDERBY AA WHERE X is EUser, X login AA, X firstname AB, X surname AC, X modification_date AD")
+        ctrl, rset = self.process('EUser/login/admin')
+        self.assertEquals(ctrl, 'view')
+        self.assertEquals(len(rset), 1)
+        self.assertEquals(rset.description[0][0], 'EUser')
+        self.assertEquals(rset.printable_rql(), 'Any X WHERE X is EUser, X login "admin"')
+        ctrl, rset = self.process('euser/admin')
+        self.assertEquals(ctrl, 'view')
+        self.assertEquals(len(rset), 1)
+        self.assertEquals(rset.description[0][0], 'EUser')
+        self.assertEquals(rset.printable_rql(), 'Any X WHERE X is EUser, X login "admin"')
+        ctrl, rset = self.process('euser/eid/%s'%rset[0][0])
+        self.assertEquals(ctrl, 'view')
+        self.assertEquals(len(rset), 1)
+        self.assertEquals(rset.description[0][0], 'EUser')
+        # NOTE(review): hard-coded eid 5 assumes a fixed entity creation order — fragile
+        self.assertEquals(rset.printable_rql(), 'Any X WHERE X is EUser, X eid 5')
+        # test non-ascii paths
+        ctrl, rset = self.process('EUser/login/%C3%BFsa%C3%BFe')
+        self.assertEquals(ctrl, 'view')
+        self.assertEquals(len(rset), 1)
+        self.assertEquals(rset.description[0][0], 'EUser')
+        self.assertEquals(rset.printable_rql(), u'Any X WHERE X is EUser, X login "ÿsaÿe"')
+        # test quoted paths
+        ctrl, rset = self.process('BlogEntry/title/hell%27o')
+        self.assertEquals(ctrl, 'view')
+        self.assertEquals(len(rset), 1)
+        self.assertEquals(rset.description[0][0], 'BlogEntry')
+        self.assertEquals(rset.printable_rql(), u'Any X WHERE X is BlogEntry, X title "hell\'o"')
+        # errors
+        self.assertRaises(NotFound, self.process, 'EUser/eid/30000')
+        self.assertRaises(NotFound, self.process, 'Workcases')
+        self.assertRaises(NotFound, self.process, 'EUser/inexistant_attribute/joe')
+    
+    def test_action_path(self):
+        """tests the action path resolution"""
+        self.assertRaises(Redirect, self.process, '1/edit')
+        self.assertRaises(Redirect, self.process, 'Tag/name/yo/edit')
+        self.assertRaises(Redirect, self.process, 'Tag/yo/edit')
+        self.assertRaises(NotFound, self.process, 'view/edit')
+        self.assertRaises(NotFound, self.process, '1/non_action')
+        self.assertRaises(NotFound, self.process, 'EUser/login/admin/non_action')
+
+
+    def test_regexp_path(self):
+        """tests the regexp path resolution"""
+        ctrl, rset = self.process('add/Task')
+        self.assertEquals(ctrl, 'view')
+        self.assertEquals(rset, None) 
+        self.assertEquals(self.req.form, {'etype' : "Task", 'vid' : "creation"})
+        self.assertRaises(NotFound, self.process, 'add/foo/bar')
+
+
+    def test_nonascii_path(self):
+        """a unicode-aware rewrite rule must match a url-quoted non-ascii path"""
+        oldrules = SimpleReqRewriter.rules
+        SimpleReqRewriter.rules = [(re.compile('/\w+', re.U), dict(vid='foo')),]
+        try:
+            path = str(FakeRequest().url_quote(u'été'))
+            ctrl, rset = self.process(path)
+            self.assertEquals(rset, None) 
+            self.assertEquals(self.req.form, {'vid' : "foo"})
+        finally:
+            # always restore the class-level rules, even on failure
+            SimpleReqRewriter.rules = oldrules
+            
+
+# standard test-module entry point
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/test/unittest_urlrewrite.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,101 @@
+from logilab.common.testlib import TestCase, unittest_main
+
+from cubicweb.devtools._apptest import FakeRequest
+from cubicweb.devtools.apptest import EnvBasedTC
+
+from cubicweb.web.views.urlrewrite import SimpleReqRewriter, SchemaBasedRewriter, rgx, rgx_action
+
+
+class UrlRewriteTC(TestCase):
+
+    def test_auto_extend_rules(self):
+        """rules declared in a SimpleReqRewriter subclass are appended to the
+        base class rules (unless ignore_baseclass_rules is set)"""
+        class Rewriter(SimpleReqRewriter):
+            rules = [
+                ('foo', dict(rql='Foo F')),
+                ('/index', dict(vid='index2')),
+                ]
+        rules = []
+        for pattern, values in Rewriter.rules:
+            if hasattr(pattern, 'pattern'):
+                pattern = pattern.pattern
+            rules.append((pattern, values))
+        self.assertListEquals(rules, [
+            ('foo' , dict(rql='Foo F')),
+            ('/index' , dict(vid='index2')),
+            ('/schema', {'vid': 'schema'}),
+            ('/myprefs', dict(vid='epropertiesform')),
+            ('/siteconfig', dict(vid='systemepropertiesform')),
+            ('/manage', dict(vid='manage')),
+            ('/notfound', {'vid': '404'}),
+            ('/error', {'vid': 'error'}),
+            ('/schema/([^/]+?)/?$', {'rql': r'Any X WHERE X is EEType, X name "\1"', 'vid': 'eschema'}),
+            ('/add/([^/]+?)/?$' , dict(vid='creation', etype=r'\1')),
+            ('/doc/images/(.+?)/?$', dict(fid='\\1', vid='wdocimages')),
+            ('/doc/?$', dict(fid='main', vid='wdoc')),
+            ('/doc/(.+?)/?$', dict(fid='\\1', vid='wdoc')),
+            ('/changelog/?$', dict(vid='changelog')),
+            # now in SchemaBasedRewriter
+            #('/search/(.+)$', dict(rql=r'Any X WHERE X has_text "\1"')), 
+            ])
+
+
+    def test_no_extend_rules(self):
+        """with ignore_baseclass_rules set, only the subclass' own rules remain"""
+        class Rewriter(SimpleReqRewriter):
+            ignore_baseclass_rules = True
+            rules = [
+                ('foo', dict(rql='Foo F')),
+                ('/index', dict(vid='index2')),
+                ]
+        self.assertListEquals(Rewriter.rules, [
+            ('foo' , dict(rql='Foo F')),
+            ('/index' , dict(vid='index2')),
+            ])
+
+    def test_basic_transformation(self):
+        """test simple string-based rewrite"""
+        rewriter = SimpleReqRewriter()
+        req = FakeRequest()
+        self.assertRaises(KeyError, rewriter.rewrite, req, '/view?vid=whatever')
+        self.assertEquals(req.form, {})
+        rewriter.rewrite(req, '/index')
+        self.assertEquals(req.form, {'vid' : "index"})
+
+    def test_regexp_transformation(self):
+        """test regexp-based rewrite"""
+        rewriter = SimpleReqRewriter()
+        req = FakeRequest()
+        rewriter.rewrite(req, '/add/Task')
+        self.assertEquals(req.form, {'vid' : "creation", 'etype' : "Task"})
+        req = FakeRequest()
+        # a trailing slash must be accepted as well
+        rewriter.rewrite(req, '/add/Task/')
+        self.assertEquals(req.form, {'vid' : "creation", 'etype' : "Task"})
+
+
+
+
+class RgxActionRewriteTC(EnvBasedTC):
+
+    def setup_database(self):
+        """create two users sharing the same surname"""
+        self.p1 = self.create_user(u'user1')
+        self.p1.set_attributes(firstname=u'joe', surname=u'Dalton')
+        self.p2 = self.create_user(u'user2')
+        self.p2.set_attributes(firstname=u'jack', surname=u'Dalton')
+
+    def test_rgx_action_with_transforms(self):
+        """rgx_action transforms (capitalize / lower) are applied to the
+        captured groups before the rql query is executed"""
+        class TestSchemaBasedRewriter(SchemaBasedRewriter):
+            rules = [
+                (rgx('/(?P<sn>\w+)/(?P<fn>\w+)'), rgx_action(r'Any X WHERE X surname %(sn)s, X firstname %(fn)s',
+                                                                             argsgroups=('sn', 'fn'),
+                                                                             transforms={'sn' : unicode.capitalize,
+                                                                                         'fn' : unicode.lower,})),
+                ]
+        rewriter = TestSchemaBasedRewriter()
+        req = self.request()
+        pmid, rset = rewriter.rewrite(req, u'/DaLToN/JoE')
+        self.assertEquals(len(rset), 1)
+        self.assertEquals(rset[0][0], self.p1.eid)
+        
+    
+
+# standard test-module entry point
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/test/unittest_views_actions.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,25 @@
+from logilab.common.testlib import unittest_main
+
+from cubicweb.devtools.apptest import EnvBasedTC
+
+class ActionsTC(EnvBasedTC):
+    def test_view_action(self):
+        """the 'view' action url keeps the rql but drops __message and vid"""
+        req = self.request(__message='bla bla bla', vid='rss', rql='EUser X')
+        rset = self.execute('EUser X')
+        vaction = [action for action in self.vreg.possible_vobjects('actions', req, rset)
+                   if action.id == 'view'][0]
+        self.assertEquals(vaction.url(), 'http://testing.fr/cubicweb/view?rql=EUser%20X')
+
+    def test_sendmail_action(self):
+        """sendemail action is selectable for admin but not for anonymous"""
+        req = self.request()
+        rset = self.execute('Any X WHERE X login "admin"', req=req)
+        self.failUnless([action for action in self.vreg.possible_vobjects('actions', req, rset)
+                         if action.id == 'sendemail'])
+        self.login('anon')
+        req = self.request()
+        rset = self.execute('Any X WHERE X login "anon"', req=req)
+        self.failIf([action for action in self.vreg.possible_vobjects('actions', req, rset)
+                     if action.id == 'sendemail'])
+        
+# standard test-module entry point
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/test/unittest_views_apacherewrite.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,39 @@
+from logilab.common.testlib import TestCase, unittest_main
+
+from cubicweb.web.views.apacherewrite import *
+
+class ApacheURLRewriteTC(TestCase):
+
+    def test(self):
+        """check host/path based RewriteCond handling: redirect action raises
+        Redirect, stop action returns the path unchanged, default action rewrites"""
+        class MyAppRules(ApacheURLRewrite): 
+            rules = [
+                RewriteCond('logilab\.fr', match='host',
+                            rules=[('/(.*)', r'http://www.logilab.fr/\1')],
+                            action='redirect'),
+                RewriteCond('(www)\.logilab\.fr', match='host', action='stop'),
+                RewriteCond('/(data|json)/', match='path', action='stop'),
+                RewriteCond('(?P<cat>.*)\.logilab\.fr', match='host', 
+                            rules=[('/(.*)', r'/m_%(cat)s/\1')]),
+                ]
+        urlrewriter = MyAppRules()
+        try:
+            urlrewriter.rewrite('logilab.fr', '/whatever')
+            self.fail('redirect exception expected')
+        except Redirect, ex:
+            self.assertEquals(ex.location, 'http://www.logilab.fr/whatever')
+        self.assertEquals(urlrewriter.rewrite('www.logilab.fr', '/whatever'),
+                          '/whatever')
+        self.assertEquals(urlrewriter.rewrite('www.logilab.fr', '/json/bla'),
+                          '/json/bla')
+        self.assertEquals(urlrewriter.rewrite('abcd.logilab.fr', '/json/bla'),
+                          '/json/bla')
+        self.assertEquals(urlrewriter.rewrite('abcd.logilab.fr', '/data/bla'),
+                          '/data/bla')
+        self.assertEquals(urlrewriter.rewrite('abcd.logilab.fr', '/whatever'),
+                          '/m_abcd/whatever')
+        # unmatched host: path goes through untouched
+        self.assertEquals(urlrewriter.rewrite('abcd.fr', '/whatever'),
+                          '/whatever')
+
+
+# standard test-module entry point
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/test/unittest_views_basecontrollers.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,646 @@
+"""cubicweb.web.views.basecontrollers unit tests"""
+import simplejson
+
+from logilab.common.testlib import unittest_main
+
+from cubicweb import Binary, Unauthorized
+from cubicweb.devtools._apptest import TestEnvironment
+from cubicweb.devtools.apptest import EnvBasedTC, ControllerTC
+
+from cubicweb.common import ValidationError
+from cubicweb.common.uilib import rql_for_eid
+
+from cubicweb.web import INTERNAL_FIELD_VALUE, Redirect, RequestError
+from cubicweb.web.views.basecontrollers import xmlize
+
+from cubicweb.entities.authobjs import EUser
+
+
+class EditControllerTC(ControllerTC):
+    def setUp(self):
+        # sanity check: these tests rely on 'users' having read access to EGroup
+        ControllerTC.setUp(self)
+        self.failUnless('users' in self.schema.eschema('EGroup').get_groups('read'))
+        
+    def tearDown(self):
+        ControllerTC.tearDown(self)
+        # ensure no test left the EGroup read permissions in a broken state
+        self.failUnless('users' in self.schema.eschema('EGroup').get_groups('read'))
+        
+    def test_noparam_edit(self):
+        """check that publishing without any form parameter raises ValidationError
+        """
+        
+        self.req.form = {}
+        self.assertRaises(ValidationError, self.publish, self.req)
+        
+    def test_validation_unique(self):
+        """creating a user with an already existing login ('admin') must
+        raise a ValidationError (unicity constraint)
+        """
+        user = self.user()
+        self.req.form = {'eid': 'X', '__type:X': 'EUser',
+                         'login:X': u'admin', 'edits-login:X': u'', 
+                         'upassword:X': u'toto', 'upassword-confirm:X': u'toto', 'edits-upassword:X': u'', 
+                         }
+        self.assertRaises(ValidationError, self.publish, self.req)
+
+
+    def test_user_editing_itself(self):
+        """checking that a manager user can edit itself
+        """
+        user = self.user()
+        basegroups = [str(eid) for eid, in self.execute('EGroup G WHERE X in_group G, X eid %(x)s', {'x': user.eid})]
+        groupeids = [eid for eid, in self.execute('EGroup G WHERE G name in ("managers", "users")')]
+        groups = [str(eid) for eid in groupeids]
+        stateeid = [eid for eid, in self.execute('State S WHERE S name "activated"')][0]
+        self.req.form = {
+            'eid':       `user.eid`,
+            '__type:'+`user.eid`:    'EUser',
+            'login:'+`user.eid`:     unicode(user.login),
+            'firstname:'+`user.eid`: u'Th\xe9nault',
+            'surname:'+`user.eid`:   u'Sylvain',
+            'in_group:'+`user.eid`:  groups,
+            'in_state:'+`user.eid`:  `stateeid`,
+            # edits-* entries hold original values — presumably used for change detection; verify in controller
+            'edits-login:'+`user.eid`:     unicode(user.login),
+            'edits-firstname:'+`user.eid`: u'',
+            'edits-surname:'+`user.eid`:   u'',
+            'edits-in_group:'+`user.eid`:  basegroups,
+            'edits-in_state:'+`user.eid`:  `stateeid`,
+            }
+        path, params = self.expect_redirect_publish()
+        e = self.execute('Any X WHERE X eid %(x)s', {'x': user.eid}, 'x').get_entity(0, 0)
+        self.assertEquals(e.firstname, u'Th\xe9nault')
+        self.assertEquals(e.surname, u'Sylvain')
+        self.assertEquals(e.login, user.login)
+        self.assertEquals([g.eid for g in e.in_group], groupeids)
+        self.assertEquals(e.in_state[0].eid, stateeid)
+
+    def test_user_can_change_its_password(self):
+        """a plain (non-manager) user may change its own password"""
+        user = self.create_user('user')
+        cnx = self.login('user')
+        req = self.request()
+        #self.assertEquals(self.ctrl.schema['EUser']._groups['read'],
+        #                  ('managers', 'users'))
+        req.form = {
+            'eid': `user.eid`, '__type:'+`user.eid`: 'EUser',
+            '__maineid' : str(user.eid),
+            'upassword:'+`user.eid`: 'tournicoton',
+            'upassword-confirm:'+`user.eid`: 'tournicoton',
+            'edits-upassword:'+`user.eid`:  '',
+            }
+        path, params = self.expect_redirect_publish(req)
+        cnx.commit() # commit to check we don't get late validation error for instance
+        self.assertEquals(path, 'euser/user')
+        self.failIf('vid' in params)
+
+    def testr_user_editing_itself_no_relation(self):
+        """checking we can edit an entity without specifying some required
+        relations (meaning no changes)
+
+        NOTE(review): method name carries a stray 'r' ("testr_"); it still
+        starts with 'test' so unittest discovery picks it up anyway
+        """
+        user = self.user()
+        groupeids = [eid for eid, in self.execute('EGroup G WHERE X in_group G, X eid %(x)s', {'x': user.eid})]
+        self.req.form = {
+            'eid':       `user.eid`,
+            '__type:'+`user.eid`:    'EUser',
+            'login:'+`user.eid`:     unicode(user.login),
+            'firstname:'+`user.eid`: u'Th\xe9nault',
+            'surname:'+`user.eid`:   u'Sylvain',
+            #
+            'edits-login:'+`user.eid`:     unicode(user.login),
+            'edits-firstname:'+`user.eid`: u'',
+            'edits-surname:'+`user.eid`:   u'',
+            }
+        path, params = self.expect_redirect_publish()
+        e = self.execute('Any X WHERE X eid %(x)s', {'x': user.eid}, 'x').get_entity(0, 0)
+        self.assertEquals(e.login, user.login)
+        self.assertEquals(e.firstname, u'Th\xe9nault')
+        self.assertEquals(e.surname, u'Sylvain')
+        # relations not present in the form must be left untouched
+        self.assertEquals([g.eid for g in e.in_group], groupeids)
+        stateeids = [eid for eid, in self.execute('State S WHERE S name "activated"')]
+        self.assertEquals([s.eid for s in e.in_state], stateeids)
+        
+        
+    def test_create_multiple_linked(self):
+        """create a EUser and a linked EmailAddress in a single form post"""
+        gueid = self.execute('EGroup G WHERE G name "users"')[0][0]
+        self.req.form = {'eid': ['X', 'Y'],
+                         
+                         '__type:X': 'EUser',
+                         '__maineid' : 'X',
+                         'login:X': u'adim', 'edits-login:X': u'', 
+                         'upassword:X': u'toto', 'upassword-confirm:X': u'toto', 'edits-upassword:X': u'', 
+                         'surname:X': u'Di Mascio', 'edits-surname:X': '',
+
+                         'in_group:X': `gueid`, 'edits-in_group:X': INTERNAL_FIELD_VALUE, 
+                         
+                         '__type:Y': 'EmailAddress',
+                         'address:Y': u'dima@logilab.fr', 'edits-address:Y': '',
+                         'use_email:X': 'Y', 'edits-use_email:X': INTERNAL_FIELD_VALUE, 
+                         }
+        path, params = self.expect_redirect_publish()
+        # should be redirected on the created person
+        self.assertEquals(path, 'euser/adim')
+        e = self.execute('Any P WHERE P surname "Di Mascio"').get_entity(0, 0)
+        self.assertEquals(e.surname, 'Di Mascio')
+        email = e.use_email[0]
+        self.assertEquals(email.address, 'dima@logilab.fr')
+        
+    def test_edit_multiple_linked(self):
+        """edit an existing user and create, then edit, a linked email address
+        in single form posts"""
+        peid = self.create_user('adim').eid
+        self.req.form = {'eid': [`peid`, 'Y'],
+                         '__type:%s'%peid: 'EUser',
+                         'surname:%s'%peid: u'Di Masci', 'edits-surname:%s'%peid: '',
+                         
+                         '__type:Y': 'EmailAddress',
+                         'address:Y': u'dima@logilab.fr', 'edits-address:Y': '',
+                         'use_email:%s'%peid: 'Y', 'edits-use_email:%s'%peid: INTERNAL_FIELD_VALUE,
+                         
+                         '__redirectrql': 'Any X WHERE X eid %s'%peid,
+                         }
+        path, params = self.expect_redirect_publish()
+        # should be redirected on the created person
+        eid = params['rql'].split()[-1]
+        e = self.execute('Any X WHERE X eid %(x)s', {'x': eid}, 'x').get_entity(0, 0)
+        self.assertEquals(e.surname, 'Di Masci')
+        email = e.use_email[0]
+        self.assertEquals(email.address, 'dima@logilab.fr')
+        
+        # second post: edit the existing email address in place
+        emaileid = email.eid
+        self.req.form = {'eid': [`peid`, `emaileid`],
+                         '__type:%s'%peid: 'EUser',
+                         'surname:%s'%peid: u'Di Masci', 'edits-surname:%s'%peid: 'Di Masci',
+                         '__type:%s'%emaileid: 'EmailAddress',
+                         'address:%s'%emaileid: u'adim@logilab.fr', 'edits-address:%s'%emaileid: 'dima@logilab.fr',
+                         'use_email:%s'%peid: `emaileid`, 'edits-use_email:%s'%peid: `emaileid`, 
+                         '__redirectrql': 'Any X WHERE X eid %s'%peid,
+                         }
+        path, params = self.expect_redirect_publish()
+        # should be redirected on the created person
+        eid = params['rql'].split()[-1]
+        e = self.execute('Any X WHERE X eid %(x)s', {'x': eid}, 'x').get_entity(0, 0)
+        self.assertEquals(e.surname, 'Di Masci')
+        email = e.use_email[0]
+        self.assertEquals(email.address, 'adim@logilab.fr')
+
+        
+    def test_password_confirm(self):
+        """a missing or mismatching upassword-confirm field must raise
+        a ValidationError
+        """
+        user = self.user()
+        self.req.form = {'__cloned_eid:X': user.eid,
+                         'eid': 'X', '__type:X': 'EUser',
+                         'login:X': u'toto', 'edits-login:X': u'', 
+                         'upassword:X': u'toto', 'edits-upassword:X': u'', 
+                         }
+        self.assertRaises(ValidationError, self.publish, self.req)
+        self.req.form = {'__cloned_eid:X': user.eid,
+                         'eid': 'X', '__type:X': 'EUser',
+                         'login:X': u'toto', 'edits-login:X': u'', 
+                         'upassword:X': u'toto', 'upassword-confirm:X': u'tutu', 'edits-upassword:X': u'', 
+                         }
+        self.assertRaises(ValidationError, self.publish, self.req)
+
+
+    def test_interval_bound_constraint_success(self):
+        """out-of-interval amounts (-10, 110) raise ValidationError, an
+        in-range value (10) is accepted -- bounds presumably come from the
+        Salesterm schema constraint"""
+        feid = self.execute('INSERT File X: X name "toto.txt", X data %(data)s',
+                            {'data': Binary('yo')})[0][0]
+        self.req.form = {'eid': ['X'],
+                         '__type:X': 'Salesterm',
+                         'amount:X': u'-10', 'edits-amount:X': '',
+                         'described_by_test:X': str(feid), 'edits-described_by_test:X': INTERNAL_FIELD_VALUE,
+                         }
+        self.assertRaises(ValidationError, self.publish, self.req)
+        self.req.form = {'eid': ['X'],
+                         '__type:X': 'Salesterm',
+                         'amount:X': u'110', 'edits-amount:X': '',
+                         'described_by_test:X': str(feid), 'edits-described_by_test:X': INTERNAL_FIELD_VALUE,
+                         }
+        self.assertRaises(ValidationError, self.publish, self.req)
+        self.req.form = {'eid': ['X'],
+                         '__type:X': 'Salesterm',
+                         'amount:X': u'10', 'edits-amount:X': '',
+                         'described_by_test:X': str(feid), 'edits-described_by_test:X': INTERNAL_FIELD_VALUE,
+                         }
+        self.expect_redirect_publish()
+        # should be redirected on the created entity
+        #eid = params['rql'].split()[-1]
+        e = self.execute('Salesterm X').get_entity(0, 0)
+        self.assertEquals(e.amount, 10)
+
+    def test_req_pending_insert(self):
+        """make sure req's pending insertions are taken into account"""
+        tmpgroup = self.add_entity('EGroup', name=u"test")
+        user = self.user()
+        self.req.set_session_data('pending_insert', set([(user.eid, 'in_group', tmpgroup.eid)]))
+        path, params = self.expect_redirect_publish()
+        usergroups = [gname for gname, in
+                      self.execute('Any N WHERE G name N, U in_group G, U eid %(u)s', {'u': user.eid})]
+        self.assertUnorderedIterableEquals(usergroups, ['managers', 'test'])
+        # publishing the form must also flush the session's pending insertions
+        self.assertEquals(self.req.get_pending_inserts(), [])
+
+
+    def test_req_pending_delete(self):
+        """make sure req's pending deletions are taken into account"""
+        user = self.user()
+        groupeid = self.execute('INSERT EGroup G: G name "test", U in_group G WHERE U eid %(x)s',
+                                {'x': user.eid})[0][0]
+        usergroups = [gname for gname, in
+                      self.execute('Any N WHERE G name N, U in_group G, U eid %(u)s', {'u': user.eid})]
+        # just make sure everything was set correctly
+        self.assertUnorderedIterableEquals(usergroups, ['managers', 'test'])
+        # now try to delete the relation
+        self.req.set_session_data('pending_delete', set([(user.eid, 'in_group', groupeid)]))
+        path, params = self.expect_redirect_publish()
+        usergroups = [gname for gname, in
+                      self.execute('Any N WHERE G name N, U in_group G, U eid %(u)s', {'u': user.eid})]
+        self.assertUnorderedIterableEquals(usergroups, ['managers'])
+        # publishing the form must also flush the session's pending deletions
+        self.assertEquals(self.req.get_pending_deletes(), [])
+
+    def test_custom_attribute_handler(self):
+        """a custom_<attr>_edit method defined on the entity class takes over
+        the default attribute handling (here: login is upper-cased)"""
+        def custom_login_edit(self, formparams, value, relations):
+            formparams['login'] = value.upper()
+            relations.append('X login %(login)s')
+        EUser.custom_login_edit = custom_login_edit
+        try:
+            user = self.user()
+            eid = repr(user.eid)
+            self.req.form = {
+                'eid': eid,
+                '__type:'+eid:  'EUser',
+                'login:'+eid: u'foo',
+                'edits-login:'+eid:  unicode(user.login),
+                }
+            path, params = self.expect_redirect_publish()
+            rset = self.execute('Any L WHERE X eid %(x)s, X login L', {'x': user.eid}, 'x')
+            self.assertEquals(rset[0][0], 'FOO')
+        finally:
+            # always remove the monkey-patch so other tests aren't affected
+            del EUser.custom_login_edit
+        
+    def test_redirect_apply_button(self):
+        """'apply' (__action_apply) redirects back to the edition form of the
+        created entity, preserving the __redirect* parameters"""
+        redirectrql = rql_for_eid(4012) # whatever
+        self.req.form = {
+                         'eid': 'A', '__type:A': 'BlogEntry',
+                         '__maineid' : 'A',
+                         'content:A': u'"13:03:43"', 'edits-content:A': '',
+                         'title:A': u'huuu', 'edits-title:A': '',
+                         '__redirectrql': redirectrql,
+                         '__redirectvid': 'primary',
+                         '__redirectparams': 'toto=tutu&tata=titi',
+                         '__form_id': 'edition',
+                         '__action_apply': '',
+                         }
+        path, params = self.expect_redirect_publish()
+        self.failUnless(path.startswith('blogentry/'))
+        eid = path.split('/')[1]
+        self.assertEquals(params['vid'], 'edition')
+        self.assertNotEquals(int(eid), 4012)
+        self.assertEquals(params['__redirectrql'], redirectrql)
+        self.assertEquals(params['__redirectvid'], 'primary')
+        self.assertEquals(params['__redirectparams'], 'toto=tutu&tata=titi')
+
+    def test_redirect_ok_button(self):
+        """without __action_apply, publishing redirects to the view given by
+        __redirectrql / __redirectvid, expanding __redirectparams"""
+        redirectrql = rql_for_eid(4012) # whatever
+        self.req.form = {
+                         'eid': 'A', '__type:A': 'BlogEntry',
+                         '__maineid' : 'A',
+                         'content:A': u'"13:03:43"', 'edits-content:A': '',
+                         'title:A': u'huuu', 'edits-title:A': '',
+                         '__redirectrql': redirectrql,
+                         '__redirectvid': 'primary',
+                         '__redirectparams': 'toto=tutu&tata=titi',
+                         '__form_id': 'edition',
+                         }
+        path, params = self.expect_redirect_publish()
+        self.assertEquals(path, 'view')
+        self.assertEquals(params['rql'], redirectrql)
+        self.assertEquals(params['vid'], 'primary')
+        self.assertEquals(params['tata'], 'titi')
+        self.assertEquals(params['toto'], 'tutu')
+
+    def test_redirect_delete_button(self):
+        """__action_delete redirects to the etype list, to the related entity
+        when one exists, or to 'view' after a multiple deletion"""
+        eid = self.add_entity('BlogEntry', title=u'hop', content=u'hop').eid
+        self.req.form = {'eid': str(eid), '__type:%s'%eid: 'BlogEntry',
+                         '__action_delete': ''}
+        path, params = self.expect_redirect_publish()
+        self.assertEquals(path, 'blogentry')
+        self.assertEquals(params, {u'__message': u'entity deleted'})
+        eid = self.add_entity('EmailAddress', address=u'hop@logilab.fr').eid
+        self.execute('SET X use_email E WHERE E eid %(e)s, X eid %(x)s',
+                     {'x': self.session().user.eid, 'e': eid}, 'x')
+        self.commit()
+        self.req.form = {'eid': str(eid), '__type:%s'%eid: 'EmailAddress',
+                         '__action_delete': ''}
+        path, params = self.expect_redirect_publish()
+        self.assertEquals(path, 'euser/admin')
+        self.assertEquals(params, {u'__message': u'entity deleted'})
+        eid1 = self.add_entity('BlogEntry', title=u'hop', content=u'hop').eid
+        eid2 = self.add_entity('EmailAddress', address=u'hop@logilab.fr').eid
+        self.req.form = {'eid': [str(eid1), str(eid2)],
+                         '__type:%s'%eid1: 'BlogEntry',
+                         '__type:%s'%eid2: 'EmailAddress',
+                         '__action_delete': ''}
+        path, params = self.expect_redirect_publish()
+        self.assertEquals(path, 'view')
+        self.assertEquals(params, {u'__message': u'entities deleted'})
+
+    def test_nonregr_egroup_etype_editing(self):
+        """non-regression test checking that a manager user can edit an EEType
+        entity (EGroup)
+        """
+        groupeids = [eid for eid, in self.execute('EGroup G WHERE G name "managers"')]
+        groups = [str(eid) for eid in groupeids]
+        eeetypeeid = self.execute('EEType X WHERE X name "EGroup"')[0][0]
+        basegroups = [str(eid) for eid, in self.execute('EGroup G WHERE X read_permission G, X eid %(x)s', {'x': eeetypeeid})]
+        self.req.form = {
+                'eid':      `eeetypeeid`,
+                '__type:'+`eeetypeeid`:   'EEType',
+                'name:'+`eeetypeeid`:     u'EGroup',
+                'final:'+`eeetypeeid`:    False,
+                'meta:'+`eeetypeeid`:     True,
+                'description:'+`eeetypeeid`:     u'users group', 
+                'read_permission:'+`eeetypeeid`:  groups,
+                #
+                'edits-name:'+`eeetypeeid`:     u'EGroup',
+                'edits-final:'+`eeetypeeid`:    False,
+                'edits-meta:'+`eeetypeeid`:     True,
+                'edits-description:'+`eeetypeeid`:     u'users group', 
+                'edits-read_permission:'+`eeetypeeid`:  basegroups,
+                }
+        try:
+            path, params = self.expect_redirect_publish()
+            e = self.execute('Any X WHERE X eid %(x)s', {'x': eeetypeeid}, 'x').get_entity(0, 0)
+            self.assertEquals(e.name, 'EGroup')
+            self.assertEquals([g.eid for g in e.read_permission], groupeids)
+        finally:
+            # restore original read permissions so later tests see a sane schema
+            self.execute('SET X read_permission Y WHERE X name "EGroup", Y eid IN (%s), NOT X read_permission Y' % (','.join(basegroups)))
+            self.commit()
+            
+    def test_nonregr_eetype_etype_editing(self):
+        """non-regression test checking that a manager user can edit an EEType
+        entity (EEType)
+        """
+        groupeids = sorted(eid for eid, in self.execute('EGroup G WHERE G name in ("managers", "users")'))
+        groups = [str(eid) for eid in groupeids]
+        eeetypeeid = self.execute('EEType X WHERE X name "EEType"')[0][0]
+        basegroups = [str(eid) for eid, in self.execute('EGroup G WHERE X read_permission G, X eid %(x)s', {'x': eeetypeeid})]
+        self.req.form = {
+                'eid':      `eeetypeeid`,
+                '__type:'+`eeetypeeid`:  'EEType',
+                'name:'+`eeetypeeid`:     u'EEType',
+                'final:'+`eeetypeeid`:    False,
+                'meta:'+`eeetypeeid`:     True,
+                'description:'+`eeetypeeid`:     u'users group', 
+                'read_permission:'+`eeetypeeid`:  groups,
+
+                'edits-name:'+`eeetypeeid`:     u'EEType',
+                'edits-final:'+`eeetypeeid`:    False,
+                'edits-meta:'+`eeetypeeid`:     True,
+                'edits-description:'+`eeetypeeid`:     u'users group', 
+                'edits-read_permission:'+`eeetypeeid`:  basegroups,
+                }
+        try:
+            path, params = self.expect_redirect_publish()
+            e = self.execute('Any X WHERE X eid %(x)s', {'x': eeetypeeid}, 'x').get_entity(0, 0)
+            self.assertEquals(e.name, 'EEType')
+            self.assertEquals(sorted(g.eid for g in e.read_permission), groupeids)
+        finally:
+            # restore original read permissions so later tests see a sane schema
+            self.execute('SET X read_permission Y WHERE X name "EEType", Y eid IN (%s), NOT X read_permission Y' % (','.join(basegroups)))
+            self.commit()
+        
+    def test_nonregr_strange_text_input(self):
+        """non-regression test checking text input containing "13:03:43"
+
+        this seems to be postgres (tsearch?) specific
+        """        
+        self.req.form = {
+                         'eid': 'A', '__type:A': 'BlogEntry',
+                         '__maineid' : 'A',
+                         'title:A': u'"13:03:40"', 'edits-title:A': '',
+                         'content:A': u'"13:03:43"', 'edits-content:A': ''}
+        path, params = self.expect_redirect_publish()
+        self.failUnless(path.startswith('blogentry/'))
+        eid = path.split('/')[1]
+        e = self.execute('Any C, T WHERE C eid %(x)s, C content T', {'x': eid}, 'x').get_entity(0, 0)
+        self.assertEquals(e.title, '"13:03:40"')
+        self.assertEquals(e.content, '"13:03:43"')
+
+
+    def test_nonregr_multiple_empty_email_addr(self):
+        gueid = self.execute('EGroup G WHERE G name "users"')[0][0]
+        self.req.form = {'eid': ['X', 'Y'],
+                         
+                         '__type:X': 'EUser',
+                         'login:X': u'adim', 'edits-login:X': u'', 
+                         'upassword:X': u'toto', 'upassword-confirm:X': u'toto', 'edits-upassword:X': u'', 
+                         'in_group:X': `gueid`, 'edits-in_group:X': INTERNAL_FIELD_VALUE, 
+                         
+                         '__type:Y': 'EmailAddress',
+                         'address:Y': u'', 'edits-address:Y': '',
+                         'alias:Y': u'', 'edits-alias:Y': '',
+                         'use_email:X': 'Y', 'edits-use_email:X': INTERNAL_FIELD_VALUE, 
+                         }
+        self.assertRaises(ValidationError, self.publish, self.req)
+
+    def test_nonregr_copy(self):
+        user = self.user()
+        self.req.form = {'__cloned_eid:X': user.eid,
+                         'eid': 'X', '__type:X': 'EUser',
+                         '__maineid' : 'X',
+                         'login:X': u'toto', 'edits-login:X': u'', 
+                         'upassword:X': u'toto', 'upassword-confirm:X': u'toto', 'edits-upassword:X': u'', 
+                         }
+        path, params = self.expect_redirect_publish()
+        self.assertEquals(path, 'euser/toto')
+        e = self.execute('Any X WHERE X is EUser, X login "toto"').get_entity(0, 0)
+        self.assertEquals(e.login, 'toto')
+        self.assertEquals(e.in_group[0].name, 'managers')
+
+
+    def test_nonregr_rollback_on_validation_error(self):
+        p = self.create_user("doe")
+        # do not try to skip 'primary_email' for this test
+        old_skips = p.__class__.skip_copy_for
+        p.__class__.skip_copy_for = ()
+        try:
+            e = self.add_entity('EmailAddress', address=u'doe@doe.com')
+            self.execute('SET P use_email E, P primary_email E WHERE P eid %(p)s, E eid %(e)s',
+                         {'p' : p.eid, 'e' : e.eid})
+            self.req.form = {'__cloned_eid:X': p.eid,
+                             'eid': 'X', '__type:X': 'EUser',
+                             'login': u'dodo', 'edits-login': u'dodo', 
+                             'surname:X': u'Boom', 'edits-surname:X': u'',
+                             '__errorurl' : "whatever but required",
+                             }
+            # try to emulate what really happens in the web application
+            # 1/ validate form => EditController.publish raises a ValidationError
+            #    which fires a Redirect
+            # 2/ When re-publishing the copy form, the publisher implicitly commits
+            try:
+                self.env.app.publish('edit', self.req)
+            except Redirect:
+                self.req.form['rql'] = 'Any X WHERE X eid %s' % p.eid
+                self.req.form['vid'] = 'copy'
+                self.env.app.publish('view', self.req)
+            rset = self.execute('EUser P WHERE P surname "Boom"')
+            self.assertEquals(len(rset), 0)
+        finally:
+            p.__class__.skip_copy_for = old_skips
+
+
+class EmbedControllerTC(EnvBasedTC):
+
+    def test_nonregr_embed_publish(self):
+        # This test looks a bit stupid but at least it will probably
+        # fail if the controller API changes and if EmbedController is not
+        # updated (which is what happened before this test)
+        req = self.request()
+        req.form['url'] = 'http://intranet.logilab.fr/'
+        controller = self.env.app.select_controller('embed', req)
+        result = controller.publish(rset=None)
+
+
+class ReportBugControllerTC(EnvBasedTC):
+
+    def test_usable_by_guets(self):
+        req = self.request()
+        self.env.app.select_controller('reportbug', req)
+
+
+class SendMailControllerTC(EnvBasedTC):
+
+    def test_not_usable_by_guets(self):
+        self.login('anon')
+        req = self.request()
+        self.assertRaises(Unauthorized, self.env.app.select_controller, 'sendmail', req)
+   
+
+
+class JSONControllerTC(EnvBasedTC):
+
+    def ctrl(self, req=None):
+        req = req or self.request(url='http://whatever.fr/')
+        return self.env.app.select_controller('json', req)
+
+    def setup_database(self):
+        self.pytag = self.add_entity('Tag', name=u'python')
+        self.cubicwebtag = self.add_entity('Tag', name=u'cubicweb')
+        self.john = self.create_user(u'John')
+
+
+    ## tests ##################################################################
+    def test_simple_exec(self):
+        ctrl = self.ctrl(self.request(rql='EUser P WHERE P login "John"',
+                                      pageid='123'))
+        self.assertTextEquals(ctrl.publish(),
+                              xmlize(self.john.view('primary')))
+
+    def test_json_exec(self):
+        rql = 'Any T,N WHERE T is Tag, T name N'
+        ctrl = self.ctrl(self.request(mode='json', rql=rql, pageid='123'))
+        self.assertEquals(ctrl.publish(),
+                          simplejson.dumps(self.execute(rql).rows))
+
+    def test_remote_add_existing_tag(self):
+        self.remote_call('tag_entity', self.john.eid, ['python'])
+        self.assertUnorderedIterableEquals([tname for tname, in self.execute('Any N WHERE T is Tag, T name N')],
+                             ['python', 'cubicweb'])
+        self.assertEquals(self.execute('Any N WHERE T tags P, P is EUser, T name N').rows,
+                          [['python']])
+    
+    def test_remote_add_new_tag(self):
+        self.remote_call('tag_entity', self.john.eid, ['javascript'])
+        self.assertUnorderedIterableEquals([tname for tname, in self.execute('Any N WHERE T is Tag, T name N')],
+                             ['python', 'cubicweb', 'javascript'])
+        self.assertEquals(self.execute('Any N WHERE T tags P, P is EUser, T name N').rows,
+                          [['javascript']])
+
+    def test_edit_field(self):
+        nbusers = len(self.execute('EUser P'))
+        eid = self.john.eid
+        self.remote_call('edit_field', 'apply',
+                         ('eid', 'firstname:%s' % eid, '__maineid', '__type:%s'% eid, 'edits-firstname:%s' % eid ),
+                         (str(eid), u'Remi', str(eid), 'EUser', self.john.firstname),
+                         'firstname',
+                         eid)
+        self.commit()
+        rset = self.execute('EUser P')
+        # make sure the edit updated the existing EUser instead of inserting a new one
+        self.assertEquals(len(rset), nbusers)
+        john = self.execute('Any X WHERE X eid %(x)s', {'x': self.john.eid}, 'x').get_entity(0, 0)
+        self.assertEquals(john.eid, self.john.eid)
+        self.assertEquals(john.firstname, 'Remi')
+
+
+    def test_pending_insertion(self):
+        res, req = self.remote_call('add_pending_insert', ['12', 'tags', '13'])
+        deletes = req.get_pending_deletes()
+        self.assertEquals(deletes, [])
+        inserts = req.get_pending_inserts()
+        self.assertEquals(inserts, ['12:tags:13'])
+        res, req = self.remote_call('add_pending_insert', ['12', 'tags', '14'])
+        deletes = req.get_pending_deletes()
+        self.assertEquals(deletes, [])
+        inserts = req.get_pending_inserts()
+        self.assertEquals(inserts, ['12:tags:13', '12:tags:14'])
+        inserts = req.get_pending_inserts(12)
+        self.assertEquals(inserts, ['12:tags:13', '12:tags:14'])
+        inserts = req.get_pending_inserts(13)
+        self.assertEquals(inserts, ['12:tags:13'])
+        inserts = req.get_pending_inserts(14)
+        self.assertEquals(inserts, ['12:tags:14'])
+        req.remove_pending_operations()
+
+    def test_pending_deletion(self):
+        res, req = self.remote_call('add_pending_delete', ['12', 'tags', '13'])
+        inserts = req.get_pending_inserts()
+        self.assertEquals(inserts, [])
+        deletes = req.get_pending_deletes()
+        self.assertEquals(deletes, ['12:tags:13'])
+        res, req = self.remote_call('add_pending_delete', ['12', 'tags', '14'])
+        inserts = req.get_pending_inserts()
+        self.assertEquals(inserts, [])
+        deletes = req.get_pending_deletes()
+        self.assertEquals(deletes, ['12:tags:13', '12:tags:14'])
+        deletes = req.get_pending_deletes(12)
+        self.assertEquals(deletes, ['12:tags:13', '12:tags:14'])
+        deletes = req.get_pending_deletes(13)
+        self.assertEquals(deletes, ['12:tags:13'])
+        deletes = req.get_pending_deletes(14)
+        self.assertEquals(deletes, ['12:tags:14'])
+        req.remove_pending_operations()
+
+    def test_remove_pending_operations(self):
+        self.remote_call('add_pending_delete', ['12', 'tags', '13'])
+        _, req = self.remote_call('add_pending_insert', ['12', 'tags', '14'])
+        inserts = req.get_pending_inserts()
+        self.assertEquals(inserts, ['12:tags:14'])
+        deletes = req.get_pending_deletes()
+        self.assertEquals(deletes, ['12:tags:13'])
+        req.remove_pending_operations()
+        self.assertEquals(req.get_pending_deletes(), [])
+        self.assertEquals(req.get_pending_inserts(), [])
+        
+
+    def test_add_inserts(self):
+        res, req = self.remote_call('add_pending_inserts',
+                                    [('12', 'tags', '13'), ('12', 'tags', '14')])
+        inserts = req.get_pending_inserts()
+        self.assertEquals(inserts, ['12:tags:13', '12:tags:14'])
+        req.remove_pending_operations()
+        
+
+    # silly tests
+    def test_external_resource(self):
+        self.assertEquals(self.remote_call('external_resource', 'RSS_LOGO')[0],
+                          simplejson.dumps(self.request().external_resource('RSS_LOGO')))
+    def test_i18n(self):
+        self.assertEquals(self.remote_call('i18n', ['bimboom'])[0],
+                          simplejson.dumps(['bimboom']))
+
+    def test_format_date(self):
+        self.assertEquals(self.remote_call('format_date', '"2007-01-01 12:00:00"')[0],
+                          simplejson.dumps('2007/01/01'))
+
+        
+
+        
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/test/unittest_views_baseforms.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,244 @@
+"""cubicweb.web.views.baseforms unit tests"""
+
+from StringIO import StringIO
+import re
+
+from logilab.common.testlib import unittest_main
+from cubicweb.devtools.apptest import EnvBasedTC
+
+from cubicweb.entities import AnyEntity
+
+from mx.DateTime import DateTime
+from cubicweb.web import widgets
+orig_today = widgets.today
+orig_now = widgets.now
+
+def setup_module(options):
+    def _today():
+        return DateTime(0000, 1, 1)
+    widgets.today = widgets.now = _today
+
+def teardown_module(options):
+    widgets.today = orig_today
+    widgets.now = orig_now
+
+
+def cleanup_text(text):
+    return re.sub('\d\d:\d\d', 'hh:mm', re.sub('\d+/\d\d/\d\d', 'YYYY/MM/DD', '\n'.join(l.strip() for l in text.splitlines() if l.strip())))
+
+
+
+class EditionFormTC(EnvBasedTC):
+
+    def setup_database(self):
+        self.create_user('joe')
+        
+    def _build_creation_form(self, etype):
+        req = self.request()
+        req.next_tabindex()
+        req.next_tabindex()
+        req.del_page_data()
+        req.form['etype'] = etype
+        view = self.vreg.select_view('creation', req, None)
+        entity = self.vreg.etype_class(etype)(req, None, None)
+        buffer = StringIO()
+        view.w = buffer.write
+        view.edit_form(entity, {})
+        return buffer.getvalue()
+    
+    def _test_view_for(self, etype, expected):
+        self.assertTextEquals(expected, cleanup_text(self._build_creation_form(etype)))
+        
+    def test_base(self):
+        self._test_view_for('EGroup', '''\
+<form id="entityForm" class="entityForm" cubicweb:target="eformframe"
+method="post" onsubmit="return freezeFormButtons('entityForm')" enctype="application/x-www-form-urlencoded" action="http://testing.fr/cubicweb/validateform">
+<div class="formTitle"><span>egroup (creation)</span></div>
+<div id="progress">validating...</div>
+<div class="iformTitle"><span>main informations</span></div>
+<div class="formBody"><fieldset>
+<input type="hidden" name="eid" value="A" />
+<input type="hidden" name="__type:A" value="EGroup" />
+<input type="hidden" name="__maineid" value="A" />
+<input id="errorurl" type="hidden" name="__errorurl" value="http://testing.fr/cubicweb/view?rql=Blop&amp;vid=blop" />
+<input type="hidden" name="__form_id" value="edition" />
+<input type="hidden" name="__message" value="element created" />
+<table id="entityFormA" class="attributeForm" style="width:100%;">
+<tr>
+<th class="labelCol"><label class="required" for="name:A">name</label></th>
+<td style="width:100%;">
+<input type="hidden" name="edits-name:A" value="__cubicweb_internal_field__"/>
+<input type="text" name="name:A" value="" accesskey="n" id="name:A" maxlength="64" size="40" tabindex="2"/>
+<br/>
+</td>
+</tr>
+</table>
+</fieldset>
+</div>
+<table width="100%">
+<tbody>
+<tr><td align="center">
+<input class="validateButton" type="submit" name="defaultsubmit" value="Button_ok" tabindex="3"/>
+</td><td style="align: right; width: 50%;">
+<input class="validateButton" type="button" onclick="postForm('__action_apply', 'Button_apply', 'entityForm')" value="Button_apply" tabindex="4"/>
+<input class="validateButton" type="button" onclick="postForm('__action_cancel', 'Button_cancel', 'entityForm')" value="Button_cancel" tabindex="5"/>
+</td></tr>
+</tbody>
+</table>
+</form>''')
+
+    def test_with_inline_view(self):
+        activated = self.execute('Any X WHERE X is State, X name "activated"')[0][0]
+        self._test_view_for('EUser', '''<form id="entityForm" class="entityForm" cubicweb:target="eformframe"
+method="post" onsubmit="return freezeFormButtons('entityForm')" enctype="application/x-www-form-urlencoded" action="http://testing.fr/cubicweb/validateform">
+<div class="formTitle"><span>euser (creation)</span></div>
+<div id="progress">validating...</div>
+<div class="iformTitle"><span>main informations</span></div>
+<div class="formBody"><fieldset>
+<input type="hidden" name="eid" value="A" />
+<input type="hidden" name="__type:A" value="EUser" />
+<input type="hidden" name="__maineid" value="A" />
+<input id="errorurl" type="hidden" name="__errorurl" value="http://testing.fr/cubicweb/view?rql=Blop&amp;vid=blop" />
+<input type="hidden" name="__form_id" value="edition" />
+<input type="hidden" name="__message" value="element created" />
+<table id="entityFormA" class="attributeForm" style="width:100%%;">
+<tr>
+<th class="labelCol"><label class="required" for="login:A">login</label></th>
+<td style="width:100%%;">
+<input type="hidden" name="edits-login:A" value="__cubicweb_internal_field__"/>
+<input type="text" name="login:A" value="" accesskey="l" id="login:A" maxlength="64" size="40" tabindex="2"/>
+<br/>&nbsp;<span class="helper">unique identifier used to connect to the application</span>
+</td>
+</tr>
+<tr>
+<th class="labelCol"><label class="required" for="upassword:A">upassword</label></th>
+<td style="width:100%%;">
+<input type="hidden" name="edits-upassword:A" value="__cubicweb_internal_field__"/>
+<input type="password" name="upassword:A" value="" accesskey="u" id="upassword:A" tabindex="3"/><br/>
+<input type="password" name="upassword-confirm:A" id="upassword-confirm:A" tabindex="4"/>&nbsp;<span class="emphasis">(confirm password)</span>
+<br/>
+</td>
+</tr>
+<tr>
+<th class="labelCol"><label for="firstname:A">firstname</label></th>
+<td style="width:100%%;">
+<input type="hidden" name="edits-firstname:A" value="__cubicweb_internal_field__"/>
+<input type="text" name="firstname:A" value="" accesskey="f" id="firstname:A" maxlength="64" size="40" tabindex="5"/>
+<br/>
+</td>
+</tr>
+<tr>
+<th class="labelCol"><label for="surname:A">surname</label></th>
+<td style="width:100%%;">
+<input type="hidden" name="edits-surname:A" value="__cubicweb_internal_field__"/>
+<input type="text" name="surname:A" value="" accesskey="s" id="surname:A" maxlength="64" size="40" tabindex="6"/>
+<br/>
+</td>
+</tr>
+<tr>
+<th class="labelCol"><label class="required" for="in_group:A">in_group</label></th>
+<td style="width:100%%;">
+<input type="hidden" name="edits-in_group:A" value="__cubicweb_internal_field__"/>
+<select name="in_group:A" id="in_group:A" multiple="multiple" size="5" tabindex="7">
+<option value="3" >guests</option>
+<option value="1" >managers</option>
+<option value="2" >users</option>
+</select>
+<br/>&nbsp;<span class="helper">groups grant permissions to the user</span>
+</td>
+</tr>
+<tr>
+<th class="labelCol"><label class="required" for="in_state:A">in_state</label></th>
+<td style="width:100%%;">
+<input type="hidden" name="edits-in_state:A" value="__cubicweb_internal_field__"/>
+<select name="in_state:A" id="in_state:A" tabindex="8">
+<option value="%(activated)s" >activated</option>
+</select>
+<br/>&nbsp;<span class="helper">account state</span>
+</td>
+</tr>
+</table>
+<div id="inlineuse_emailslot">
+<div class="inlinedform" id="addNewEmailAddressuse_emailsubject:A" cubicweb:limit="true">
+<a class="addEntity" id="adduse_email:Alink" href="javascript: addInlineCreationForm('A', 'EUser', 'EmailAddress', 'use_email', 'subject')" >+ add a EmailAddress.</a>
+</div>
+<div class="trame_grise">&nbsp;</div>
+</div>
+</fieldset>
+</div>
+<table width="100%%">
+<tbody>
+<tr><td align="center">
+<input class="validateButton" type="submit" name="defaultsubmit" value="Button_ok" tabindex="9"/>
+</td><td style="align: right; width: 50%%;">
+<input class="validateButton" type="button" onclick="postForm('__action_apply', 'Button_apply', 'entityForm')" value="Button_apply" tabindex="10"/>
+<input class="validateButton" type="button" onclick="postForm('__action_cancel', 'Button_cancel', 'entityForm')" value="Button_cancel" tabindex="11"/>
+</td></tr>
+</tbody>
+</table>
+</form>''' % {'activated' : activated})
+
+    def test_redirection_after_creation(self):
+        req = self.request()
+        req.form['etype'] = 'EUser'
+        view = self.vreg.select_view('creation', req, None)
+        self.assertEquals(view.redirect_url(), 'http://testing.fr/cubicweb/euser')
+        req.form['__redirectrql'] = 'Any X WHERE X eid 3012'
+        req.form['__redirectvid'] = 'avid'
+        self.assertEquals(view.redirect_url(), 'http://testing.fr/cubicweb/view?rql=Any%20X%20WHERE%20X%20eid%203012&vid=avid')
+
+
+    def test_need_multipart(self):
+        req = self.request()
+        class Salesterm(AnyEntity):
+            id = 'Salesterm'
+            __rtags__ = {'described_by_test' : 'inlineview'}
+        vreg = self.vreg
+        vreg.register_vobject_class(Salesterm)
+        req.form['etype'] = 'Salesterm'
+        entity = vreg.etype_class('Salesterm')(req, None, None)
+        view = vreg.select_view('creation', req, None)
+        self.failUnless(view.need_multipart(entity))
+        
+
+
+    def test_nonregr_check_add_permission_on_relation(self):
+        from eblog.entities import BlogEntry
+        class BlogEntryPlus(BlogEntry):
+            __rtags__ = {'checked_by': 'primary'}
+        self.vreg.register_vobject_class(BlogEntryPlus)
+        # an admin should be able to edit the checked_by relation
+        html = self._build_creation_form('BlogEntry')
+        self.failUnless('name="edits-checked_by:A"' in html)
+        # a regular user should not be able to see the relation
+        self.login('joe')
+        html = self._build_creation_form('BlogEntry')
+        self.failIf('name="edits-checked_by:A"' in html)
+        
+from cubicweb.devtools.testlib import WebTest
+from cubicweb.devtools.htmlparser import DTDValidator
+
+class CopyWebTest(WebTest):
+
+    def setup_database(self):
+        p = self.create_user("Doe")
+        # do not try to skip 'primary_email' for this test
+        e = self.add_entity('EmailAddress', address=u'doe@doe.com')
+        self.execute('SET P use_email E, P primary_email E WHERE P eid %(p)s, E eid %(e)s',
+                     {'p' : p.eid, 'e' : e.eid})
+
+
+    def test_cloned_elements_in_copy_form(self):
+        rset = self.execute('EUser P WHERE P login "Doe"')
+        output = self.view('copy', rset)
+        clones = [attrs for _, attrs in output.input_tags
+                  if attrs.get('name', '').startswith('__cloned_eid')]
+        # the only cloned entity should be the original person
+        self.assertEquals(len(clones), 1)
+        attrs = clones[0]
+        self.assertEquals(attrs['name'], '__cloned_eid:A')
+        self.assertEquals(int(attrs['value']), rset[0][0])
+
+
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/test/unittest_views_baseviews.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,108 @@
+from simplejson import loads
+
+from logilab.common.testlib import unittest_main
+from logilab.mtconverter import html_unescape
+
+from cubicweb.devtools.apptest import EnvBasedTC
+
+from cubicweb.web.htmlwidgets import TableWidget
+from cubicweb.web.views.baseviews import vid_from_rset
+
+def loadjson(value):
+    return loads(html_unescape(value))
+
+class VidFromRsetTC(EnvBasedTC):
+    
+    def test_no_rset(self):
+        req = self.request()
+        self.assertEquals(vid_from_rset(req, None, self.schema), 'index')
+    
+    def test_no_entity(self):
+        req = self.request()
+        rset = self.execute('Any X WHERE X login "blabla"')
+        self.assertEquals(vid_from_rset(req, rset, self.schema), 'noresult')
+
+    def test_one_entity(self):
+        req = self.request()
+        rset = self.execute('Any X WHERE X login "admin"')
+        self.assertEquals(vid_from_rset(req, rset, self.schema), 'primary')
+        rset = self.execute('Any X, L WHERE X login "admin", X login L')
+        self.assertEquals(vid_from_rset(req, rset, self.schema), 'primary')
+        req.search_state = ('pasnormal',)
+        rset = self.execute('Any X WHERE X login "admin"')
+        self.assertEquals(vid_from_rset(req, rset, self.schema), 'outofcontext-search')
+
+    def test_one_entity_eid(self):
+        req = self.request()
+        rset = self.execute('Any X WHERE X eid 1')
+        self.assertEquals(vid_from_rset(req, rset, self.schema), 'primary')
+        
+    def test_more_than_one_entity(self):
+        req = self.request()
+        rset = self.execute('Any X WHERE X is EUser')
+        self.assertEquals(vid_from_rset(req, rset, self.schema), 'list')
+        rset = self.execute('Any X, L WHERE X login L')
+        self.assertEquals(vid_from_rset(req, rset, self.schema), 'list')
+    
+    def test_more_than_one_entity_by_row(self):
+        req = self.request()
+        rset = self.execute('Any X, G WHERE X in_group G')
+        self.assertEquals(vid_from_rset(req, rset, self.schema), 'table')
+    
+    def test_more_than_one_entity_by_row_2(self):
+        req = self.request()
+        rset = self.execute('Any X, GN WHERE X in_group G, G name GN')
+        self.assertEquals(vid_from_rset(req, rset, self.schema), 'table')
+    
+    def test_aggregat(self):
+        req = self.request()
+        rset = self.execute('Any X, COUNT(T) GROUPBY X WHERE X is T')
+        self.assertEquals(vid_from_rset(req, rset, self.schema), 'table')
+        rset = self.execute('Any MAX(X) WHERE X is EUser')
+        self.assertEquals(vid_from_rset(req, rset, self.schema), 'table')
+
+    def test_subquery(self):
+        rset = self.execute(
+'DISTINCT Any X,N ORDERBY N '
+'WITH X,N BEING ('
+'     (DISTINCT Any P,N WHERE P is EUser, P login N)'
+'       UNION'
+'     (DISTINCT Any W,N WHERE W is EGroup, W name N))')
+        req = self.request()
+        self.assertEquals(vid_from_rset(req, rset, self.schema), 'table')
+
+
+class TableViewTC(EnvBasedTC):
+
+    def _prepare_entity(self):
+        e = self.add_entity("State", name=u'<toto>', description=u'loo"ong blabla')
+        rset = self.execute('Any X, D, CD, NOW - CD WHERE X is State, X description D, X creation_date CD, X eid %(x)s',
+                            {'x': e.eid}, 'x')
+        req = self.request()
+        view = self.vreg.select_view('table', req, rset)
+        return e, rset, view
+      
+    def test_headers(self):
+        self.skip('implement me')
+
+    def test_sortvalue(self):
+        e, _, view = self._prepare_entity()
+        expected = ['<toto>', 'loo"ong blabla'[:10], e.creation_date.strftime('%Y-%m-%d %H:%M')]
+        got = [loadjson(view.sortvalue(0, i)) for i in xrange(3)]
+        self.assertListEqual(got, expected)
+        # XXX sqlite does not handle Interval correctly
+        # value = loadjson(view.sortvalue(0, 3))
+        # self.assertAlmostEquals(value, rset.rows[0][3].seconds)
+
+    def test_sortvalue_with_display_col(self):
+        e, rset, view = self._prepare_entity()
+        rqlstdescr = rset.syntax_tree().get_description()[0] # XXX missing Union support
+        table = TableWidget(view)
+        table.columns = view.get_columns(rqlstdescr, [1, 2], None, None, None, None, 0)
+        expected = ['loo"ong blabla'[:10], e.creation_date.strftime('%Y-%m-%d %H:%M')]
+        got = [loadjson(value) for _, value in table.itercols(0)]
+        self.assertListEqual(got, expected)
+
+
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/test/unittest_views_embeding.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,33 @@
+
+from logilab.common.testlib import TestCase, unittest_main
+
+from cubicweb.web.views.embedding import prefix_links
+
+class UILIBTC(TestCase):
+
+
+    def test_prefix_links(self):
+        """suppose we are embedding http://embedded.com/page1.html"""
+        orig = ['<a href="http://www.perdu.com">perdu ?</a>',
+        '<a href="http://embedded.com/page1.html">perdu ?</a>',
+        '<a href="/page2.html">perdu ?</a>',
+        '<a href="page3.html">perdu ?</a>',
+        '<img src="http://www.perdu.com/img.png"/>',
+        '<img src="/img.png"/>',
+        '<img src="img.png"/>',
+        ]
+        expected = ['<a href="PREFIXhttp%3A%2F%2Fwww.perdu.com">perdu ?</a>',
+        '<a href="PREFIXhttp%3A%2F%2Fembedded.com%2Fpage1.html">perdu ?</a>',
+        '<a href="PREFIXhttp%3A%2F%2Fembedded.com%2Fpage2.html">perdu ?</a>',
+        '<a href="PREFIXhttp%3A%2F%2Fembedded.com%2Fpage3.html">perdu ?</a>',
+        '<img src="http://www.perdu.com/img.png"/>',
+        '<img src="http://embedded.com/img.png"/>',
+        '<img src="http://embedded.com/img.png"/>',
+        ]
+        for orig_a, expected_a in zip(orig, expected):
+            got = prefix_links(orig_a, 'PREFIX', 'http://embedded.com/page1.html')
+            self.assertEquals(got, expected_a)
+
+if __name__ == '__main__':
+    unittest_main()
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/test/unittest_views_navigation.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,101 @@
+"""cubicweb.web.views.navigation unit tests"""
+
+from logilab.common.testlib import unittest_main, mock_object
+from cubicweb.devtools.apptest import EnvBasedTC
+
+from cubicweb.web.views.navigation import PageNavigation, SortedNavigation
+
+from eclasstags.views import TagsBarVComponent
+TagsBarVComponent.visible = True
+
+class NavigationTC(EnvBasedTC):
+    
+    def test_navigation_selection(self):
+        rset = self.execute('Any X,N WHERE X name N')
+        req = self.request()
+        navcomp = self.vreg.select_component('navigation', req, rset)
+        self.assertIsInstance(navcomp, PageNavigation)
+        req.set_search_state('W:X:Y:Z')
+        navcomp = self.vreg.select_component('navigation', req, rset)
+        self.assertIsInstance(navcomp, PageNavigation)
+        req.set_search_state('normal')
+        rset = self.execute('Any X,N ORDERBY N WHERE X name N')
+        navcomp = self.vreg.select_component('navigation', req, rset)
+        self.assertIsInstance(navcomp, SortedNavigation)
+        req.set_search_state('W:X:Y:Z')
+        navcomp = self.vreg.select_component('navigation', req, rset)
+        self.assertIsInstance(navcomp, SortedNavigation)
+        req.set_search_state('normal')
+        rset = self.execute('Any X,N WHERE X name N LIMIT 10')
+        navcomp = self.vreg.select_component('navigation', req, rset)
+        self.assertEquals(navcomp, None)
+        req.set_search_state('W:X:Y:Z')
+        navcomp = self.vreg.select_component('navigation', req, rset)
+        self.assertEquals(navcomp, None)
+        req.set_search_state('normal')
+        rset = self.execute('Any N, COUNT(RDEF) GROUPBY N ORDERBY N WHERE RDEF relation_type RT, RT name N')
+        navcomp = self.vreg.select_component('navigation', req, rset)
+        self.assertIsInstance(navcomp, SortedNavigation)
+        req.set_search_state('W:X:Y:Z')
+        navcomp = self.vreg.select_component('navigation', req, rset)
+        self.assertIsInstance(navcomp, SortedNavigation)
+        
+        
+    def test_sorted_navigation(self):
+        rset = self.execute('Any X,N ORDERBY N WHERE X name N')
+        req = self.request()
+        req.set_search_state('W:X:Y:Z')
+        navcomp = self.vreg.select_component('navigation', rset.req, rset)
+        html = navcomp.dispatch()
+        rset = self.execute('Any RDEF ORDERBY RT WHERE RDEF relation_type RT')
+        navcomp = self.vreg.select_component('navigation', req, rset)
+        html = navcomp.dispatch()
+        rset = self.execute('Any RDEF ORDERBY RDEF WHERE RDEF relation_type RT')
+        navcomp = self.vreg.select_component('navigation', req, rset)
+        html = navcomp.dispatch()
+        rset = self.execute('EFRDef RDEF ORDERBY RDEF')
+        navcomp = self.vreg.select_component('navigation', req, rset)
+        html = navcomp.dispatch()
+        rset = self.execute('Any RDEF ORDERBY N WHERE RDEF relation_type RT, RT name N')
+        navcomp = self.vreg.select_component('navigation', req, rset)
+        html = navcomp.dispatch()
+        rset = self.execute('Any N, COUNT(RDEF) GROUPBY N ORDERBY N WHERE RDEF relation_type RT, RT name N')
+        navcomp = self.vreg.select_component('navigation', rset.req, rset)
+        html = navcomp.dispatch()
+
+
+
+class ContentNavigationTC(EnvBasedTC):
+
+    def test_component_context(self):
+        view = mock_object(is_primary=lambda x: True)
+        rset = self.execute('EUser X LIMIT 1')
+        req = self.request()
+        objs = self.vreg.possible_vobjects('contentnavigation', req, rset,
+                                           view=view, context='navtop')
+        # tagbar should be in headers by default
+        clsids = set(obj.id for obj in objs)
+        self.failUnless('tagsbar' in clsids)
+        objs = self.vreg.possible_vobjects('contentnavigation', req, rset,
+                                          view=view, context='navbottom')
+        # tagbar should _NOT_ be in footers by default
+        clsids = set(obj.id for obj in objs)
+        self.failIf('tagsbar' in clsids)
+        self.execute('INSERT EProperty P: P pkey "contentnavigation.tagsbar.context", '
+                     'P value "navbottom"')
+        # tagbar should now be in footers
+        req.cnx.commit()
+        objs = self.vreg.possible_vobjects('contentnavigation', req, rset,
+                                          view=view, context='navbottom')
+        
+        clsids = [obj.id for obj in objs]
+        self.failUnless('tagsbar' in clsids)
+        objs = self.vreg.possible_vobjects('contentnavigation', req, rset,
+                                          view=view, context='navtop')
+        
+        clsids = [obj.id for obj in objs]
+        self.failIf('tagsbar' in clsids)
+        
+
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/test/unittest_views_searchrestriction.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,89 @@
+from cubicweb.devtools.apptest import EnvBasedTC
+from cubicweb.web.views.searchrestriction import extract_filter_fields, insert_attr_select_relation
+
+class ExtractFilterFieldsTC(EnvBasedTC):
+    def test_relations_cleanup(self):
+        # removing relation should be done in the table filter form but not
+        # from the facets box
+        rset = self.execute('Any X, S WHERE X in_state S')
+        afielddefs, baserql, groupby, orderby = extract_filter_fields(rset, 0)
+        afielddefs = [(getattr(r, 'r_type', r), role, type) for r, role, type, values in afielddefs]
+        self.assertEquals(afielddefs, [('has_text', 'subject', 'rstring'),
+                                       ('in_state', 'subject', 'eid')])
+        self.assertEquals(baserql, 'DISTINCT Any X,S')
+        self.assertEquals(groupby, '')
+        self.assertEquals(orderby, '')
+        # test rql st state
+        self.assertEquals(rset.syntax_tree().as_string(), 'Any X,S WHERE X in_state S')
+        afielddefs, baserql, groupby, orderby = extract_filter_fields(rset, 0, removerels=False)
+        afielddefs = [(getattr(r, 'r_type', r), role, type) for r, role, type, values in afielddefs]
+        self.assertEquals(afielddefs, [('has_text', 'subject', 'rstring'),
+                                       ('in_state', 'subject', 'eid')])
+        self.assertEquals(baserql, 'DISTINCT Any X,S WHERE X in_state S')
+
+
+class InsertAttrRelationTC(EnvBasedTC):
+    def parse(self, query):
+        rqlst = self.vreg.parse(self.session, query)
+        select = rqlst.children[0]
+        select.remove_groups()
+        return select
+
+    def _generate(self, select, rel, var, attr):
+        return insert_attr_select_relation(select, select.defined_vars[var], rel, attr)
+        
+    @property
+    def select(self):
+        return self.parse('Any B,(NOW - CD),S,V,U,GROUP_CONCAT(TN),VN,P,CD,BMD '
+                          'GROUPBY B,CD,S,V,U,VN,P,BMD '
+                          'WHERE B in_state S, B creation_date CD, '
+                          'B modification_date BMD, T? tags B, T name TN, '
+                          'V? bookmarked_by B, V title VN, B created_by U?, '
+                          'B in_group P, P name "managers"')
+    
+    def test_1(self):
+        self.assertEquals(self._generate(self.select, 'in_state', 'S', 'name'),
+                          "DISTINCT Any S,A ORDERBY A WHERE B in_state S, B in_group P, "
+                          "P name 'managers', S name A, B is EUser")
+        
+    def test_2(self):
+        self.assertEquals(self._generate(self.select, 'tags', 'T', 'name'),
+                          "DISTINCT Any T,TN ORDERBY TN WHERE T tags B, T name TN, "
+                          "B in_group P, P name 'managers', B is EUser")
+        
+    def test_3(self):
+        self.assertEquals(self._generate(self.select, 'created_by', 'U', 'login'),
+                          "DISTINCT Any U,A ORDERBY A WHERE B created_by U, B in_group P, "
+                          "P name 'managers', U login A, B is EUser")
+        
+    def test_nonregr1(self):
+        select = self.parse('Any T,V WHERE T bookmarked_by V?, '
+                            'V in_state VS, VS name "published", T created_by U')
+        self.assertEquals(self._generate(select, 'created_by', 'U', 'login'),
+                          'DISTINCT Any U,A ORDERBY A WHERE T created_by U, U login A, '
+                          'T is Bookmark')
+
+    def test_nonregr2(self):
+        #'DISTINCT Any X,TMP,N WHERE P name TMP, X version_of P, P is Project, X is Version, not X in_state S,S name "published", X num N ORDERBY TMP,N'
+        select = self.parse('DISTINCT Any V,TN,L ORDERBY TN,L WHERE T nom TN, V connait T, T is Personne, V is EUser,'
+                            'NOT V in_state VS, VS name "published", V login L')
+        rschema = self.schema['connait']
+        for s, o in rschema.iter_rdefs():
+            rschema.set_rproperty(s, o, 'cardinality', '++')
+        try:
+            self.assertEquals(self._generate(select, 'in_state', 'VS', 'name'),
+                              "DISTINCT Any VS,A ORDERBY A WHERE V is EUser, NOT V in_state VS, VS name 'published', VS name A")
+        finally:
+            for s, o in rschema.iter_rdefs():
+                rschema.set_rproperty(s, o, 'cardinality', '**')
+
+    def test_nonregr3(self):
+        #'DISTINCT Any X,TMP,N WHERE P name TMP, X version_of P, P is Project, X is Version, not X in_state S,S name "published", X num N ORDERBY TMP,N'
+        select = self.parse('DISTINCT Any X, MAX(Y) GROUPBY X WHERE X is EUser, Y is Bookmark, X in_group A')
+        self.assertEquals(self._generate(select, 'in_group', 'A', 'name'),
+                          "DISTINCT Any A,B ORDERBY B WHERE X is EUser, X in_group A, A name B")
+
+        
+if __name__ == '__main__':
+    from logilab.common.testlib import unittest_main
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/test/unittest_viewselector.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,462 @@
+# -*- coding: iso-8859-1 -*-
+"""XXX rename, split, reorganize this
+
+"""
+
+import os.path as osp
+
+from logilab.common.testlib import TestCase, unittest_main
+from cubicweb.devtools.apptest import EnvBasedTC
+
+
+from cubicweb import CW_SOFTWARE_ROOT as BASE, Binary
+from cubicweb.common.selectors import in_group_selector
+
+from cubicweb.web._exceptions import NoSelectableObject
+from cubicweb.web.action import Action
+from cubicweb.web.views import (baseviews, tableview, baseforms, calendar, 
+                             management, embedding, actions, startup, 
+                             euser, schemaentities, xbel, vcard, 
+                             idownloadable, wdoc, debug)
+from cubicweb.entities.lib import Card
+from cubicweb.interfaces import IMileStone
+
+USERACTIONS = [('myprefs', actions.UserPreferencesAction),
+               ('myinfos', actions.UserInfoAction),
+               ('logout', actions.LogoutAction)]
+SITEACTIONS = [('siteconfig', actions.SiteConfigurationAction),
+               ('manage', actions.ManageAction),
+               ('schema', actions.ViewSchemaAction)]        
+
+
+class ViewSelectorTC(EnvBasedTC):
+
+    def setup_database(self):
+        self.add_entity('BlogEntry', title=u"une news !", content=u"cubicweb c'est beau")
+        self.add_entity('Bookmark', title=u"un signet !", path=u"view?vid=index")
+        self.add_entity('Card', title=u'mandatory', content=u"DoC !")
+        self.add_entity('EmailAddress', address=u"devel@logilab.fr", alias=u'devel')
+        self.add_entity('Tag', name=u'x')
+
+    def pactions(self, req, rset):
+        resdict = self.vreg.possible_actions(req, rset)
+        for cat, actions in resdict.items():
+            resdict[cat] = [(a.id, a.__class__) for a in actions]
+        return resdict
+
+
+class VRegistryTC(ViewSelectorTC):
+    """test the view selector"""
+
+    def _test_registered(self, registry, content):
+        try:
+            expected = getattr(self, 'all_%s' % registry)
+        except AttributeError:
+            return
+        if registry == 'hooks':
+            self.assertEquals(len(content), expected, content)
+            return
+        try:
+            self.assertSetEqual(content.keys(), expected)
+        except:
+            print registry, sorted(expected), sorted(content.keys())
+            print 'no more', [v for v in expected if not v in content.keys()]
+            print 'missing', [v for v in content.keys() if not v in expected]
+            raise
+        
+    
+    def test_possible_views(self):
+        # no entity
+        req = self.request()
+        self.assertListEqual(self.pviews(req, None),
+                             [('changelog', wdoc.ChangeLogView),
+                              ('debug', debug.DebugView),
+                              ('epropertiesform', management.EpropertiesForm),
+                              ('index', startup.IndexView),
+                              ('info', management.ProcessInformationView),
+                              ('manage', startup.ManageView),
+                              ('schema', startup.SchemaView),
+                              ('systemepropertiesform', management.SystemEpropertiesForm)])
+        # no entity but etype
+        rset, req = self.env.get_rset_and_req('Any X WHERE X eid 999999')
+        self.assertListEqual(self.pviews(req, rset),
+                             [#('changelog', wdoc.ChangeLogView),
+                              #('epropertiesform', management.EpropertiesForm),
+                              #('index', startup.IndexView),
+                              #('info', management.ProcessInformationView),
+                              #('manage', startup.ManageView),
+                              #('schema', startup.SchemaView),
+                              #('systemepropertiesform', management.SystemEpropertiesForm)
+                                 ])
+        # one entity
+        rset, req = self.env.get_rset_and_req('EGroup X WHERE X name "managers"')
+        self.assertListEqual(self.pviews(req, rset),
+                             [('csvexport', baseviews.CSVRsetView),
+                              ('ecsvexport', baseviews.CSVEntityView),
+                              ('editable-table', tableview.EditableTableView),
+                              ('list', baseviews.ListView),
+                              ('oneline', baseviews.OneLineView),
+                              ('primary', baseviews.PrimaryView),
+                              ('rss', baseviews.RssView),
+                              ('secondary', baseviews.SecondaryView),
+                              ('security', management.SecurityManagementView),
+                              ('table', tableview.TableView),
+                              ('text', baseviews.TextView),
+                              ('xbel', xbel.XbelView),
+                              ('xml', baseviews.XmlView),
+                              ])
+        # list of entities of the same type
+        rset, req = self.env.get_rset_and_req('EGroup X')
+        self.assertListEqual(self.pviews(req, rset),
+                             [('csvexport', baseviews.CSVRsetView),
+                              ('ecsvexport', baseviews.CSVEntityView),
+                              ('editable-table', tableview.EditableTableView),
+                              ('list', baseviews.ListView),
+                              ('oneline', baseviews.OneLineView),
+                              ('primary', baseviews.PrimaryView),
+                              ('rss', baseviews.RssView),
+                              ('secondary', baseviews.SecondaryView),
+                              ('security', management.SecurityManagementView),
+                              ('table', tableview.TableView),
+                              ('text', baseviews.TextView),
+                              ('xbel', xbel.XbelView),
+                              ('xml', baseviews.XmlView),
+                              ])
+        # list of entities of different types
+        rset, req = self.env.get_rset_and_req('Any X')
+        self.assertListEqual(self.pviews(req, rset),
+                             [('csvexport', baseviews.CSVRsetView),
+                              ('ecsvexport', baseviews.CSVEntityView),
+                              ('editable-table', tableview.EditableTableView),
+                              ('list', baseviews.ListView),
+                              ('oneline', baseviews.OneLineView),
+                              ('primary', baseviews.PrimaryView),
+                              ('rss', baseviews.RssView),
+                              ('secondary', baseviews.SecondaryView),
+                              ('security', management.SecurityManagementView),
+                              ('table', tableview.TableView),
+                              ('text', baseviews.TextView),
+                              ('xbel', xbel.XbelView),
+                              ('xml', baseviews.XmlView),
+                              ])
+        # whatever
+        rset, req = self.env.get_rset_and_req('Any N, X WHERE X in_group Y, Y name N')
+        self.assertListEqual(self.pviews(req, rset),
+                             [('csvexport', baseviews.CSVRsetView),
+                              ('editable-table', tableview.EditableTableView),
+                              ('table', tableview.TableView),
+                              ])
+        # list of euser entities
+        rset, req = self.env.get_rset_and_req('EUser X')
+        self.assertListEqual(self.pviews(req, rset),
+                             [('csvexport', baseviews.CSVRsetView),
+                              ('ecsvexport', baseviews.CSVEntityView),
+                              ('editable-table', tableview.EditableTableView),
+                              ('list', baseviews.ListView),
+                              ('oneline', baseviews.OneLineView),
+                              ('primary', euser.EUserPrimaryView),
+                              ('rss', baseviews.RssView),
+                              ('secondary', baseviews.SecondaryView),
+                              ('security', management.SecurityManagementView),
+                              ('table', tableview.TableView),
+                              ('text', baseviews.TextView),
+                              ('vcard', vcard.VCardEUserView),
+                              ('xbel', xbel.XbelView),
+                              ('xml', baseviews.XmlView),
+                              ])
+        
+    def test_possible_actions_none_rset(self):
+        req = self.request()
+        self.assertDictEqual(self.pactions(req, None),
+                             {'useractions': USERACTIONS,
+                              'siteactions': SITEACTIONS,
+                              })
+    def test_possible_actions_no_entity(self):
+        rset, req = self.env.get_rset_and_req('Any X WHERE X eid 999999')
+        self.assertDictEqual(self.pactions(req, rset),
+                             {'useractions': USERACTIONS,
+                              'siteactions': SITEACTIONS,
+                              })
+    def test_possible_actions_same_type_entities(self):
+        rset, req = self.env.get_rset_and_req('EGroup X')
+        self.assertDictEqual(self.pactions(req, rset),
+                             {'useractions': USERACTIONS,
+                              'siteactions': SITEACTIONS,
+                              'mainactions': [('muledit', actions.MultipleEditAction)],
+                              'moreactions': [('delete', actions.DeleteAction),
+                                              ('addentity', actions.AddNewAction)],
+                              })
+    def test_possible_actions_different_types_entities(self):
+        rset, req = self.env.get_rset_and_req('Any X')
+        self.assertDictEqual(self.pactions(req, rset),
+                             {'useractions': USERACTIONS,
+                              'siteactions': SITEACTIONS,
+                              'moreactions': [('delete', actions.DeleteAction)],
+                              })
+    def test_possible_actions_final_entities(self):
+        rset, req = self.env.get_rset_and_req('Any N, X WHERE X in_group Y, Y name N')
+        self.assertDictEqual(self.pactions(req, rset),
+                             {'useractions': USERACTIONS,
+                              'siteactions': SITEACTIONS})
+        
+    def test_possible_actions_eetype_euser_entity(self):
+        rset, req = self.env.get_rset_and_req('EEType X WHERE X name "EUser"')
+        self.assertDictEqual(self.pactions(req, rset),
+                             {'useractions': USERACTIONS,
+                              'siteactions': SITEACTIONS,
+                              'mainactions': [('edit', actions.ModifyAction),
+                                              ('workflow', schemaentities.ViewWorkflowAction),],
+                              'moreactions': [('delete', actions.DeleteAction),
+                                              ('copy', actions.CopyAction)],
+                              })
+
+    def test_load_subinterface_based_vojects(self):
+        self.vreg._lastmodifs = {} # clear cache
+        self.vreg.register_objects([osp.join(BASE, 'web', 'views', 'iprogress.py')])
+        # check progressbar was kicked
+        self.failIf('progressbar' in self.vreg['views'])
+        class MyCard(Card):
+            __implements__ = (IMileStone,)
+        self.vreg.register_vobject_class(MyCard)
+        self.vreg._lastmodifs = {} # clear cache
+        self.vreg.register_objects([osp.join(BASE, 'web', 'views', 'iprogress.py')])
+        # check progressbar isn't kicked
+        self.assertEquals(len(self.vreg['views']['progressbar']), 1)
+        
+
+    def test_select_creation_form(self):
+        rset = None
+        req = self.request()
+        # creation form
+        req.form['etype'] = 'EGroup'
+        self.assertIsInstance(self.vreg.select_view('creation', req, rset),
+                                  baseforms.CreationForm)
+        del req.form['etype']
+        # custom creation form
+        class EUserCreationForm(baseforms.CreationForm):
+            accepts = ('EUser',)
+        self.vreg.register_vobject_class(EUserCreationForm)
+        req.form['etype'] = 'EUser'
+        self.assertIsInstance(self.vreg.select_view('creation', req, rset),
+                              EUserCreationForm)
+            
+    def test_select_view(self):
+        # no entity
+        rset = None
+        req = self.request()
+        self.assertIsInstance(self.vreg.select_view('index', req, rset),
+                             startup.IndexView)
+        self.failUnlessRaises(NoSelectableObject,
+                             self.vreg.select_view, 'primary', req, rset)
+        self.failUnlessRaises(NoSelectableObject,
+                             self.vreg.select_view, 'table', req, rset)
+        
+        # no entity
+        rset, req = self.env.get_rset_and_req('Any X WHERE X eid 999999')
+        self.failUnlessRaises(NoSelectableObject,
+                              self.vreg.select_view, 'index', req, rset)
+        self.failUnlessRaises(NoSelectableObject,
+                              self.vreg.select_view, 'creation', req, rset)
+        self.failUnlessRaises(NoSelectableObject,
+                              self.vreg.select_view, 'primary', req, rset)
+        self.failUnlessRaises(NoSelectableObject,
+                             self.vreg.select_view, 'table', req, rset)
+        # one entity
+        rset, req = self.env.get_rset_and_req('EGroup X WHERE X name "managers"')
+        self.assertIsInstance(self.vreg.select_view('primary', req, rset),
+                             baseviews.PrimaryView)
+        self.assertIsInstance(self.vreg.select_view('list', req, rset),
+                             baseviews.ListView)
+        self.assertIsInstance(self.vreg.select_view('edition', req, rset),
+                             baseforms.EditionForm)
+        self.assertIsInstance(self.vreg.select_view('table', req, rset),
+                             tableview.TableView)
+        self.failUnlessRaises(NoSelectableObject,
+                              self.vreg.select_view, 'creation', req, rset)
+        self.failUnlessRaises(NoSelectableObject,
+                              self.vreg.select_view, 'index', req, rset)
+        # list of entities of the same type
+        rset, req = self.env.get_rset_and_req('EGroup X')
+        self.assertIsInstance(self.vreg.select_view('primary', req, rset),
+                             baseviews.PrimaryView)
+        self.assertIsInstance(self.vreg.select_view('list', req, rset),
+                             baseviews.ListView)
+        self.assertIsInstance(self.vreg.select_view('table', req, rset),
+                             tableview.TableView)
+        self.failUnlessRaises(NoSelectableObject,
+                              self.vreg.select_view, 'creation', req, rset)
+        # list of entities of different types
+        rset, req = self.env.get_rset_and_req('Any X')
+        self.assertIsInstance(self.vreg.select_view('primary', req, rset),
+                                  baseviews.PrimaryView)
+        self.assertIsInstance(self.vreg.select_view('list', req, rset),
+                                  baseviews.ListView)
+        self.assertIsInstance(self.vreg.select_view('table', req, rset),
+                                  tableview.TableView)
+        self.failUnlessRaises(NoSelectableObject,
+                             self.vreg.select_view, 'creation', req, rset)
+        self.failUnlessRaises(NoSelectableObject,
+                              self.vreg.select_view, 'index', req, rset)
+        # whatever
+        rset, req = self.env.get_rset_and_req('Any N, X WHERE X in_group Y, Y name N')
+        self.assertIsInstance(self.vreg.select_view('table', req, rset),
+                                  tableview.TableView)
+        self.failUnlessRaises(NoSelectableObject,
+                              self.vreg.select_view, 'index', req, rset)
+        self.failUnlessRaises(NoSelectableObject,
+                              self.vreg.select_view, 'creation', req, rset)
+        self.failUnlessRaises(NoSelectableObject,
+                             self.vreg.select_view, 'primary', req, rset)
+        self.failUnlessRaises(NoSelectableObject,
+                             self.vreg.select_view, 'list', req, rset)
+        self.failUnlessRaises(NoSelectableObject,
+                             self.vreg.select_view, 'edition', req, rset)
+        # mixed query
+        rset, req = self.env.get_rset_and_req('Any U,G WHERE U is EUser, G is EGroup')
+        self.failUnlessRaises(NoSelectableObject,
+                              self.vreg.select_view, 'edition', req, rset)
+        self.failUnlessRaises(NoSelectableObject,
+                              self.vreg.select_view, 'creation', req, rset)
+        self.assertIsInstance(self.vreg.select_view('table', req, rset),
+                              tableview.TableView)
+        # euser primary view priority
+        rset, req = self.env.get_rset_and_req('EUser X WHERE X login "admin"')
+        self.assertIsInstance(self.vreg.select_view('primary', req, rset),
+                             euser.EUserPrimaryView)
+        self.assertIsInstance(self.vreg.select_view('text', req, rset),
+                             baseviews.TextView)
+
+    def test_interface_selector(self):
+        image = self.add_entity('Image', name=u'bim.png', data=Binary('bim'))
+        # image primary view priority
+        rset, req = self.env.get_rset_and_req('Image X WHERE X name "bim.png"')
+        self.assertIsInstance(self.vreg.select_view('primary', req, rset),
+                              idownloadable.IDownloadablePrimaryView)
+        
+        
+    def test_score_entity_selector(self):
+        image = self.add_entity('Image', name=u'bim.png', data=Binary('bim'))
+        # image primary view priority
+        rset, req = self.env.get_rset_and_req('Image X WHERE X name "bim.png"')
+        self.assertIsInstance(self.vreg.select_view('image', req, rset),
+                              idownloadable.ImageView)
+        fileobj = self.add_entity('File', name=u'bim.txt', data=Binary('bim'))
+        # image primary view priority
+        rset, req = self.env.get_rset_and_req('File X WHERE X name "bim.txt"')
+        self.assertRaises(NoSelectableObject, self.vreg.select_view, 'image', req, rset)
+        
+        
+        
+    def _test_view(self, vid, rql, args):
+        if rql is None:
+            rset = None
+            req = self.request()
+        else:
+            rset, req = self.env.get_rset_and_req(rql)
+        try:
+            self.vreg.render('views', vid, req, rset=rset, **args)
+        except:
+            print vid, rset, args
+            raise
+
+    def test_form(self):
+        for vid, rql, args in (
+            #('creation', 'Any X WHERE X eid 999999', {}),
+            ('edition', 'EGroup X WHERE X name "managers"', {}),
+            ('copy', 'EGroup X WHERE X name "managers"', {}),
+            ('muledit', 'EGroup X', {}),
+            #('muledit', 'Any X', {}),
+            ):
+            self._test_view(vid, rql, args)
+
+
+    def test_properties(self):
+        self.assertEquals(sorted(k for k in self.vreg['propertydefs'].keys()
+                                 if k.startswith('boxes.edit_box')),
+                          ['boxes.edit_box.context',
+                           'boxes.edit_box.order',
+                           'boxes.edit_box.visible'])
+        self.assertEquals([k for k in self.vreg['propertyvalues'].keys()
+                           if not k.startswith('system.version')],
+                          [])
+        self.assertEquals(self.vreg.property_value('boxes.edit_box.visible'), True)
+        self.assertEquals(self.vreg.property_value('boxes.edit_box.order'), 2)
+        self.assertEquals(self.vreg.property_value('boxes.possible_views_box.visible'), False)
+        self.assertEquals(self.vreg.property_value('boxes.possible_views_box.order'), 10)
+        self.assertRaises(KeyError, self.vreg.property_value, 'boxes.actions_box')
+        
+
+
+    def test_owners_in_group_selector(self):
+        """tests usage of 'owners' group with in_group_selector"""
+        class SomeAction(Action):
+            id = 'yo'
+            category = 'foo'
+            __selectors__ = (in_group_selector,)
+            require_groups = ('owners', )            
+        self.vreg.register_vobject_class(SomeAction)
+        self.failUnless(SomeAction in self.vreg['actions']['yo'], self.vreg['actions'])
+        try:
+            # login as a simple user
+            self.create_user('john')
+            self.login('john')
+            # it should not be possible to use SomeAction not owned objects
+            rset, req = self.env.get_rset_and_req('Any G WHERE G is EGroup, G name "managers"')
+            self.failIf('foo' in self.pactions(req, rset))
+            # insert a new card, and check that we can use SomeAction on our object
+            self.execute('INSERT Card C: C title "zoubidou"')
+            self.commit()
+            rset, req = self.env.get_rset_and_req('Card C WHERE C title "zoubidou"')
+            self.failUnless('foo' in self.pactions(req, rset))
+            # make sure even managers can't use the action
+            self.restore_connection()
+            rset, req = self.env.get_rset_and_req('Card C WHERE C title "zoubidou"')
+            self.failIf('foo' in self.pactions(req, rset))
+        finally:
+            del self.vreg[SomeAction.__registry__][SomeAction.id]
+
+
+        
+
+
+from cubicweb.web.action import EntityAction
+
+class EETypeRQLAction(EntityAction):
+    id = 'testaction'
+    accepts = ('EEType',)
+    condition = 'X name "EEType"'
+    title = 'bla'
+
+class RQLActionTC(ViewSelectorTC):
+            
+    def setUp(self):
+        super(RQLActionTC, self).setUp()
+        self.vreg.register_vobject_class(EETypeRQLAction)
+        
+    def tearDown(self):
+        super(RQLActionTC, self).tearDown()        
+        del self.vreg._registries['actions']['testaction']
+        
+    def test(self):
+        rset, req = self.env.get_rset_and_req('EEType X WHERE X name "EEType"')
+        self.assertDictEqual(self.pactions(req, rset),
+                             {'useractions': USERACTIONS,
+                              'siteactions': SITEACTIONS,
+                              'mainactions': [('edit', actions.ModifyAction)],
+                              'moreactions': [('delete', actions.DeleteAction),
+                                              ('copy', actions.CopyAction),
+                                              ('testaction', EETypeRQLAction)],
+                              })
+        rset, req = self.env.get_rset_and_req('EEType X WHERE X name "ERType"')
+        self.assertDictEqual(self.pactions(req, rset),
+                             {'useractions': USERACTIONS,
+                              'siteactions': SITEACTIONS,
+                              'mainactions': [('edit', actions.ModifyAction)],
+                              'moreactions': [('delete', actions.DeleteAction),
+                                              ('copy', actions.CopyAction)],
+                              })
+        
+
+
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/test/unittest_webconfig.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,33 @@
+import os
+
+from logilab.common.testlib import TestCase, unittest_main
+
+from cubicweb.devtools._apptest import FakeRequest
+from cubicweb.devtools import ApptestConfiguration
+
+class WebconfigTC(TestCase):
+    def setUp(self):
+        self.config = ApptestConfiguration('data')
+        self.config._cubes = ['efile']
+        self.config.load_configuration()
+        
+    def test_nonregr_print_css_as_list(self):
+        """make sure PRINT_CSS *must* is a list"""
+        config = self.config
+        req = FakeRequest()
+        print_css = req.external_resource('STYLESHEETS_PRINT')
+        self.failUnless(isinstance(print_css, list))
+        ie_css = req.external_resource('IE_STYLESHEETS')
+        self.failUnless(isinstance(ie_css, list))
+
+    def test_locate_resource(self):
+        self.failUnless('FILE_ICON' in self.config.ext_resources)
+        rname = self.config.ext_resources['FILE_ICON'].replace('DATADIR/', '')
+        self.failUnless('efile' in self.config.locate_resource(rname).split(os.sep))
+        cubicwebcsspath = self.config.locate_resource('cubicweb.css').split(os.sep)
+        self.failUnless('web' in cubicwebcsspath or 'shared' in cubicwebcsspath) # 'shared' if tests under apycot
+        
+if __name__ == '__main__':
+    unittest_main()
+
+    
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/test/unittest_widgets.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,364 @@
+"""cubicweb.common.widget unit tests
+
+"""
+
+from mx.DateTime import now
+NOW = now()
+from logilab.common.testlib import unittest_main
+from cubicweb.devtools.apptest import EnvBasedTC
+
+from cubicweb.web.widgets import widget, AutoCompletionWidget
+
+
+class WidgetsTC(EnvBasedTC):
+        
+    def get_widget(self, etype, rname, rtype):
+        rschema = self.schema[rname]
+        return widget(self.vreg, etype, rschema, rtype, role='subject')
+    
+
+    def test_hidden_widget(self):
+        w = self.get_widget('State', 'eid', 'Int')
+        self.assertEquals(w.name, 'eid')
+        self.assertEquals(w.render_example(self.request()), '')
+        self.assertDictEquals(w.attrs, {})
+        entity = self.etype_instance('State')
+        entity.eid = 'X'
+        self.assertEquals(w.required(entity), True)
+        self.assertEquals(w.render(entity), '')
+        self.assertEquals(w.edit_render(entity),
+                           u'<input type="hidden" name="eid" value="X" />')
+
+    def test_textarea_widget(self):
+        self.add_entity('EProperty', pkey=u'ui.fckeditor', value=u'')
+        self.commit()
+        w = self.get_widget('State', 'description', 'String')
+        self.assertEquals(w.name, 'description')
+        self.assertEquals(w.render_example(self.request()), '')
+        self.assertDictEquals(w.attrs, {'accesskey': 'd'})
+        entity = self.etype_instance('State')
+        entity.eid = 'X'
+        entity
+        self.assertEquals(w.required(entity), False)
+        self.assertEquals(w.render(entity), '')
+        self.assertTextEquals(w.edit_render(entity),
+                           u'''<input type="hidden" name="edits-description:X" value="__cubicweb_internal_field__"/>
+<input type="hidden" name="edits-description_format:X" value="__cubicweb_internal_field__"/>
+
+<select name="description_format:X" id="description_format:X" tabindex="0">
+<option value="text/rest" >text/rest</option>
+<option value="text/html" selected="selected">text/html</option>
+<option value="text/plain" >text/plain</option>
+<option value="text/cubicweb-page-template" >text/cubicweb-page-template</option>
+</select><br/><textarea onkeypress="autogrow(this)" name="description:X" accesskey="d" cols="80" id="description:X" rows="20" tabindex="1"></textarea>''')
+
+    def test_textarea_widget_previous_value(self):
+        self.add_entity('EProperty', pkey=u'ui.fckeditor', value=u'')
+        self.commit()
+        w = self.get_widget('State', 'description', 'String')
+        req = self.request()
+        req.data['formvalues'] = {'description:X': 'a description'}
+        entity = self.etype_instance('State', req)
+        entity.eid = 'X'
+        self.assertEquals(w.required(entity), False)
+        self.assertEquals(w.render(entity), '')
+        self.assertTextEquals(w.edit_render(entity),
+                           u'''<input type="hidden" name="edits-description:X" value="__cubicweb_internal_field__"/>
+<input type="hidden" name="edits-description_format:X" value="__cubicweb_internal_field__"/>
+
+<select name="description_format:X" id="description_format:X" tabindex="0">
+<option value="text/rest" >text/rest</option>
+<option value="text/html" selected="selected">text/html</option>
+<option value="text/plain" >text/plain</option>
+<option value="text/cubicweb-page-template" >text/cubicweb-page-template</option>
+</select><br/><textarea onkeypress="autogrow(this)" name="description:X" accesskey="d" cols="80" id="description:X" rows="20" tabindex="1">a description</textarea>''')
+
+    def test_fckeditor_widget(self):
+        w = self.get_widget('State', 'description', 'String')
+        req = self.request()
+        entity = self.etype_instance('State', req)
+        entity.eid = 'X'
+        self.assertEquals(w.required(entity), False)
+        self.assertEquals(w.render(entity), '')
+        self.assertTextEquals(w.edit_render(entity),
+                           u'''<input type="hidden" name="edits-description:X" value="__cubicweb_internal_field__"/>
+<input type="hidden" name="edits-description_format:X" value=""/>
+<input type="hidden" name="description_format:X" value="text/html"/>
+<textarea cubicweb:type="wysiwyg" onkeypress="autogrow(this)" name="description:X" accesskey="d" cols="80" id="description:X" rows="20" tabindex="0"></textarea>''')
+
+    def test_string_widget(self):
+        w = self.get_widget('Personne', 'nom', 'String')
+        self.assertEquals(w.name, 'nom')
+        self.assertEquals(w.render_example(self.request()), '')
+        self.assertDictEquals(w.attrs, {'accesskey': 'n', 'maxlength': 64, 'size': 40})
+        entity = self.etype_instance('Personne')
+        entity.eid = 'X'
+        self.assertEquals(w.required(entity), True)
+        self.assertEquals(w.render(entity), '')
+        self.assertEquals(w.edit_render(entity),
+                           u'<input type="hidden" name="edits-nom:X" value="__cubicweb_internal_field__"/>\n'
+                          '<input type="text" name="nom:X" value="" accesskey="n" id="nom:X" maxlength="64" size="40" tabindex="0"/>')
+
+    def test_string_widget_previous_value(self):
+        w = self.get_widget('Personne', 'nom', 'String')
+        self.assertEquals(w.name, 'nom')
+        self.assertEquals(w.render_example(self.request()), '')
+        self.assertDictEquals(w.attrs, {'accesskey': 'n', 'maxlength': 64, 'size': 40})
+        req = self.request()
+        req.data['formvalues'] = {'nom:X': 'a name'}
+        entity = self.etype_instance('Personne', req)
+        entity.eid = 'X'
+        self.assertEquals(w.required(entity), True)
+        self.assertEquals(w.render(entity), '')
+        self.assertEquals(w.edit_render(entity),
+                           u'<input type="hidden" name="edits-nom:X" value="__cubicweb_internal_field__"/>\n'
+                          '<input type="text" name="nom:X" value="a name" accesskey="n" id="nom:X" maxlength="64" size="40" tabindex="0"/>')
+
+    def test_static_combo_widget(self):
+        w = self.get_widget('Personne', 'promo', 'String')
+        self.assertEquals(w.name, 'promo')
+        self.assertEquals(w.render_example(self.request()), '')
+        self.assertDictEquals(w.attrs, {})
+        entity = self.etype_instance('Personne')
+        entity.eid = 'X'
+        self.assertEquals(w.required(entity), False)
+        self.assertEquals(w.render(entity), '')
+        self.assertTextEquals(w.edit_render(entity),
+                           u'<input type="hidden" name="edits-promo:X" value="__cubicweb_internal_field__"/>\n\n'
+                          '<select name="promo:X" id="promo:X" tabindex="0">\n'
+                          '<option value="bon" >bon</option>\n'
+                          '<option value="pasbon" >pasbon</option>\n'
+                          '</select>')
+
+    def test_static_combo_widget_previous_value(self):
+        w = self.get_widget('Personne', 'promo', 'String')
+        self.assertEquals(w.name, 'promo')
+        self.assertEquals(w.render_example(self.request()), '')
+        self.assertDictEquals(w.attrs, {})
+        req = self.request()
+        req.data['formvalues'] = {'promo:X': 'pasbon'}
+        entity = self.etype_instance('Personne', req)
+        entity.eid = 'X'
+        self.assertEquals(w.required(entity), False)
+        self.assertEquals(w.render(entity), '')
+        self.assertTextEquals(w.edit_render(entity),
+                           u'<input type="hidden" name="edits-promo:X" value="__cubicweb_internal_field__"/>\n\n'
+                          '<select name="promo:X" id="promo:X" tabindex="0">\n'
+                          '<option value="bon" >bon</option>\n'
+                          '<option value="pasbon" selected="selected">pasbon</option>\n'
+                          '</select>')
+
+    def test_integer_widget(self):
+        w = self.get_widget('Personne', 'tel', 'Int')
+        self.assertEquals(w.name, 'tel')
+        self.assertEquals(w.render_example(self.request()), '23')
+        self.assertDictEquals(w.attrs, {'accesskey': 't', 'maxlength': 15, 'size': 5})
+        entity = self.etype_instance('Personne')
+        entity.eid = 'X'
+        self.assertEquals(w.required(entity), False)
+        self.assertEquals(w.render(entity), '')
+        self.assertEquals(w.edit_render(entity),
+                           u'<input type="hidden" name="edits-tel:X" value="__cubicweb_internal_field__"/>\n'
+                          '<input type="text" name="tel:X" value="" accesskey="t" id="tel:X" maxlength="15" size="5" tabindex="0"/>')
+
+    def test_integer_widget_previous_value(self):
+        w = self.get_widget('Personne', 'tel', 'Int')
+        self.assertEquals(w.name, 'tel')
+        self.assertEquals(w.render_example(self.request()), '23')
+        self.assertDictEquals(w.attrs, {'accesskey': 't', 'maxlength': 15, 'size': 5})
+        req = self.request()
+        req.data['formvalues'] = {'tel:X': '0123456789'}
+        entity = self.etype_instance('Personne', req)
+        entity.eid = 'X'
+        self.assertEquals(w.required(entity), False)
+        self.assertEquals(w.render(entity), '')
+        self.assertEquals(w.edit_render(entity),
+                           u'<input type="hidden" name="edits-tel:X" value="__cubicweb_internal_field__"/>\n'
+                          '<input type="text" name="tel:X" value="0123456789" accesskey="t" id="tel:X" maxlength="15" size="5" tabindex="0"/>')
+
+    def test_datetime_widget(self):
+        w = self.get_widget('Personne', 'datenaiss', 'Datetime')
+        self.assertEquals(w.name, 'datenaiss')
+        now_ = now()
+        example = '%s, or without time: %s' % (        
+            now_.strftime(self.vreg.property_value('ui.datetime-format')),
+            now_.strftime(self.vreg.property_value('ui.date-format')))
+        self.assertEquals(w.render_example(self.request()), example)
+        self.assertDictEquals(w.attrs, {'accesskey': 'd', 'maxlength': 16, 'size': 16})
+        entity = self.etype_instance('Personne')
+        entity.eid = 'X'
+        self.assertEquals(w.required(entity), False)
+        self.assertEquals(w.render(entity), '')
+        self.assertEquals(w.edit_render(entity),
+                           u'<input type="hidden" name="edits-datenaiss:X" value="__cubicweb_internal_field__"/>\n'
+                          '<input type="text" name="datenaiss:X" value="" accesskey="d" id="datenaiss:X" maxlength="16" size="16" tabindex="0"/>'
+                          '<a onclick="toggleCalendar(\'datenaiss:Xhelper\', \'datenaiss:X\', %s, %s);" class="calhelper">\n<img src="http://testing.fr/cubicweb/data/calendar.gif" title="calendar" alt="" /></a><div class="calpopup hidden" id="datenaiss:Xhelper"></div>' % (NOW.year, NOW.month))
+
+    def test_datetime_widget_previous_value(self):
+        w = self.get_widget('Personne', 'datenaiss', 'Datetime')
+        self.assertEquals(w.name, 'datenaiss')
+        self.assertDictEquals(w.attrs, {'accesskey': 'd', 'maxlength': 16, 'size': 16})
+        req = self.request()
+        req.data['formvalues'] = {'datenaiss:X': '2000/01/01'}
+        entity = self.etype_instance('Personne', req)
+        entity.eid = 'X'
+        self.assertEquals(w.required(entity), False)
+        self.assertEquals(w.render(entity), '')
+        self.assertEquals(w.edit_render(entity),
+                           u'<input type="hidden" name="edits-datenaiss:X" value="__cubicweb_internal_field__"/>\n'
+                          '<input type="text" name="datenaiss:X" value="2000/01/01" accesskey="d" id="datenaiss:X" maxlength="16" size="16" tabindex="0"/>'
+                          '<a onclick="toggleCalendar(\'datenaiss:Xhelper\', \'datenaiss:X\', %s, %s);" class="calhelper">\n<img src="http://testing.fr/cubicweb/data/calendar.gif" title="calendar" alt="" /></a><div class="calpopup hidden" id="datenaiss:Xhelper"></div>' % (NOW.year, NOW.month))
+
+
+
+    def test_float_widget(self):
+        w = self.get_widget('Personne', 'salary', 'Float')
+        self.assertEquals(w.name, 'salary')
+        format = now().strftime(self.vreg.property_value('ui.float-format'))
+        self.assertEquals(w.render_example(self.request()), format % 1.23)
+        self.assertDictEquals(w.attrs, {'accesskey': 's', 'maxlength': 15, 'size': 5})
+        entity = self.etype_instance('Personne')
+        entity.eid = 'X'
+        self.assertEquals(w.required(entity), False)
+        self.assertEquals(w.render(entity), '')
+        self.assertEquals(w.edit_render(entity),
+                          u'<input type="hidden" name="edits-salary:X" value="__cubicweb_internal_field__"/>\n'
+                          '<input type="text" name="salary:X" value="" accesskey="s" id="salary:X" maxlength="15" size="5" tabindex="0"/>')
+                          
+                          
+    def test_float_widget_previous_value(self):
+        w = self.get_widget('Personne', 'salary', 'Float')
+        self.assertEquals(w.name, 'salary')
+        format = now().strftime(self.vreg.property_value('ui.float-format'))
+        self.assertEquals(w.render_example(self.request()), format % 1.23)
+        self.assertDictEquals(w.attrs, {'accesskey': 's', 'maxlength': 15, 'size': 5})
+        req = self.request()
+        req.data['formvalues'] = {'salary:X': 7.89}
+        entity = self.etype_instance('Personne', req)
+        entity.eid = 'X'
+        self.assertEquals(w.required(entity), False)
+        self.assertEquals(w.render(entity), '')
+        self.assertEquals(w.edit_render(entity),
+                          u'<input type="hidden" name="edits-salary:X" value="__cubicweb_internal_field__"/>\n'
+                          '<input type="text" name="salary:X" value="7.89" accesskey="s" id="salary:X" maxlength="15" size="5" tabindex="0"/>')
+
+
+    def test_bool_widget(self):
+        w = self.get_widget('Personne', 'test', 'Boolean')
+        self.assertEquals(w.name, 'test')
+        self.assertEquals(w.render_example(self.request()), '')
+        self.assertDictEquals(w.attrs, {'accesskey': 't'})
+        entity = self.etype_instance('Personne')
+        entity.eid = 'X'
+        self.assertEquals(w.required(entity), False)
+        self.assertEquals(w.render(entity), '')
+        self.assertEquals(w.edit_render(entity),
+                           u'''<input type="hidden" name="edits-test:X" value="__cubicweb_internal_field__"/>
+
+<input type="radio" name="test:X" value="1" accesskey="t" id="test:X" tabindex="0"/>yes<br/>
+<input type="radio" name="test:X" value="" accesskey="t" tabindex="0" checked="checked"/>no<br/>''')
+
+    def test_bool_widget_previous_value(self):
+        w = self.get_widget('Personne', 'test', 'Boolean')
+        self.assertEquals(w.name, 'test')
+        self.assertEquals(w.render_example(self.request()), '')
+        self.assertDictEquals(w.attrs, {'accesskey': 't'})
+        req = self.request()
+        req.data['formvalues'] = {'test:X': 'checked'}
+        entity = self.etype_instance('Personne', req)
+        entity.eid = 'X'
+        self.assertEquals(w.required(entity), False)
+        self.assertEquals(w.render(entity), '')
+        self.assertEquals(w.edit_render(entity),
+                           u'''<input type="hidden" name="edits-test:X" value="__cubicweb_internal_field__"/>
+
+<input type="radio" name="test:X" value="1" accesskey="t" id="test:X" tabindex="0" checked="checked"/>yes<br/>
+<input type="radio" name="test:X" value="" accesskey="t" tabindex="0"/>no<br/>''')
+
+
+    def test_password_widget(self):
+        w = self.get_widget('EUser', 'upassword', 'Password')
+        self.assertEquals(w.name, 'upassword')
+        self.assertEquals(w.render_example(self.request()), '')
+        self.assertDictEquals(w.attrs, {'accesskey': 'u'})
+        entity = self.etype_instance('EUser')
+        entity.eid = 'X'
+        self.assertEquals(w.required(entity), True)
+        self.assertEquals(w.render(entity), '')
+        self.assertEquals(w.edit_render(entity),
+                           u'<input type="hidden" name="edits-upassword:X" value="__cubicweb_internal_field__"/>\n'
+                          '<input type="password" name="upassword:X" value="" accesskey="u" id="upassword:X" tabindex="0"/><br/>\n'
+                          '<input type="password" name="upassword-confirm:X" id="upassword-confirm:X" tabindex="1"/>&nbsp;<span class="emphasis">(confirm password)</span>')
+
+    def test_autocompletion_widget(self):
+        entity = self.etype_instance('Personne')
+        entity.widgets['nom'] = 'AutoCompletionWidget'
+        entity.autocomplete_initfuncs = {'nom' : 'getnames'}
+        try:
+            w = self.get_widget(entity, 'nom', 'String')
+            self.failUnless(isinstance(w, AutoCompletionWidget))
+            self.assertEquals(w.name, 'nom')
+            self.assertEquals(w.render_example(self.request()), '')
+            self.assertDictEquals(w.attrs, {'accesskey': 'n', 'maxlength': 64, 'size': 40})
+            entity.eid = 'X'
+            self.assertEquals(w.required(entity), True)
+            self.assertEquals(w.render(entity), '')
+
+            self.assertTextEquals(w.edit_render(entity),
+                                  u'<input type="hidden" name="edits-nom:X" value="__cubicweb_internal_field__"/>\n'
+                                  u'<input type="text" name="nom:X" value="" cubicweb:dataurl="http://testing.fr/cubicweb/json?pageid=None&amp;mode=remote&amp;fname=getnames" class="widget required" id="nom:X" tabindex="0" cubicweb:loadtype="auto" cubicweb:wdgtype="SuggestField"  cubicweb:accesskey="n" cubicweb:maxlength="64" cubicweb:size="40" />')
+                                  
+        finally:
+            del entity.widgets['nom']
+
+
+    def test_autocompletion_widget_previous_value(self):
+        req = self.request()
+        req.data['formvalues'] = {'nom:X': 'a name'}
+        entity = self.etype_instance('Personne', req)
+        entity.widgets['nom'] = 'AutoCompletionWidget'
+        entity.autocomplete_initfuncs = {'nom' : 'getnames'}
+        try:
+            w = self.get_widget(entity, 'nom', 'String')
+            self.failUnless(isinstance(w, AutoCompletionWidget))
+            self.assertEquals(w.name, 'nom')
+            self.assertEquals(w.render_example(self.request()), '')
+            self.assertDictEquals(w.attrs, {'accesskey': 'n', 'maxlength': 64, 'size': 40})
+            entity.eid = 'X'
+            self.assertEquals(w.required(entity), True)
+            self.assertEquals(w.render(entity), '')
+            self.assertTextEquals(w.edit_render(entity),
+                                  u'<input type="hidden" name="edits-nom:X" value="__cubicweb_internal_field__"/>\n'
+                                  u'<input type="text" name="nom:X" value="a name" cubicweb:dataurl="http://testing.fr/cubicweb/json?pageid=None&amp;mode=remote&amp;fname=getnames" class="widget required" id="nom:X" tabindex="0" cubicweb:loadtype="auto" cubicweb:wdgtype="SuggestField"  cubicweb:accesskey="n" cubicweb:maxlength="64" cubicweb:size="40" />')
+            
+        finally:
+            del entity.widgets['nom']
+
+
+    def test_nonregr_float_widget_with_none(self):
+        w = self.get_widget('Personne', 'salary', 'Float')
+        self.assertEquals(w.name, 'salary')
+        format = now().strftime(self.vreg.property_value('ui.float-format'))
+        self.assertEquals(w.render_example(self.request()), format % 1.23)
+        self.assertDictEquals(w.attrs, {'accesskey': 's', 'maxlength': 15, 'size': 5})
+        req = self.request()
+        entity = self.etype_instance('Personne', req)
+        entity.eid = 'X'
+        entity.salary = None
+        self.assertEquals(w.required(entity), False)
+        self.assertEquals(w.render(entity), '')
+        self.assertEquals(w.edit_render(entity),
+                          u'<input type="hidden" name="edits-salary:X" value="__cubicweb_internal_field__"/>\n'
+                          '<input type="text" name="salary:X" value="" accesskey="s" id="salary:X" maxlength="15" size="5" tabindex="0"/>')
+
+
+    def test_custom_widget_for_non_final_relation(self):
+        entity = self.etype_instance('Personne', self.request())
+        entity.widgets['travaille'] = 'AutoCompletionWidget'
+        entity.autocomplete_initfuncs = {'nom' : 'getnames'}
+        w = self.get_widget(entity, 'travaille', 'Societe')
+        self.failUnless(isinstance(w, AutoCompletionWidget))
+        
+        
+if __name__ == '__main__':
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/__init__.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,96 @@
+"""Views/forms and actions for the CubicWeb web client
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+    
+from rql import nodes
+
+def need_table_view(rset, schema):
+    """return True if we think that a table view is more appropriate than a
+    list or primary view to display the given result set
+    """
+    rqlst = rset.syntax_tree()
+    if len(rqlst.children) > 1:
+        # UNION query, use a table
+        return True
+    selected = rqlst.children[0].selection
+    try:
+        mainvar = selected[0]
+    except AttributeError:
+        # not a variable ref, using table view is probably a good option
+        return True
+    if not (isinstance(mainvar, nodes.VariableRef) or
+            (isinstance(mainvar, nodes.Constant) and mainvar.uid)):
+        return True
+    for i, etype in enumerate(rset.description[0][1:]):
+        # etype may be None on outer join
+        if etype is None:
+            return True
+        # check the selected index node is a VariableRef (else we
+        # won't detect aggregate functions)
+        if not isinstance(selected[i+1], nodes.VariableRef):
+            return True
+        # if this is not a final entity
+        if not schema.eschema(etype).is_final():
+            return True
+        # if this is a final entity not linked to the main variable
+        var = selected[i+1].variable
+        for vref in var.references():
+            rel = vref.relation()
+            if rel is None:
+                continue
+            if mainvar.is_equivalent(rel.children[0]):
+                break
+        else:
+            return True
+    return False
+
+
+def vid_from_rset(req, rset, schema):
+    """given a result set, return a view id"""
+    if rset is None:
+        return 'index'
+    nb_rows = len(rset)
+    # empty resultset
+    if nb_rows == 0 :
+        return 'noresult'
+    # entity result set
+    if not schema.eschema(rset.description[0][0]).is_final():
+        if need_table_view(rset, schema):
+            return 'table'
+        if nb_rows == 1:
+            if req.search_state[0] == 'normal':
+                return 'primary'
+            return 'outofcontext-search'
+        return 'list'
+    return 'table'
+
+def linksearch_match(req, rset):
+    """when searching an entity to create a relation, return True if entities in
+    the given rset may be used as relation end
+    """
+    try:
+        searchedtype = req.search_state[1][-1]
+    except IndexError:
+        return 0 # no searching for association
+    for etype in rset.column_types(0):
+        if etype != searchedtype:
+            return 0
+    return 1
+    
+def linksearch_select_url(req, rset):
+    """when searching an entity to create a relation, return an url to select
+    entities in the given rset
+    """
+    req.add_js( ('cubicweb.ajax.js', 'cubicweb.edition.js') )
+    target, link_eid, r_type, searchedtype = req.search_state[1]
+    if target == 'subject':
+        id_fmt = '%s:%s:%%s' % (link_eid, r_type)
+    else:
+        id_fmt = '%%s:%s:%s' % (r_type, link_eid)
+    triplets = '-'.join(id_fmt % row[0] for row in rset.rows)
+    return "javascript: selectForAssociation('%s', '%s');" % (triplets,
+                                                              link_eid)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/actions.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,284 @@
+"""Set of HTML base actions
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from cubicweb import UnknownEid
+from cubicweb.common.selectors import *
+
+from cubicweb.web.action import (Action, EntityAction,  LinkToEntityAction,
+                              LinkToEntityAction2)
+from cubicweb.web.views import linksearch_select_url, linksearch_match
+from cubicweb.web.views.baseviews import vid_from_rset
+
+_ = unicode
+
+
+class SelectAction(EntityAction):
+    """base class for link search actions. By default apply on
+    any size entity result search it the current state is 'linksearch'
+    if accept match.
+    """
+    category = 'mainactions'    
+    __selectors__ = (searchstate_accept_selector,)
+    search_states = ('linksearch',)
+    order = 0
+    
+    id = 'select'
+    title = _('select')
+    
+    @classmethod
+    def accept_rset(cls, req, rset, row, col):
+        return linksearch_match(req, rset)
+    
+    def url(self):
+        return linksearch_select_url(self.req, self.rset)
+
+
+class CancelSelectAction(Action):
+    category = 'mainactions'
+    search_states = ('linksearch',)
+    order = 10
+    
+    id = 'cancel'
+    title = _('cancel select')
+    
+    def url(self):
+        target, link_eid, r_type, searched_type = self.req.search_state[1]
+        return self.build_url(rql="Any X WHERE X eid %s" % link_eid,
+                              vid='edition', __mode='normal')
+
+
+class ViewAction(Action):
+    category = 'mainactions'    
+    __selectors__ = (in_group_selector, searchstate_accept_selector)
+    require_groups = ('users', 'managers')
+    order = 0
+    
+    id = 'view'
+    title = _('view')
+    
+    @classmethod
+    def accept_rset(cls, req, rset, row, col):
+        # interesting if it propose another view than the current one
+        vid = req.form.get('vid')
+        if vid and vid != vid_from_rset(req, rset, cls.schema):
+            return 1
+        return 0
+    
+    def url(self):
+        params = self.req.form.copy()
+        params.pop('vid', None)
+        params.pop('__message', None)
+        return self.build_url(self.req.relative_path(includeparams=False), **params)
+
+
+class ModifyAction(EntityAction):
+    category = 'mainactions'
+    __selectors__ = (onelinerset_selector, searchstate_accept_selector)
+    #__selectors__ = searchstate_accept_selector,
+    schema_action = 'update'
+    order = 10
+    
+    id = 'edit'
+    title = _('modify')
+    
+    @classmethod
+    def has_permission(cls, entity, action):
+        if entity.has_perm(action):
+            return True
+        # if user has no update right but it can modify some relation,
+        # display action anyway
+        for dummy in entity.srelations_by_category(('generic', 'metadata'), 'add'):
+            return True
+        for rschema, targetschemas, role in entity.relations_by_category(('primary', 'secondary'), 'add'):
+            if not rschema.is_final():
+                return True
+        return False
+
+    def url(self):
+        entity = self.rset.get_entity(self.row or 0, self.col or 0)
+        return entity.absolute_url(vid='edition')
+
+    
+class DeleteAction(EntityAction):
+    category = 'moreactions' 
+    __selectors__ = (searchstate_accept_selector,)
+    schema_action = 'delete'
+    order = 20
+    
+    id = 'delete'
+    title = _('delete')
+    
+    def url(self):
+        if len(self.rset) == 1:
+            entity = self.rset.get_entity(0, 0)
+            return self.build_url(entity.rest_path(), vid='deleteconf')
+        return self.build_url(rql=self.rset.printable_rql(), vid='deleteconf')
+    
+        
+class CopyAction(EntityAction):
+    category = 'moreactions'
+    schema_action = 'add'
+    order = 30
+    
+    id = 'copy'
+    title = _('copy')
+    
+    def url(self):
+        entity = self.rset.get_entity(self.row or 0, self.col or 0)
+        return entity.absolute_url(vid='copy')
+        
+
+class MultipleEditAction(EntityAction):
+    category = 'mainactions'
+    __selectors__ = (twolinerset_selector, oneetyperset_selector,
+                     searchstate_accept_selector)
+    schema_action = 'update'
+    order = 10
+    
+    id = 'muledit' # XXX get strange conflicts if id='edit'
+    title = _('modify')
+    
+    def url(self):
+        return self.build_url('view', rql=self.rset.rql, vid='muledit')
+
+
+class AddNewAction(MultipleEditAction):
+    """when we're seeing more than one entity with the same type, propose to
+    add a new one
+    """
+    category = 'moreactions'
+    
+    def etype_rset_selector(cls, req, rset, **kwargs):
+        if rset is not None and not rset.rowcount:
+            rqlst = rset.syntax_tree()
+            if len(rqlst.children) > 1:
+                return 0
+            select = rqlst.children[0]
+            if len(select.defined_vars) == 1 and len(select.solutions) == 1:
+                    rset._searched_etype = select.solutions[0].itervalues().next()
+                    eschema = cls.schema.eschema(rset._searched_etype)
+                    if not eschema.is_final() and eschema.has_perm(req, 'add'):
+                        return 1
+        return 0
+
+    def has_add_perm_selector(cls, req, rset, **kwargs):
+        eschema = cls.schema.eschema(rset.description[0][0])
+        if not eschema.is_final() and eschema.has_perm(req, 'add'):
+            return 1
+        return 0
+    __selectors__ = (searchstate_selector,
+                     chainfirst(etype_rset_selector,
+                                chainall(twolinerset_selector, oneetyperset_selector,
+                                         has_add_perm_selector)))
+    order = 40
+    id = 'addentity'
+
+    @property
+    def rsettype(self):
+        if self.rset:
+            return self.rset.description[0][0]
+        return self.rset._searched_etype
+
+    @property
+    def title(self):
+        return self.req.__('add a %s' % self.rsettype) # generated msgid
+
+    def url(self):
+        return self.build_url('add/%s' % self.rsettype)
+
+
+class FollowAction(EntityAction):
+    category = 'mainactions'
+    accepts = ('Bookmark',)
+    
+    id = 'follow'
+    title = _('follow')
+    
+    def url(self):
+        return self.rset.get_entity(self.row or 0, self.col or 0).actual_url()
+
+class UserPreferencesAction(Action):
+    category = 'useractions'
+    __selectors__ = not_anonymous_selector,
+    order = 10
+    
+    id = 'myprefs'
+    title = _('user preferences')
+
+    def url(self):
+        return self.build_url(self.id)
+
+class UserPreferencesEntityAction(EntityAction):
+    __selectors__ = EntityAction.__selectors__ + (onelinerset_selector, in_group_selector,)
+    require_groups = ('owners', 'managers')
+    category = 'mainactions'
+    accepts = ('EUser',)
+    
+    id = 'prefs'
+    title = _('preferences')
+    
+    def url(self):
+        login = self.rset.get_entity(self.row or 0, self.col or 0).login
+        return self.build_url('euser/%s'%login, vid='epropertiesform')
+
+class UserInfoAction(Action):
+    category = 'useractions'
+    __selectors__ = not_anonymous_selector,
+    order = 20
+    
+    id = 'myinfos'
+    title = _('personnal informations')
+
+    def url(self):
+        return self.build_url('euser/%s'%self.req.user.login, vid='edition')
+
+class LogoutAction(Action):
+    category = 'useractions'
+    __selectors__ = not_anonymous_selector,
+    order = 30
+    
+    id = 'logout'
+    title = _('logout')
+
+    def url(self):
+        return self.build_url(self.id)
+    
+
+class ManagersAction(Action):
+    category = 'siteactions'
+    __abstract__ = True
+    __selectors__ = in_group_selector,
+    require_groups = ('managers',)
+
+    def url(self):
+        return self.build_url(self.id)
+    
+class SiteConfigurationAction(ManagersAction):
+    order = 10
+    
+    id = 'siteconfig'
+    title = _('site configuration')
+    
+class ManageAction(ManagersAction):
+    order = 20
+    
+    id = 'manage'
+    title = _('manage')
+
+
+class ViewSchemaAction(Action):
+    category = 'siteactions'
+    __selectors__ = yes_selector,
+    order = 30
+    
+    id = 'schema'
+    title = _("site schema")
+
+    def url(self):
+        return self.build_url(self.id)
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/ajaxedit.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,70 @@
+"""Set of views allowing edition of entities/relations using ajax
+
+:organization: Logilab
+:copyright: 2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from cubicweb.common.selectors import (chainfirst, req_form_params_selector,
+                                    kwargs_selector)
+from cubicweb.web.box import EditRelationBoxTemplate
+
+class AddRelationView(EditRelationBoxTemplate):
+    """base class for view which let add entities linked
+    by a given relation
+
+    subclasses should define at least id, rtype and target
+    class attributes.
+    """
+    __registry__ = 'views'
+    __selectors__ = (chainfirst(req_form_params_selector, kwargs_selector),)
+    property_defs = {} # don't want to inherit this from Box
+    id = 'xaddrelation'
+    expected_kwargs = form_params = ('rtype', 'target')
+
+    build_js = EditRelationBoxTemplate.build_reload_js_call
+    
+    def cell_call(self, row, col, rtype=None, target=None, etype=None):
+        """render the add-relation widget for the entity at (row, col)
+
+        rtype/target/etype fall back to the corresponding request form
+        parameters when not given explicitly.
+        """
+        self.rtype = rtype or self.req.form['rtype']
+        self.target = target or self.req.form['target']
+        self.etype = etype or self.req.form.get('etype')
+        entity = self.entity(row, col)
+        rschema = self.schema.rschema(self.rtype)
+        if not self.etype:
+            # no explicit target type: deduce it from the schema when the
+            # relation allows a single entity type on the target side
+            if self.target == 'object':
+                etypes = rschema.objects(entity.e_schema)
+            else:
+                etypes = rschema.subjects(entity.e_schema)
+            if len(etypes) == 1:
+                self.etype = etypes[0]
+        # reuse the box machinery: collect box items in a fake box then render
+        fakebox = []
+        self.w(u'<div id="%s">' % self.id)
+        self.w(u'<h1>%s</h1>' % self.req._('relation %(relname)s of %(ent)s')
+               % {'relname': rschema.display_name(self.req, self.xtarget()[0]),
+                  'ent': entity.view('incontext')})
+        self.w(u'<ul>')
+        self.w_unrelated(fakebox, entity)
+        for boxitem in fakebox:
+            boxitem.render(self.w)
+        self.w(u'</ul></div>')
+
+    def unrelated_entities(self, entity):
+        """returns the list of unrelated entities
+
+        if etype is not defined on the Box's class, the default
+        behaviour is to use the entity's appropriate vocabulary function
+        """
+        x, target = self.xtarget()
+        # use entity.unrelated if we've been asked for a particular etype
+        if getattr(self, 'etype', None):
+            rset = entity.unrelated(self.rtype, self.etype, x, ordermethod='fetch_order')
+            self.pagination(self.req, rset, w=self.w)
+            return rset.entities()
+        # in other cases, use vocabulary functions
+        entities = []
+        for _, eid in entity.vocabulary(self.rtype, x):
+            if eid is not None:
+                rset = self.req.eid_rset(eid)
+                entities.append(rset.get_entity(0, 0))
+        return entities
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/apacherewrite.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,94 @@
+"""provide class to do Apache rewrite rules'job inside cubicweb (though functionnalities
+are much more limited for the moment)
+
+:organization: Logilab
+:copyright: 2007-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+
+__docformat__ = "restructuredtext en"
+
+from re import compile
+
+from cubicweb.web import Redirect
+from cubicweb.web.component import SingletonComponent
+
+class RewriteCond(object):
+    """a single rewrite condition, mimicking Apache's RewriteCond/RewriteRule
+
+    :param condition: regexp matched against the request host or path
+    :param match: which part the condition applies to ('host' or 'path')
+    :param rules: sequence of (rule regexp, replacement) pairs applied to the path
+    :param action: what to do on match: 'rewrite', 'redirect' or 'stop'
+    """
+    def __init__(self, condition, match='host', rules=(), action='rewrite'):
+        self.condition = compile(condition)
+        assert match in ('host', 'path'), match
+        self.match_part = match
+        self.rules = []
+        for rule, replace in rules:
+            rulergx = compile(rule)
+            self.rules.append( (rulergx, replace) )
+        assert action in ('rewrite', 'redirect', 'stop'), action
+        # bind the configured action method once, e.g. self.action_rewrite
+        self.process = getattr(self, 'action_%s' % action)
+
+    def match(self, **kwargs):
+        # keep the match object around: named groups may be interpolated
+        # into the replacement by action_rewrite
+        self._match = self.condition.match(kwargs[self.match_part])
+        return not self._match is None
+    
+    def action_rewrite(self, path):
+        """apply the first matching rule to `path`; unmatched paths pass through"""
+        for rgx, replace in self.rules:
+            if not rgx.match(path) is None:
+                matchdict = self._match.groupdict() or None
+                if not matchdict is None:
+                    # interpolate named groups captured by the condition
+                    replace = replace % matchdict
+                return rgx.sub(replace, path)
+        return path
+
+    def action_redirect(self, path):
+        """rewrite then redirect the client to the resulting URL"""
+        url = self.action_rewrite(path)
+        raise Redirect(url)
+
+    def action_stop(self, path):
+        """terminal action: leave the path untouched"""
+        return path
+
+    
+class ApacheURLRewrite(SingletonComponent):
+    """inherit from this class with actual rules to activate apache style rewriting
+
+    rules should have the form :
+
+    [('condition pattern 1', [('rule1 pattern', 'replace expression'),
+                              ('rule2 pattern', 'replace expression')],
+     ('condition pattern 2', [('rule1 pattern', 'replace expression'),
+                              ('rule2 pattern', 'replace expression')]
+    ]
+
+    for instance the equivalent of the following apache rules:
+
+        RewriteCond %{HTTP_HOST} ^logilab\.fr
+        RewriteRule ^/(.*) http://www.logilab.fr/$1 [L,R=301]
+
+        RewriteCond %{HTTP_HOST} ^www\.logilab\.fr
+        RewriteRule ^/(.*) http://localhost:8080/$1 [L,P]
+
+        RewriteCond %{HTTP_HOST} ^(.+)\.logilab\.fr
+        RewriteRule ^/(data/.*) http://localhost:8080/$1 [L,P]
+        RewriteRule ^/(json.*) http://localhost:8080/$1 [L,P]
+        RewriteRule ^/(.*) http://localhost:8080/m_%1/$1 [L,P]
+    
+    could be written (considering that no "host rewriting" is necessary):
+
+      class MyAppRules(ApacheURLRewrite): 
+        rules = [
+          RewriteCond('logilab\.fr', match='host',
+                      rules=[('/(.*)', r'http://www.logilab.fr/\1')],
+                      action='redirect'),
+          RewriteCond('(www)\.logilab\.fr', match='host', action='stop'),
+          RewriteCond('/(data|json)/', match='path', action='stop'),
+          RewriteCond('(?P<cat>.*)\.logilab\.fr', match='host', 
+                      rules=[('/(.*)', r'/m_%(cat)s/\1')]),
+        ]
+    """
+    __abstract__ = True
+    id = 'urlrewriter'
+    rules = []
+        
+    def rewrite(self, host, path):
+        """apply the first matching condition's action to `path`;
+        return the (possibly rewritten) path unchanged when nothing matches
+        """
+        for cond in self.rules:
+            if cond.match(host=host, path=path):
+                return cond.process(path)
+        return path
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/authentication.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,103 @@
+"""user authentication component
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from logilab.common.decorators import clear_cache
+
+from cubicweb import AuthenticationError, BadConnectionId
+from cubicweb.dbapi import repo_connect, ConnectionProperties
+from cubicweb.web import ExplicitLogin, InvalidSession
+from cubicweb.web.application import AbstractAuthenticationManager
+    
+
+class RepositoryAuthenticationManager(AbstractAuthenticationManager):
+    """authenticate user associated to a request and check session validity"""
+    
+    def __init__(self):
+        # keep a handle on the repository and the optional query log file
+        self.repo = self.config.repository(self.vreg)
+        self.log_queries = self.config['query-log-file']
+
+    def validate_session(self, req, session):
+        """check session validity, and return eventually hijacked session
+
+        :raise InvalidSession:
+          if session is corrupted for a reason or another and should be closed
+        """
+        # with this authentication manager, session is actually a dbapi
+        # connection
+        cnx = session
+        login = req.get_authorization()[0]
+        try:
+            # calling cnx.user() check connection validity, raise
+            # BadConnectionId on failure
+            user = cnx.user(req)
+            if login and user.login != login:
+                cnx.close()
+                raise InvalidSession('login mismatch')
+        except BadConnectionId:
+            # check if a connection should be automatically re-established
+            if (login is None or login == cnx.login):
+                login, password = cnx.login, cnx.password
+                cnx = self.authenticate(req, login, password)
+                user = cnx.user(req)
+                # backport session's data
+                cnx.data = session.data
+            else:
+                raise InvalidSession('bad connection id')
+        # associate the connection to the current request
+        req.set_connection(cnx, user)
+        return cnx
+        
+    def authenticate(self, req, _login=None, _password=None):
+        """authenticate user and return corresponding user object
+        
+        :raise ExplicitLogin: if authentication is required (no authentication
+        info found or wrong user/password)
+
+        Note: this method is violating AuthenticationManager interface by
+        returning a session instance instead of the user. This is expected by
+        the InMemoryRepositorySessionManager.
+        """
+        if _login is not None:
+            login, password = _login, _password
+        else:
+            login, password = req.get_authorization()
+        if not login:
+            # No session and no login -> try anonymous
+            login, password = self.vreg.config.anonymous_user()
+            if not login: # anonymous not authorized
+                raise ExplicitLogin()
+        # remove possibly cached cursor coming from closed connection
+        clear_cache(req, 'cursor')
+        cnxprops = ConnectionProperties(self.vreg.config.repo_method,
+                                        close=False, log=self.log_queries)
+        try:
+            cnx = repo_connect(self.repo, login, password, cnxprops=cnxprops)
+        except AuthenticationError:
+            req.set_message(req._('authentication failure'))
+            # restore an anonymous connection if possible
+            anonlogin, anonpassword = self.vreg.config.anonymous_user()
+            if anonlogin and anonlogin != login:
+                cnx = repo_connect(self.repo, anonlogin, anonpassword,
+                                   cnxprops=cnxprops)
+                self._init_cnx(cnx, anonlogin, anonpassword)
+            else:
+                raise ExplicitLogin()
+        else:
+            self._init_cnx(cnx, login, password)
+        # associate the connection to the current request
+        req.set_connection(cnx)
+        return cnx
+
+    def _init_cnx(self, cnx, login, password):
+        """decorate the freshly opened connection with authentication info
+        (flag anonymous connections, keep credentials for reconnection)
+        """
+        if login == self.vreg.config.anonymous_user()[0]:
+            cnx.anonymous_connection = True
+        cnx.vreg = self.vreg
+        cnx.login = login
+        cnx.password = password
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/basecomponents.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,236 @@
+"""Bases HTML components:
+
+* the rql input form
+* the logged user link
+* the workflow history section for workflowable objects
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from rql import parse
+
+from cubicweb import Unauthorized
+from cubicweb.common.uilib import html_escape, toggle_action
+from cubicweb.common.selectors import yes_selector
+from cubicweb.schema import display_name
+from cubicweb.common.selectors import (chainfirst, multitype_selector,
+                                    req_form_params_selector)
+
+from cubicweb.web.htmlwidgets import MenuWidget, PopupBoxMenu, BoxSeparator, BoxLink
+from cubicweb.web.component import (SingletonVComponent, EntityVComponent, 
+                                 RelatedObjectsVComponent)
+
+_ = unicode
+
+
+class RQLInputForm(SingletonVComponent):
+    """build the rql input form, usually displayed in the header"""
+    id = 'rqlinput'
+    visible = False
+       
+    def call(self, view=None):
+        # views may expose a filtered result set through
+        # filter_box_context_info(); prefer it over the raw rset
+        if hasattr(view, 'filter_box_context_info'):
+            rset = view.filter_box_context_info()[0]
+        else:
+            rset = self.rset
+        # display multilines query as one line
+        rql = rset is not None and rset.printable_rql(encoded=False) or self.req.form.get('rql', '')
+        rql = rql.replace(u"\n", u" ")
+        req = self.req
+        self.w(u'''<div id="rqlinput" class="%s">
+          <form action="%s">
+<fieldset>
+<input type="text" id="rql" name="rql" value="%s"  title="%s" tabindex="%s" accesskey="q" class="searchField" />
+<input type="submit" value="%s" class="searchButton" tabindex="%s" />
+</fieldset>
+''' % (not self.propval('visible') and 'hidden' or '', 
+       self.build_url('view'), html_escape(rql), req._('full text or RQL query'), req.next_tabindex(),
+       req._('search'), req.next_tabindex()))
+        # propagate the current search state (e.g. entity linking mode)
+        if self.req.search_state[0] != 'normal':
+            self.w(u'<input type="hidden" name="__mode" value="%s"/>'
+                   % ':'.join(req.search_state[1]))
+        self.w(u'</form></div>')
+
+
+class ApplLogo(SingletonVComponent):
+    """build the application logo, usually displayed in the header"""
+    id = 'logo'
+    site_wide = True # don't want user to hide this component using an eproperty
+    def call(self):
+        # the logo links back to the site root
+        self.w(u'<a href="%s"><img class="logo" src="%s" alt="logo"/></a>'
+               % (self.req.base_url(), self.req.external_resource('LOGO')))
+
+
+class ApplHelp(SingletonVComponent):
+    """build the help button, usually displayed in the header"""
+    id = 'help'
+    def call(self):
+        # link to the main documentation page
+        self.w(u'<a href="%s" class="help" title="%s">&nbsp;</a>'
+               % (self.build_url(_restpath='doc/main'),
+                  self.req._(u'help'),))
+
+
+class UserLink(SingletonVComponent):
+    """if the user is the anonymous user, build a link to login
+    else a link to the connected user object with a logout link
+    """
+    id = 'loggeduserlink'
+    site_wide = True # don't want user to hide this component using an eproperty
+
+    def call(self):
+        if not self.req.cnx.anonymous_connection:
+            # display useractions and siteactions
+            actions = self.vreg.possible_actions(self.req, self.rset)
+            box = MenuWidget('', 'userActionsBox', _class='', islist=False)
+            menu = PopupBoxMenu(self.req.user.login, isitem=False)
+            box.append(menu)
+            for action in actions.get('useractions', ()):
+                menu.append(BoxLink(action.url(), self.req._(action.title),
+                                    action.html_class()))
+            # separate the two action categories when both are present
+            if actions.get('useractions') and actions.get('siteactions'):
+                menu.append(BoxSeparator())
+            for action in actions.get('siteactions', ()):
+                menu.append(BoxLink(action.url(), self.req._(action.title),
+                                    action.html_class()))
+            box.render(w=self.w)
+        else:
+            self.anon_user_link()
+            
+    def anon_user_link(self):
+        """render the anonymous user's login link; with cookie authentication
+        a javascript popup login box is used instead of the login page
+        """
+        if self.config['auth-mode'] == 'cookie':
+            self.w(self.req._('anonymous'))
+            self.w(u'''&nbsp;[<a class="logout" href="javascript:toggleVisibility('popupLoginBox'); document.login_form.__login.focus() ">%s</a>]'''
+                   % (self.req._('i18n_login_popup')))
+        else:
+            self.w(self.req._('anonymous'))
+            self.w(u'&nbsp;[<a class="logout" href="%s">%s</a>]'
+                   % (self.build_url('login'), self.req._('login')))
+
+
+class ApplicationMessage(SingletonVComponent):
+    """display application's messages given using the __message parameter
+    into a special div section
+    """
+    __selectors__ = yes_selector,
+    id = 'applmessages'
+    site_wide = True # don't want user to hide this component using an eproperty
+
+    def call(self):
+        # sources_error is popped so it is only displayed once
+        msgs = [msg for msg in (self.req.get_shared_data('sources_error', pop=True),
+                                self.req.message) if msg]
+        # the div is always emitted (hidden when empty) so javascript can
+        # toggle it on click
+        self.w(u'<div id="appMsg" onclick="%s" class="%s">\n' %
+               (toggle_action('appMsg'), (msgs and ' ' or 'hidden')))
+        for msg in msgs:
+            self.w(u'<div class="message" id="%s">%s</div>' % (
+                self.div_id(), msg))
+        self.w(u'</div>')
+
+
+class WFHistoryVComponent(EntityVComponent):
+    """display the workflow history for entities supporting it"""
+    id = 'wfhistory'
+    accepts = ('Any',)
+    context = 'navcontentbottom'
+    rtype = 'wf_info_for'
+    target = 'subject'
+    title = _('Workflow history')
+
+    def call(self, view=None):
+        _ = self.req._
+        eid = self.rset[0][0]
+        sel = 'Any FS,TS,WF,D'
+        rql = ' ORDERBY D DESC WHERE WF wf_info_for X,'\
+              'WF from_state FS, WF to_state TS, WF comment C,'\
+              'WF creation_date D'
+        # only select/display the transition's author when the current user
+        # may read EUser entities
+        if self.vreg.schema.eschema('EUser').has_perm(self.req, 'read'):
+            sel += ',U,C'
+            rql += ', WF owned_by U?'
+            displaycols = range(5)
+            headers = (_('from_state'), _('to_state'), _('comment'), _('date'),
+                       _('EUser'))            
+        else:
+            sel += ',C'
+            displaycols = range(4)
+            headers = (_('from_state'), _('to_state'), _('comment'), _('date'))
+        rql = '%s %s, X eid %%(x)s' % (sel, rql)
+        try:
+            rset = self.req.execute(rql, {'x': eid}, 'x')
+        except Unauthorized:
+            # user can't read the workflow info, silently skip the section
+            return
+        if rset:
+            self.wview('table', rset, title=_(self.title), displayactions=False,
+                       displaycols=displaycols, headers=headers)
+
+
+class ApplicationName(SingletonVComponent):
+    """display the application name"""
+    id = 'appliname'
+
+    def call(self):
+        # site title comes from the 'ui.site-title' property, linked to the root
+        self.w(u'<span id="appliName"><a href="%s">%s</a></span>' % (self.req.base_url(),
+                                                         self.req.property_value('ui.site-title')))
+        
+
+class SeeAlsoVComponent(RelatedObjectsVComponent):
+    """display any entity's see also"""
+    id = 'seealso'
+    context = 'navcontentbottom'
+    rtype = 'see_also'
+    target = 'object'
+    order = 40
+    # register msg not generated since no entity use see_also in cubicweb itself
+    title = _('contentnavigation_seealso')
+    help = _('contentnavigation_seealso_description')
+
+    
+class EtypeRestrictionComponent(SingletonVComponent):
+    """displays the list of entity types contained in the resultset
+    to be able to filter accordingly.
+    """
+    id = 'etypenavigation'
+    __select__ = classmethod(chainfirst(multitype_selector, req_form_params_selector))
+    form_params = ('__restrtype', '__restrtypes', '__restrrql')
+    visible = False # disabled by default
+    
+    def call(self):
+        _ = self.req._
+        self.w(u'<div id="etyperestriction">')
+        restrtype = self.req.form.get('__restrtype')
+        restrtypes = self.req.form.get('__restrtypes', '').split(',')
+        restrrql = self.req.form.get('__restrrql')
+        if not restrrql:
+            # first display: derive the unrestricted rql and candidate types
+            # from the current result set
+            rqlst = self.rset.syntax_tree()
+            restrrql = rqlst.as_string(self.req.encoding, self.rset.args)
+            restrtypes = self.rset.column_types(0)
+        else:
+            # a restriction is already active: parse the original query back
+            rqlst = parse(restrrql)
+        html = []
+        on_etype = False
+        # sort types by their (capitalized) translated label
+        etypes = sorted((display_name(self.req, etype).capitalize(), etype)
+                        for etype in restrtypes)
+        for elabel, etype in etypes:
+            if etype == restrtype:
+                html.append(u'<span class="selected">%s</span>' % elabel)
+                on_etype = True
+            else:
+                # temporarily add a type restriction to build the link's rql,
+                # then restore the tree with recover()
+                rqlst.save_state()
+                for select in rqlst.children:
+                    select.add_type_restriction(select.selection[0], etype)
+                newrql = rqlst.as_string(self.req.encoding, self.rset.args)
+                url = self.build_url(rql=newrql, __restrrql=restrrql,
+                                     __restrtype=etype, __restrtypes=','.join(restrtypes))
+                html.append(u'<span><a href="%s">%s</a></span>' % (
+                        html_escape(url), elabel))
+                rqlst.recover()
+        # prepend the 'Any' (no restriction) entry, selected when no
+        # particular type is active
+        if on_etype:
+            url = self.build_url(rql=restrrql)
+            html.insert(0, u'<span><a href="%s">%s</a></span>' % (
+                    url, _('Any')))
+        else:
+            html.insert(0, u'<span class="selected">%s</span>' % _('Any'))
+        self.w(u'&nbsp;|&nbsp;'.join(html))
+        self.w(u'</div>')
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/basecontrollers.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,515 @@
+# -*- coding: utf-8 -*-
+"""Set of base controllers, which are directly plugged into the application
+object to handle publication.
+
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from smtplib import SMTP
+
+import simplejson
+
+from mx.DateTime.Parser import DateFromString
+
+from logilab.common.decorators import cached
+
+from cubicweb import NoSelectableObject, ValidationError, typed_eid
+from cubicweb.common.selectors import yes_selector
+from cubicweb.common.mail import format_mail
+from cubicweb.common.view import STRICT_DOCTYPE, CW_XHTML_EXTENSIONS
+
+from cubicweb.web import ExplicitLogin, Redirect, RemoteCallFailed
+from cubicweb.web.controller import Controller
+from cubicweb.web.views import vid_from_rset
+try:
+    from cubicweb.web.facet import (FilterRQLBuilder, get_facet,
+                                 prepare_facets_rqlst)
+    HAS_SEARCH_RESTRICTION = True
+except ImportError: # gae
+    HAS_SEARCH_RESTRICTION = False
+    
+    
+class LoginController(Controller):
+    """controller handling the /login path"""
+    id = 'login'
+
+    def publish(self, rset=None):
+        """log in the application"""
+        if self.config['auth-mode'] == 'http':
+            # HTTP authentication
+            raise ExplicitLogin()
+        else:
+            # Cookie authentication
+            return self.appli.need_login_content(self.req)
+
+    
+class LogoutController(Controller):
+    """controller handling the /logout path"""
+    id = 'logout'
+    
+    def publish(self, rset=None):
+        """logout from the application"""
+        return self.appli.session_handler.logout(self.req)
+
+
+class ViewController(Controller):
+    """default controller rendering a result set through a main template"""
+    id = 'view'
+    template = 'main'
+    
+    def publish(self, rset=None):
+        """publish a request, returning an encoded string"""
+        self.req.update_search_state()
+        # honor the user's preferred main template, falling back to 'main'
+        # when the preference names an unregistered template
+        template = self.req.property_value('ui.main-template')
+        if template not in self.vreg.registry('templates') :
+            template = self.template
+        return self.vreg.main_template(self.req, template, rset=rset)
+
+    def execute_linkto(self, eid=None):
+        """XXX __linkto parameter may cause security issue
+
+        defined here since custom application controller inheriting from this
+        one use this method?
+        """
+        req = self.req
+        if not '__linkto' in req.form:
+            return
+        if eid is None:
+            eid = typed_eid(req.form['eid'])
+        # each __linkto value has the form 'rtype:eid1_eid2_...:target'
+        for linkto in req.list_form_param('__linkto', pop=True):
+            rtype, eids, target = linkto.split(':')
+            assert target in ('subject', 'object')
+            eids = eids.split('_')
+            if target == 'subject':
+                rql = 'SET X %s Y WHERE X eid %%(x)s, Y eid %%(y)s' % rtype
+            else:
+                rql = 'SET Y %s X WHERE X eid %%(x)s, Y eid %%(y)s' % rtype
+            for teid in eids:
+                req.execute(rql, {'x': eid, 'y': typed_eid(teid)}, ('x', 'y')) 
+
+
+class FormValidatorController(Controller):
+    id = 'validateform'
+
+    def publish(self, rset=None):
+        vreg = self.vreg
+        try:
+            ctrl = vreg.select(vreg.registry_objects('controllers', 'edit'),
+                               req=self.req, appli=self.appli)
+        except NoSelectableObject:
+            status, args = (False, {None: self.req._('not authorized')})
+        else:
+            try:
+                ctrl.publish(None, fromjson=True)
+            except ValidationError, err:
+                status, args = self.validation_error(err)
+            except Redirect, err:
+                try:
+                    self.req.cnx.commit() # ValidationError may be raise on commit
+                except ValidationError, err:
+                    status, args = self.validation_error(err)
+                else:
+                    status, args = (True, err.location)
+            except Exception, err:
+                self.req.cnx.rollback()
+                self.exception('unexpected error in validateform')
+                try:
+                    status, args = (False, self.req._(unicode(err)))
+                except UnicodeError:
+                    status, args = (False, repr(err))
+            else:
+                status, args = (False, '???')
+        self.req.set_content_type('text/html')
+        jsarg = simplejson.dumps( (status, args) )
+        return """<script type="text/javascript">
+ window.parent.handleFormValidationResponse('entityForm', null, %s);
+</script>""" %  simplejson.dumps( (status, args) )
+
+    def validation_error(self, err):
+        self.req.cnx.rollback()
+        try:
+            eid = err.entity.eid
+        except AttributeError:
+            eid = err.entity
+        return (False, (eid, err.errors))
+        
+def xmlize(source):
+    """wrap an html fragment into a valid standalone xhtml document chunk,
+    declaring the xhtml and cubicweb namespaces
+    """
+    head = u'<?xml version="1.0"?>\n' + STRICT_DOCTYPE % CW_XHTML_EXTENSIONS
+    return head + u'<div xmlns="http://www.w3.org/1999/xhtml" xmlns:cubicweb="http://www.logilab.org/2008/cubicweb">%s</div>' % source.strip()
+
+def jsonize(func):
+    """sets correct content_type and calls `simplejson.dumps` on results
+    """
+    def wrapper(self, *args, **kwargs):
+        # content type must be set before the response body is produced
+        self.req.set_content_type('application/json')
+        result = func(self, *args, **kwargs)
+        return simplejson.dumps(result)
+    return wrapper
+
+
+def check_pageid(func):
+    """decorator which checks the given pageid is found in the
+    user's session data
+
+    :raise RemoteCallFailed: when no data is associated to the request's pageid
+    """
+    def wrapper(self, *args, **kwargs):
+        data = self.req.get_session_data(self.req.pageid)
+        if data is None:
+            raise RemoteCallFailed(self.req._('pageid-not-found'))
+        return func(self, *args, **kwargs)
+    return wrapper
+    
+
+class JSonController(Controller):
+    id = 'json'
+    template = 'main'
+
+    def publish(self, rset=None):
+        """dispatch to the `<mode>_exec` method named by the 'mode' form
+        parameter (default 'html') and return the encoded result
+        """
+        mode = self.req.form.get('mode', 'html')
+        self.req.pageid = self.req.form.get('pageid')
+        try:
+            func = getattr(self, '%s_exec' % mode)
+        except AttributeError, ex:
+            # unknown mode: log it and answer with an empty body
+            self.error('json controller got an unknown mode %r', mode)
+            self.error('\t%s', ex)
+            result = u''
+        else:
+            try:
+                result = func(rset)
+            except RemoteCallFailed:
+                # propagate as is, it already carries a user-displayable reason
+                raise
+            except Exception, ex:
+                self.exception('an exception occured on json request(rset=%s): %s',
+                               rset, ex)
+                raise RemoteCallFailed(repr(ex))
+        return result.encode(self.req.encoding)
+
+    def _exec(self, rql, args=None, eidkey=None, rocheck=True):
+        """json mode: execute RQL and return resultset as json"""
+        if rocheck:
+            self.ensure_ro_rql(rql)
+        try:
+            return self.req.execute(rql, args, eidkey)
+        except Exception, ex:
+            self.exception("error in _exec(rql=%s): %s", rql, ex)
+            return None
+        return None
+
+    @jsonize
+    def json_exec(self, rset=None):
+        """json mode: execute RQL and return resultset rows as json
+        (empty list when there is no result set and no rql parameter)
+        """
+        rql = self.req.form.get('rql')
+        if rset is None and rql:
+            rset = self._exec(rql)
+        return rset and rset.rows or []
+
+    def _set_content_type(self, vobj, data):
+        """sets req's content type according to vobj's content type
+        (and xmlize data if needed)
+        """
+        content_type = vobj.content_type
+        if content_type == 'application/xhtml+xml':
+            # strict xhtml views need a full document wrapper to be parseable
+            self.req.set_content_type(content_type)
+            return xmlize(data)
+        return data
+
+    def html_exec(self, rset=None):
+        """html mode: execute query and return the view as HTML"""
+        req = self.req
+        rql = req.form.get('rql')
+        if rset is None and rql:
+            rset = self._exec(rql)
+            
+        vid = req.form.get('vid') or vid_from_rset(req, rset, self.schema)
+        try:
+            view = self.vreg.select_view(vid, req, rset)
+        except NoSelectableObject:
+            vid = req.form.get('fallbackvid', 'noresult')
+            view = self.vreg.select_view(vid, req, rset)
+        divid = req.form.get('divid', 'pageContent')
+        # we need to call pagination before with the stream set
+        stream = view.set_stream()
+        if req.form.get('paginate'):
+            if divid == 'pageContent':
+                # mimick main template behaviour
+                stream.write(u'<div id="pageContent">')
+                vtitle = self.req.form.get('vtitle')
+                if vtitle:
+                    w(u'<h1 class="vtitle">%s</h1>\n' % vtitle)
+            view.pagination(req, rset, view.w, not view.need_navigation)
+            if divid == 'pageContent':
+                stream.write(u'<div id="contentmain">')
+        view.dispatch()
+        if req.form.get('paginate') and divid == 'pageContent':
+            stream.write(u'</div></div>')
+        source = stream.getvalue()
+        return self._set_content_type(view, source)
+
+    def rawremote_exec(self, rset=None):
+        """like remote_exec but doesn't change content type
+
+        dispatch to the `js_<fname>` method named by the 'fname' form
+        parameter, passing it the json-decoded 'arg' form values
+        """
+        # no <arg> attribute means the callback takes no argument
+        args = self.req.form.get('arg', ())
+        if not isinstance(args, (list, tuple)):
+            args = (args,)
+        fname = self.req.form['fname']
+        args = [simplejson.loads(arg) for arg in args]
+        try:
+            func = getattr(self, 'js_%s' % fname)
+        except AttributeError:
+            self.exception('rawremote_exec fname=%s', fname)
+            return u""
+        return func(*args)
+
+    # json-encoding variant of rawremote_exec
+    remote_exec = jsonize(rawremote_exec)
+        
+    def _rebuild_posted_form(self, names, values, action=None):
+        """rebuild a form dict from parallel `names`/`values` lists, turning
+        repeated names into value lists and dropping __action_* inputs
+        """
+        form = {}
+        for name, value in zip(names, values):
+            # remove possible __action_xxx inputs
+            if name.startswith('__action'):
+                continue
+            # form.setdefault(name, []).append(value)
+            if name in form:
+                curvalue = form[name]
+                if isinstance(curvalue, list):
+                    curvalue.append(value)
+                else:
+                    form[name] = [curvalue, value]
+            else:
+                form[name] = value
+        # simulate click on __action_%s button to help the controller
+        if action:
+            form['__action_%s' % action] = u'whatever'
+        return form
+    
+    def js_validate_form(self, action, names, values):
+        """publish the posted form through the 'edit' controller and return
+        a (status, args) pair describing success or validation failure
+        """
+        # XXX this method (and corresponding js calls) should use the new
+        #     `RemoteCallFailed` mechanism
+        self.req.form = self._rebuild_posted_form(names, values, action)
+        vreg = self.vreg
+        try:
+            ctrl = vreg.select(vreg.registry_objects('controllers', 'edit'),
+                               req=self.req)
+        except NoSelectableObject:
+            return (False, {None: self.req._('not authorized')})
+        try:
+            ctrl.publish(None, fromjson=True)
+        except ValidationError, err:
+            self.req.cnx.rollback()
+            # err.entity may be an entity instance or a bare eid
+            if not err.entity or isinstance(err.entity, (long, int)):
+                eid = err.entity
+            else:
+                eid = err.entity.eid
+            return (False, (eid, err.errors))
+        except Redirect, err:
+            # the edit controller raises Redirect on success
+            return (True, err.location)
+        except Exception, err:
+            self.req.cnx.rollback()
+            self.exception('unexpected error in js_validateform')
+            return (False, self.req._(str(err)))
+        return (False, '???')
+
+    def js_edit_field(self, action, names, values, rtype, eid):
+        """apply an inline attribute edition form; on success also return
+        the new printable value of the edited attribute `rtype`
+        """
+        success, args = self.js_validate_form(action, names, values)
+        if success:
+            rset = self.req.execute('Any X,N WHERE X eid %%(x)s, X %s N' % rtype,
+                                    {'x': eid}, 'x')
+            entity = rset.get_entity(0, 0)
+            return (success, args, entity.printable_value(rtype))
+        else:
+            return (success, args, None)
+            
+    def js_rql(self, rql):
+        """execute `rql` and return the raw result rows (an empty list when
+        the result set is empty)
+        """
+        rset = self._exec(rql)
+        return rset and rset.rows or []
+    
+    def js_i18n(self, msgids):
+        """returns the translation of each message id in `msgids`"""
+        return [self.req._(msgid) for msgid in msgids]
+
+    def js_format_date(self, strdate):
+        """returns `strdate` parsed and reformatted with the user's
+        preferred date format
+        """
+        date = DateFromString(strdate)
+        return self.format_date(date)
+
+    def js_external_resource(self, resource):
+        """returns the URL of the external resource named `resource`"""
+        return self.req.external_resource(resource)
+
+    def js_prop_widget(self, propkey, varname, tabindex=None):
+        """specific method for EProperty handling: renders the edition
+        widget for the property `propkey`, using `varname` as a fake eid
+        and the property's default as initial value
+        """
+        w = self.vreg.property_value_widget(propkey, req=self.req)
+        # transient EProperty instance, only used to render the widget
+        entity = self.vreg.etype_class('EProperty')(self.req, None, None)
+        entity.eid = varname
+        self.req.form['value'] = self.vreg.property_info(propkey)['default']
+        return w.edit_render(entity, tabindex, includehelp=True)
+
+    def js_component(self, compid, rql, registry='components', extraargs=None):
+        if rql:
+            rset = self._exec(rql)
+        else:
+            rset = None
+        comp = self.vreg.select_object(registry, compid, self.req, rset)
+        if extraargs is None:
+            extraargs = {}
+        else: # we receive unicode keys which is not supported by the **syntax
+            extraargs = dict((str(key), value)
+                             for key, value in extraargs.items())
+        extraargs = extraargs or {}
+        print 'extraargs =', extraargs
+        return self._set_content_type(comp, comp.dispatch(**extraargs))
+
+    @check_pageid
+    def js_user_callback(self, cbname):
+        """execute the page callback `cbname` registered in the session
+        data of the current page; returns None when the callback has
+        expired or was never registered
+        """
+        page_data = self.req.get_session_data(self.req.pageid, {})
+        try:
+            cb = page_data[cbname]
+        except KeyError:
+            return None
+        return cb(self.req)
+    
+    def js_unregister_user_callback(self, cbname):
+        """drop the page callback `cbname` registered for the current page"""
+        self.req.unregister_callback(self.req.pageid, cbname)
+
+    def js_unload_page_data(self):
+        """forget all session data associated to the current page id"""
+        self.req.del_session_data(self.req.pageid)
+        
+    def js_cancel_edition(self, errorurl):
+        """cancelling edition from javascript
+
+        We need to clear the associated request data:
+          - errorurl
+          - pending insertions / deletions
+        """
+        self.req.cancel_edition(errorurl)
+    
+    @check_pageid
+    def js_inline_creation_form(self, peid, ptype, ttype, rtype, role):
+        """render the 'inline-creation' view for a new entity of type
+        `ttype`, to be linked to the parent entity `peid` (of type `ptype`)
+        through the relation `rtype` played as `role`
+        """
+        view = self.vreg.select_view('inline-creation', self.req, None,
+                                     etype=ttype, ptype=ptype, peid=peid,
+                                     rtype=rtype, role=role)
+        source = view.dispatch(etype=ttype, ptype=ptype, peid=peid, rtype=rtype,
+                               role=role)
+        return self._set_content_type(view, source)
+
+    def js_remove_pending_insert(self, (eidfrom, rel, eidto)):
+        """forget a pending relation insertion stored in the session"""
+        self._remove_pending(eidfrom, rel, eidto, 'insert')
+        
+    def js_add_pending_insert(self, (eidfrom, rel, eidto)):
+        """record a pending relation insertion in the session"""
+        self._add_pending(eidfrom, rel, eidto, 'insert')
+        
+    def js_add_pending_inserts(self, tripletlist):
+        """record several pending relation insertions at once"""
+        for eidfrom, rel, eidto in tripletlist:
+            self._add_pending(eidfrom, rel, eidto, 'insert')
+        
+    def js_remove_pending_delete(self, (eidfrom, rel, eidto)):
+        """forget a pending relation deletion stored in the session"""
+        self._remove_pending(eidfrom, rel, eidto, 'delete')
+    
+    def js_add_pending_delete(self, (eidfrom, rel, eidto)):
+        """record a pending relation deletion in the session"""
+        self._add_pending(eidfrom, rel, eidto, 'delete')
+
+    # facet-based filtering callbacks, only available when the optional
+    # search restriction support is installed
+    if HAS_SEARCH_RESTRICTION:
+        def js_filter_build_rql(self, names, values):
+            """rebuild the rql query corresponding to the posted facet form"""
+            form = self._rebuild_posted_form(names, values)
+            self.req.form = form
+            builder = FilterRQLBuilder(self.req)
+            return builder.build_rql()
+
+        def js_filter_select_content(self, facetids, rql):
+            """return a mapping of facet id to its possible values in the
+            context of the given `rql` query
+            """
+            rqlst = self.vreg.parse(self.req, rql) # XXX Union unsupported yet
+            mainvar = prepare_facets_rqlst(rqlst)[0]
+            update_map = {}
+            for facetid in facetids:
+                facet = get_facet(self.req, facetid, rqlst.children[0], mainvar)
+                update_map[facetid] = facet.possible_values()
+            return update_map
+
+    def js_delete_bookmark(self, beid):
+        """remove the bookmark `beid` from the logged in user's bookmarks;
+        returns an error message on failure, None on success
+        """
+        try:
+            rql = 'DELETE B bookmarked_by U WHERE B eid %(b)s, U eid %(u)s'
+            self.req.execute(rql, {'b': typed_eid(beid), 'u' : self.req.user.eid})
+        except Exception, ex:
+            self.exception(unicode(ex))
+            return self.req._('Problem occured')
+
+    def _add_pending(self, eidfrom, rel, eidto, kind):
+        """record a pending relation operation (`kind` is 'insert' or
+        'delete') as an (eidfrom, rel, eidto) triplet in the session data
+        """
+        key = 'pending_%s' % kind
+        pendings = self.req.get_session_data(key, set())
+        pendings.add( (typed_eid(eidfrom), rel, typed_eid(eidto)) )
+        self.req.set_session_data(key, pendings)
+
+    def _remove_pending(self, eidfrom, rel, eidto, kind):
+        key = 'pending_%s' % kind        
+        try:
+            pendings = self.req.get_session_data(key)
+            pendings.remove( (typed_eid(eidfrom), rel, typed_eid(eidto)) )
+        except:
+            self.exception('while removing pending eids')
+        else:
+            self.req.set_session_data(key, pendings)
+
+    def js_add_and_link_new_entity(self, etype_to, rel, eid_to, etype_from, value_from):
+        # create a new entity
+        eid_from = self.req.execute('INSERT %s T : T name "%s"' % ( etype_from, value_from ))[0][0]
+        # link the new entity to the main entity
+        rql = 'SET F %(rel)s T WHERE F eid %(eid_to)s, T eid %(eid_from)s' % {'rel' : rel, 'eid_to' : eid_to, 'eid_from' : eid_from}
+        return eid_from
+    
+class SendMailController(Controller):
+    """controller sending emails on behalf of the logged in user to a set
+    of recipient entities given by their eids"""
+    id = 'sendmail'
+    # restricted to managers and regular users
+    require_groups = ('managers', 'users')
+
+    def recipients(self):
+        """returns an iterator on email's recipients as entities"""
+        eids = self.req.form['recipient']
+        # make sure we have a list even though only one recipient was specified
+        if isinstance(eids, basestring):
+            eids = (eids,)
+        rql = 'Any X WHERE X eid in (%s)' % (','.join(eids))
+        rset = self.req.execute(rql)
+        for entity in rset.entities():
+            entity.complete() # XXX really?
+            yield entity
+
+    @property
+    @cached
+    def smtp(self):
+        """lazily opened (and cached) connection to the configured smtp
+        server; redirects with an error message when the connection fails
+        """
+        mailhost, port = self.config['smtp-host'], self.config['smtp-port']
+        try:
+            return SMTP(mailhost, port)
+        except Exception, ex:
+            self.exception("can't connect to smtp server %s:%s (%s)",
+                             mailhost, port, ex)
+            url = self.build_url(__message=self.req._('could not connect to the SMTP server'))
+            raise Redirect(url)
+
+    def sendmail(self, recipient, subject, body):
+        """send a single email to `recipient`, signed by the logged in user"""
+        helo_addr = '%s <%s>' % (self.config['sender-name'],
+                                 self.config['sender-addr'])
+        msg = format_mail({'email' : self.req.user.get_email(),
+                           'name' : self.req.user.dc_title(),},
+                          [recipient], body, subject)
+        self.smtp.sendmail(helo_addr, [recipient], msg.as_string())    
+
+    def publish(self, rset=None):
+        """send the posted mail body/subject to each selected recipient,
+        then redirect with a confirmation message"""
+        # XXX this allow anybody with access to an cubicweb application to use it as a mail relay
+        body = self.req.form['mailbody']
+        subject = self.req.form['mailsubject']
+        for recipient in self.recipients():
+            # the body may contain %(key)s substitutions filled from the
+            # recipient's email context
+            text = body % recipient.as_email_context()
+            self.sendmail(recipient.get_email(), subject, text)
+        # breadcrumbs = self.req.get_session_data('breadcrumbs', None)
+        url = self.build_url(__message=self.req._('emails successfully sent'))
+        raise Redirect(url)
+
+
+class MailBugReportController(SendMailController):
+    """controller mailing a user-submitted bug report to the address
+    configured as 'submit-mail'"""
+    id = 'reportbug'
+    # selectable in any context
+    __selectors__ = (yes_selector,)
+
+    def publish(self, rset=None):
+        body = self.req.form['description']
+        self.sendmail(self.config['submit-mail'], _('%s error report') % self.config.appid, body)
+        url = self.build_url(__message=self.req._('bug report sent'))
+        raise Redirect(url)
+    
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/baseforms.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,989 @@
+"""Set of HTML automatic forms to create, delete, copy or edit a single entity
+or a list of entities of the same type
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from copy import copy
+
+from simplejson import dumps
+
+from logilab.mtconverter import html_escape
+from logilab.common.decorators import cached
+
+from cubicweb.interfaces import IWorkflowable
+from cubicweb.common.utils import make_uid
+from cubicweb.common.uilib import cut
+from cubicweb.common.selectors import (etype_form_selector, kwargs_selector,
+                                    onelinerset_selector, interface_selector,
+                                    req_form_params_selector, accept_selector)
+from cubicweb.common.view import EntityView
+from cubicweb.web import INTERNAL_FIELD_VALUE, stdmsgs, eid_param
+from cubicweb.web.controller import NAV_FORM_PARAMETERS
+from cubicweb.web.widgets import checkbox, InputWidget, ComboBoxWidget
+from cubicweb.web.form import EntityForm, relation_id
+
+_ = unicode
+
+class DeleteConfForm(EntityForm):
+    """deletion confirmation form listing the entities about to be deleted"""
+    id = 'deleteconf'
+    title = _('delete')
+    domid = 'deleteconf'
+    # optional javascript onsubmit handler for the generated <form>
+    onsubmit = None
+    
+    def call(self):
+        """ask for confirmation before real deletion"""
+        _ = self.req._
+        self.req.add_js('cubicweb.edition.js')
+        self.w(u'<script type="text/javascript">updateMessage(\'%s\');</script>\n' % _('this action is not reversible!'))
+        # XXX above message should have style of a warning
+        self.w(u'<h4>%s</h4>\n' % _('Do you want to delete the following element(s) ?'))
+        if self.onsubmit:
+            self.w(u'<form id="deleteconf" action="%s" onsubmit="%s" method="post">'
+                   % (self.build_url(), self.onsubmit))
+        else:
+            self.w(u'<form id="deleteconf" action="%s" method="post">'
+                   % (self.build_url()))
+            
+        self.w(u'<fieldset>\n')
+        self.display_rset()
+        #self.w(u'<input type="hidden" name="rql" value="%s"/>' % self.req.form['rql'])
+        self.w(u'<input type="hidden" name="__form_id" value="%s"/>' % self.id)
+        self.w(self.button_delete(label=stdmsgs.YES))
+        self.w(self.button_cancel(label=stdmsgs.NO))
+        # propagate navigation parameters so the user comes back to the
+        # same context after confirmation / cancellation
+        for param in NAV_FORM_PARAMETERS:
+            value = self.req.form.get(param)
+            if value:
+                self.w(u'<input type="hidden" name="%s" value="%s"/>' % (param, value))
+        self.w(u'</fieldset></form>\n')
+
+    def display_rset(self):
+        """list each entity of the result set once (duplicates skipped)"""
+        self.w(u'<ul>\n')
+        done = set()
+        for i in xrange(self.rset.rowcount):
+            if self.rset[i][0] in done:
+                continue
+            done.add(self.rset[i][0])
+            self.cell_call(i, 0)
+        self.w(u'</ul>\n')
+        
+    def cell_call(self, row, col):
+        """render one entity to delete as a list item with its hidden
+        eid/type inputs"""
+        entity = self.entity(row, col)
+        self.w(u'<li>')
+        self.w(u'<input type="hidden" name="eid" value="%s" />' % entity.eid)
+        self.w(u'<input type="hidden" name="%s" value="%s"/>\n'
+               % (eid_param('__type', entity.eid), self.rset.description[row][0]))
+        self.w(u'<a href="%s">' % html_escape(entity.absolute_url()))
+        # don't use outofcontext view or any other that may contain inline edition form
+        self.w(html_escape(entity.view('textoutofcontext')))
+        self.w(u'</a>')
+        self.w(u'</li>')
+
+
+class ChangeStateForm(EntityForm):
+    """form firing a workflow transition on an entity, asking for an
+    optional comment; expects the transition eid in the 'treid' form
+    parameter"""
+    id = 'statuschange'
+    title = _('status change')
+
+    __selectors__ = (interface_selector, req_form_params_selector)
+    accepts_interfaces = (IWorkflowable,)
+    form_params = ('treid',)
+
+    def cell_call(self, row, col, vid='secondary'):
+        """render the state-change form for the entity at (row, col)"""
+        entity = self.entity(row, col)
+        eid = entity.eid
+        state = entity.in_state[0]
+        transition = self.req.eid_rset(self.req.form['treid']).get_entity(0, 0)
+        dest = transition.destination()
+        self.req.add_js('cubicweb.edition.js')
+        _ = self.req._
+        self.w(self.error_message())
+        self.w(u'<h4>%s %s</h4>\n' % (_(transition.name), entity.view('oneline')))
+        self.w(u'<p>%s</p>\n' % (_('status will change from %s to %s')
+                               % (_(state.name), _(dest.name))))
+        self.w(u'<form action="%s" onsubmit="return freezeFormButtons(\'entityForm\');" method="post" id="entityForm">\n'
+               % self.build_url('edit'))
+        self.w(u'<div id="progress">%s</div>' % _('validating...'))
+        self.w(u'<fieldset>\n')
+        #self.w(u'<input id="errorurl" type="hidden" name="__errorurl" value="%s"/>\n'
+        #       % html_escape(self.req.url()))
+        self.w(u'<input type="hidden" name="__form_id" value="%s"/>\n' % self.id)
+        self.w(u'<input type="hidden" name="eid" value="%s" />' % eid)
+        self.w(u'<input type="hidden" name="%s" value="%s"/>\n'
+               % (eid_param('__type', eid), entity.e_schema))
+        self.w(u'<input type="hidden" name="%s" value="%s"/>\n'
+               % (eid_param('state', eid), dest.eid))
+        self.w(u'<input type="hidden" name="__redirectpath" value="%s"/>\n'
+               % html_escape(entity.rest_path()))
+        self.fill_form(entity, state, dest)
+        self.w(u'<input type="hidden" name="__method" value="set_state"/>\n')
+        self.w(self.button_ok(label=stdmsgs.YES, tabindex=self.req.next_tabindex()))
+        self.w(self.button_cancel(label=stdmsgs.NO, tabindex=self.req.next_tabindex()))
+        self.w(u'</fieldset>\n')
+        self.w(u'</form>')
+        
+    def fill_form(self, entity, state, dest):
+        """render the transition comment (and its format) input fields"""
+        # hack to use the widget for comment_format
+        trinfo = self.vreg.etype_class('TrInfo')(self.req, None)
+        # widget are cached, copy it since we want to modify its name attribute
+        wdg = trinfo.get_widget('comment_format')
+        wdg.name = 'trcommentformat'
+        # set a value in entity to avoid lookup for a non existant attribute...
+        trinfo['trcommentformat'] = u''
+        # comment format/content have to be grouped using the original entity eid
+        wdg.rname = eid_param('trcommentformat', entity.eid)
+        self.w(wdg.render_label(trinfo))
+        self.w(wdg._edit_render(trinfo))
+        self.w(u'<br/>\n')
+        cformname = eid_param('trcomment', entity.eid)
+        self.w(u'<label for="%s">%s</label>\n' % (cformname, self.req._('comment:')))
+        self.w(u'<textarea rows="10" cols="80" name="%s" tabindex="%s"></textarea><br/>\n'
+               % (cformname, self.req.next_tabindex()))
+
+
+class ClickAndEditForm(EntityForm):
+    """in-place ('click and edit') edition form for a single attribute,
+    toggled by double-clicking the displayed value"""
+    id = 'reledit'
+    __selectors__ = (kwargs_selector, )
+    expected_kwargs = ('rtype',)
+
+    #FIXME editableField class could be toggleable from userprefs
+
+    EDITION_BODY = '''
+<div class="editableField" id="%(divid)s"
+      ondblclick="showInlineEditionForm(%(eid)s, '%(rtype)s', '%(divid)s')">%(value)s</div>
+<form style="display: none;" onsubmit="return inlineValidateForm('%(divid)s-form', '%(rtype)s', '%(eid)s', '%(divid)s', %(reload)s);" id="%(divid)s-form" action="#">
+<fieldset>
+<input type="hidden" name="eid" value="%(eid)s" />
+<input type="hidden" name="__maineid" value="%(eid)s" />
+<input type="hidden" name="__type:%(eid)s" value="%(etype)s" />
+%(attrform)s
+</fieldset>
+<div class="buttonbar">
+%(ok)s
+%(cancel)s
+</div>
+</form>
+'''
+    def cell_call(self, row, col, rtype=None, role='subject', reload=False):
+        """display the attribute `rtype` value, with a hidden inline form
+        when the user is allowed to update the entity
+        """
+        # NOTE(review): `role` is accepted but the widget below is always
+        # fetched for the 'subject' role -- confirm this is intended
+        entity = self.entity(row, col)
+        if getattr(entity, rtype) is None:
+            value = self.req._('not specified')
+        else:
+            value = entity.printable_value(rtype)
+        if not entity.has_perm('update'):
+            self.w(value)
+            return
+        self.req.add_js( ('cubicweb.ajax.js', 'cubicweb.edition.js') )
+        eid = entity.eid
+        edit_key = make_uid('%s-%s' % (rtype, eid))
+        divid = 'd%s' % edit_key
+        widget = entity.get_widget(rtype, 'subject')
+        eschema = entity.e_schema
+        attrform = widget.edit_render(entity, useid='i%s' % edit_key)
+        ok = (u'<input class="validateButton" type="submit" name="__action_apply" value="%s" tabindex="%s" />'
+              % (self.req._(stdmsgs.BUTTON_OK), self.req.next_tabindex()))
+        cancel = (u'<input class="validateButton" type="button" '
+                  'value="%s" onclick="cancelInlineEdit(%s, \'%s\', \'%s\')"  tabindex="%s" />'
+                  % (self.req._(stdmsgs.BUTTON_CANCEL), eid, rtype, divid,
+                     self.req.next_tabindex()))
+        self.w(self.EDITION_BODY % {
+                'eid': eid,
+                'rtype': rtype,
+                'etype': entity.e_schema,
+                'attrform': attrform,
+                'action' : self.build_url('edit'), # NOTE: actually never gets called
+                'ok': ok,
+                'cancel': cancel,
+                'value': value,
+                'reload': dumps(reload),
+                'divid': divid,
+                })
+
+
+class EditionForm(EntityForm):
+    """primary entity edition form
+
+    When generating a new attribute_input, the editor will look for a method
+    named 'default_ATTRNAME' on the entity instance, where ATTRNAME is the
+    name of the attribute being edited. You may use this feature to compute
+    dynamic default values such as the 'tomorrow' date or the user's login
+    being connected
+    """    
+    __selectors__ = (onelinerset_selector, accept_selector)
+
+    id = 'edition'
+    title = _('edition')
+    controller = 'edit'
+    skip_relations = EntityForm.skip_relations.copy()
+    
+    EDITION_BODY = u'''\
+ %(errormsg)s
+<form id="%(formid)s" class="entityForm" cubicweb:target="eformframe"
+      method="post" onsubmit="%(onsubmit)s" enctype="%(enctype)s" action="%(action)s">
+ %(title)s
+ <div id="progress">%(inprogress)s</div>
+ <div class="iformTitle"><span>%(mainattrs_label)s</span></div>
+ <div class="formBody"><fieldset>
+ %(base)s
+ %(attrform)s
+ %(relattrform)s
+</fieldset>
+ %(relform)s
+ </div>
+ <table width="100%%">
+  <tbody>
+   <tr><td align="center">
+     %(validate)s
+   </td><td style="align: right; width: 50%%;">
+     %(apply)s
+     %(cancel)s
+   </td></tr>
+  </tbody>
+ </table>
+</form>
+'''
+
+    def cell_call(self, row, col, **kwargs):
+        self.req.add_js( ('cubicweb.ajax.js', 'cubicweb.edition.js') )
+        self.req.add_css('cubicweb.form.css')
+        entity = self.complete_entity(row, col)
+        self.edit_form(entity, kwargs)
+
+    def edit_form(self, entity, kwargs):
+        varmaker = self.req.get_page_data('rql_varmaker')
+        if varmaker is None:
+            varmaker = self.req.varmaker
+            self.req.set_page_data('rql_varmaker', varmaker)
+        self.varmaker = varmaker
+        self.w(self.EDITION_BODY % self.form_context(entity, kwargs))
+
+    def form_context(self, entity, kwargs):
+        """returns the dictionnary used to fill the EDITION_BODY template
+
+        If you create your own edition form, you can probably just override
+        `EDITION_BODY` and `form_context`
+        """
+        if self.need_multipart(entity):
+            enctype = 'multipart/form-data'
+        else:
+            enctype = 'application/x-www-form-urlencoded'
+        self._hiddens = []
+        if entity.eid is None:
+            entity.eid = self.varmaker.next()
+        # XXX (hack) action_title might need __linkto req's original value
+        #            and widgets such as DynamicComboWidget might change it
+        #            so we need to compute title before calling atttributes_form
+        formtitle = self.action_title(entity)
+        # be sure to call .*_form first so tabindexes are correct and inlined
+        # fields errors are consumed
+        if not entity.has_eid() or entity.has_perm('update'):
+            attrform = self.attributes_form(entity, kwargs)
+        else:
+            attrform = ''
+        inlineform = self.inline_entities_form(entity, kwargs)
+        relform = self.relations_form(entity, kwargs)
+        vindex = self.req.next_tabindex()
+        aindex = self.req.next_tabindex()
+        cindex = self.req.next_tabindex()
+        self.add_hidden_web_behaviour_params(entity)
+        _ = self.req._
+        return {
+            'formid'   : self.domid,
+            'onsubmit' : self.on_submit(entity),
+            'enctype'  : enctype,
+            'errormsg' : self.error_message(),
+            'action'   : self.build_url('validateform'),
+            'eids'     : entity.has_eid() and [entity.eid] or [],
+            'inprogress': _('validating...'),
+            'title'    : formtitle,
+            'mainattrs_label' : _('main informations'),
+            'reseturl' : self.redirect_url(entity),
+            'attrform' : attrform,
+            'relform'  : relform,
+            'relattrform': inlineform,
+            'base'     : self.base_form(entity, kwargs),
+            'validate' : self.button_ok(tabindex=vindex),
+            'apply'    : self.button_apply(tabindex=aindex),
+            'cancel'   : self.button_cancel(tabindex=cindex),
+            }
+
+    @property
+    def formid(self):
+        return self.id
+    
+    def action_title(self, entity):
+        """form's title"""
+        ptitle = self.req._(self.title)
+        return u'<div class="formTitle"><span>%s %s</span></div>' % (
+            entity.dc_type(), ptitle and '(%s)' % ptitle)
+
+
+    def base_form(self, entity, kwargs):
+        output = []
+        for name, value, iid in self._hiddens:
+            if isinstance(value, basestring):
+                value = html_escape(value)
+            if iid:
+                output.append(u'<input id="%s" type="hidden" name="%s" value="%s" />'
+                              % (iid, name, value))
+            else:
+                output.append(u'<input type="hidden" name="%s" value="%s" />'
+                              % (name, value))
+        return u'\n'.join(output)
+                
+    def add_hidden_web_behaviour_params(self, entity):
+        """inserts hidden params controlling how errors and redirection
+        should be handled
+        """
+        req = self.req
+        self._hiddens.append( (u'__maineid', entity.eid, u'') )
+        self._hiddens.append( (u'__errorurl', req.url(), u'errorurl') )
+        self._hiddens.append( (u'__form_id', self.formid, u'') )
+        for param in NAV_FORM_PARAMETERS:
+            value = req.form.get(param)
+            if value:
+                self._hiddens.append( (param, value, u'') )
+        msg = self.submited_message()
+        # If we need to directly attach the new object to another one
+        for linkto in req.list_form_param('__linkto'):
+            self._hiddens.append( ('__linkto', linkto, '') )
+            msg = '%s %s' % (msg, self.req._('and linked'))
+        self._hiddens.append( ('__message', msg, '') )
+        
+    
+    def attributes_form(self, entity, kwargs, include_eid=True):
+        """create a form to edit entity's attributes"""
+        html = []
+        w = html.append
+        eid = entity.eid
+        wdg = entity.get_widget
+        lines = (wdg(rschema, x) for rschema, x in self.editable_attributes(entity))
+        if include_eid:
+            self._hiddens.append( ('eid', entity.eid, '') )
+        self._hiddens.append( (eid_param('__type', eid), entity.e_schema, '') )
+        w(u'<table id="%s" class="%s" style="width:100%%;">' %
+          (kwargs.get('tab_id', 'entityForm%s' % eid),
+           kwargs.get('tab_class', 'attributeForm')))
+        for widget in lines:
+            w(u'<tr>\n<th class="labelCol">%s</th>' % widget.render_label(entity))
+            error = widget.render_error(entity)
+            if error:
+                w(u'<td class="error" style="width:100%;">')
+            else:
+                w(u'<td style="width:100%;">')
+            if error:
+                w(error)
+            w(widget.edit_render(entity))
+            w(widget.render_help(entity))
+            w(u'</td>\n</tr>')
+        w(u'</table>')
+        return u'\n'.join(html)
+
+    def editable_attributes(self, entity):
+        # XXX both (add, delete)
+        return [(rschema, x) for rschema, _, x in entity.relations_by_category(('primary', 'secondary'), 'add')
+                if rschema != 'eid']
+    
+    def relations_form(self, entity, kwargs):
+        req = self.req
+        _ = self.req._
+        label = u'%s :' % _('This %s' % entity.e_schema).capitalize()
+        eid = entity.eid
+        html = []
+        pendings = list(self.restore_pending_inserts(entity))
+        w = html.append
+        w(u'<fieldset class="subentity">')
+        w(u'<legend class="iformTitle">%s</legend>' % label)
+        w(u'<table id="relatedEntities">')
+        for row in self.relations_table(entity):
+            if row[2]:
+                w(u'<tr><th class="labelCol">%s</th>' % row[0].display_name(req, row[1]))
+                w(u'<td>')
+                w(u'<ul>')
+                for viewparams in row[2]:
+                    w(u'<li class="invisible">%s<div id="span%s" class="%s">%s</div></li>'
+                      % (viewparams[1], viewparams[0], viewparams[2], viewparams[3]))
+                if not self.force_display and self.maxrelitems < len(row[2]):
+                    w(u'<li class="invisible">%s</li>' % self.force_display_link())
+                w(u'</ul>')
+                w(u'</td>')
+                w(u'</tr>')
+        if not pendings:
+            w(u'<tr><th>&nbsp;</th><td>&nbsp;</td></tr>')
+        else:
+            for row in pendings:
+                w(u'<tr id="tr%s">' % row[1])
+                w(u'<th>%s</th>' % row[3])
+                w(u'<td>')
+                w(u'<a class="handle" title="%s" href="%s">[x]</a>' %
+                  (_('cancel this insert'), row[2]))
+                w(u'<a id="a%s" class="editionPending" href="%s">%s</a>'
+                  % (row[1], row[4], html_escape(row[5])))
+                w(u'</td>')
+                w(u'</tr>')
+        w(u'<tr id="relationSelectorRow_%s" class="separator">' % eid)
+        w(u'<th class="labelCol">')
+        w(u'<span>%s</span>' % _('add relation'))
+        w(u'<select id="relationSelector_%s" tabindex="%s" onchange="javascript:showMatchingSelect(this.options[this.selectedIndex].value,%s);">'
+          % (eid, req.next_tabindex(), html_escape(dumps(eid))))
+        w(u'<option value="">%s</option>' % _('select a relation'))
+        for i18nrtype, rschema, target in entity.srelations_by_category(('generic', 'metadata'), 'add'):
+            w(u'<option value="%s_%s">%s</option>' % (rschema, target, i18nrtype))
+        w(u'</select>')
+        w(u'</th>')
+        w(u'<td id="unrelatedDivs_%s"></td>' % eid)
+        w(u'</tr>')
+        w(u'</table>')
+        w(u'</fieldset>')
+        return '\n'.join(html)
+        
+    def inline_entities_form(self, entity, kwargs):
+        """create a form to edit entity's inlined relations"""
+        result = []
+        _ = self.req._
+        for rschema, targettypes, x in entity.relations_by_category('inlineview', 'add'):
+            # show inline forms only if there's one possible target type
+            # for rschema
+            if len(targettypes) != 1:
+                self.warning('entity related by the %s relation should have '
+                             'inlined form but there is multiple target types, '
+                             'dunno what to do', rschema)
+                continue
+            targettype = targettypes[0].type
+            if self.should_inline_relation_form(entity, rschema, targettype, x):
+                result.append(u'<div id="inline%sslot">' % rschema)
+                existant = entity.has_eid() and entity.related(rschema)
+                # display inline-edition view for all existing related entities
+                result.append(self.view('inline-edition', existant, 'null',
+                                        ptype=entity.e_schema, peid=entity.eid,
+                                        rtype=rschema, role=x, **kwargs))
+                if x == 'subject':
+                    card = rschema.rproperty(entity.e_schema, targettype, 'cardinality')[0]
+                else:
+                    card = rschema.rproperty(targettype, entity.e_schema, 'cardinality')[1]
+                # there is no related entity and we need at least one : we need to
+                # display one explicit inline-creation view
+                if self.should_display_inline_relation_form(rschema, existant, card):
+                    result.append(self.view('inline-creation', None, etype=targettype,
+                                            peid=entity.eid, ptype=entity.e_schema,
+                                            rtype=rschema, role=x, **kwargs))
+                # we can create more than one related entity, we thus display a link
+                # to add new related entities
+                if self.should_display_add_inline_relation_link(rschema, existant, card):
+                    divid = "addNew%s%s%s:%s" % (targettype, rschema, x, entity.eid)
+                    result.append(u'<div class="inlinedform" id="%s" cubicweb:limit="true">'
+                                  % divid)
+                    js = "addInlineCreationForm('%s', '%s', '%s', '%s', '%s')" % (
+                        entity.eid, entity.e_schema, targettype, rschema, x)
+                    if card in '1?':
+                        js = "toggleVisibility('%s'); %s" % (divid, js)
+                    result.append(u'<a class="addEntity" id="add%s:%slink" href="javascript: %s" >+ %s.</a>'
+                                  % (rschema, entity.eid, js,
+                                     self.req.__('add a %s' % targettype)))
+                    result.append(u'</div>')
+                    result.append(u'<div class="trame_grise">&nbsp;</div>')
+                result.append(u'</div>')
+        return '\n'.join(result)
+
+    # should_* method extracted to allow overriding
+    
+    def should_inline_relation_form(self, entity, rschema, targettype, role):
+        return entity.rtags.is_inlined(rschema, targettype, role)
+
+    def should_display_inline_relation_form(self, rschema, existant, card):
+        return not existant and card in '1+'
+
+    def should_display_add_inline_relation_link(self, rschema, existant, card):
+        return not existant or card in '+*'
+    
    def reset_url(self, entity):
        """url to redirect to when the user cancels edition: the edited
        entity's primary view
        """
        return entity.absolute_url()
+    
    def on_submit(self, entity):
        """javascript snippet executed on form submission: freeze the
        form's buttons to prevent double submission
        """
        return u'return freezeFormButtons(\'%s\')' % (self.domid)
+
+
    def submited_message(self):
        """translated message displayed after an entity has been edited
        (the 'submited' typo is kept: the name is part of the interface)
        """
        return self.req._('element edited')
+
+
+    
class CreationForm(EditionForm):
    """creation form for an entity whose type is given by the `etype`
    keyword argument or form parameter
    """
    __selectors__ = (etype_form_selector, )
    id = 'creation'
    title = _('creation')

    def call(self, **kwargs):
        """creation view for an entity"""
        self.req.add_js( ('cubicweb.ajax.js', 'cubicweb.edition.js') )
        self.req.add_css('cubicweb.form.css')
        etype = kwargs.pop('etype', self.req.form.get('etype'))
        try:
            entity = self.vreg.etype_class(etype)(self.req, None, None)
        except Exception:
            # unknown entity type: display an error message instead of the
            # form (narrowed from a bare except, which would also swallow
            # SystemExit / KeyboardInterrupt)
            self.w(self.req._('no such entity type %s') % etype)
        else:
            self.edit_form(entity, kwargs)

    def action_title(self, entity):
        """custom form title if creating a entity with __linkto"""
        if '__linkto' in self.req.form:
            # __linkto values are of the form 'rtype:eid:role'
            if isinstance(self.req.form['__linkto'], list):
                # XXX which one should be considered (case: add a ticket to a version in jpl)
                rtype, linkto_eid, role = self.req.form['__linkto'][0].split(':')
            else:
                rtype, linkto_eid, role = self.req.form['__linkto'].split(':')
            linkto_rset = self.req.eid_rset(linkto_eid)
            linkto_type = linkto_rset.description[0][0]
            if role == 'subject':
                title = self.req.__('creating %s (%s %s %s %%(linkto)s)' % (
                    entity.e_schema, entity.e_schema, rtype, linkto_type))
            else:
                title = self.req.__('creating %s (%s %%(linkto)s %s %s)' % (
                    entity.e_schema, linkto_type, rtype, entity.e_schema))
            msg = title % {'linkto' : self.view('incontext', linkto_rset)}
            return u'<div class="formTitle notransform"><span>%s</span></div>' % msg
        else:
            return super(CreationForm, self).action_title(entity)

    @property
    def formid(self):
        # submit to the regular edition controller
        return 'edition'

    def relations_form(self, entity, kwargs):
        # an entity being created has no relations to edit yet
        return u''

    def reset_url(self, entity=None):
        # on cancel, go back to the list view for the entity type
        return self.build_url(self.req.form.get('etype', '').lower())

    def submited_message(self):
        """translated message displayed after creation"""
        return self.req._('element created')

    def url(self):
        """return the url associated with this view"""
        return self.create_url(self.req.form.get('etype'))
+
+
class InlineFormMixIn(object):
    """mixin handling the specifics of inline (sub-entity) forms: the
    edited entity is linked to a parent entity (`parent_eid`) through
    `rschema`, and the form is embedded in the parent's edition form
    """

    @cached
    def card(self, etype):
        # cardinality of the relation between the parent and `etype`,
        # seen from the parent's side
        return self.rschema.rproperty(self.parent_schema, etype, 'cardinality')[0]

    def action_title(self, entity):
        # the inlined form is titled with the relation's name
        return self.rschema.display_name(self.req, self.role)

    def add_hidden_web_behaviour_params(self, entity):
        # no-op: the parent form already holds the web behaviour params
        pass

    def edit_form(self, entity, ptype, peid, rtype,
                  role='subject', **kwargs):
        """store the relation context (relation schema, role played by the
        parent, parent's type and eid) then delegate to the regular form
        generation
        """
        self.rschema = self.schema.rschema(rtype)
        self.role = role
        self.parent_schema = self.schema.eschema(ptype)
        self.parent_eid = peid
        super(InlineFormMixIn, self).edit_form(entity, kwargs)

    def should_inline_relation_form(self, entity, rschema, targettype, role):
        # never inline the relation this form is already inlined through,
        # to avoid infinite recursion
        if rschema == self.rschema:
            return False
        return entity.rtags.is_inlined(rschema, targettype, role)

    @cached
    def keep_entity(self, entity):
        """tell whether the entity is still linked to its parent, i.e. was
        not removed by the user in a previous (erroneous) form submission
        """
        req = self.req
        # are we regenerating form because of a validation error ?
        erroneous_post = req.data.get('formvalues')
        if erroneous_post:
            cdvalues = req.list_form_param('%s:%s' % (self.rschema,
                                                      self.parent_eid),
                                           erroneous_post)
            if unicode(entity.eid) not in cdvalues:
                return False
        return True

    def form_context(self, entity, kwargs):
        """extend the template context with the inline-form specific keys
        (parent eid/type, relation, role, i18n'ed messages...)
        """
        ctx = super(InlineFormMixIn, self).form_context(entity, kwargs)
        # NOTE(review): the '_' local below is unused
        _ = self.req._
        local_ctx = {'createmsg' : self.req.__('add a %s' % entity.e_schema),
                     'so': self.role[0], # 's' for subject, 'o' for object
                     'eid' : entity.eid,
                     'rtype' : self.rschema,
                     'parenteid' : self.parent_eid,
                     'parenttype' : self.parent_schema,
                     'etype' : entity.e_schema,
                     'novalue' : INTERNAL_FIELD_VALUE,
                     'removemsg' : self.req.__('remove this %s' % entity.e_schema),
                     'notice' : self.req._('click on the box to cancel the deletion'),
                     }
        ctx.update(local_ctx)
        return ctx
+
+
class InlineEntityCreationForm(InlineFormMixIn, CreationForm):
    """creation form for a sub-entity, embedded inline in its parent
    entity's edition form
    """
    id = 'inline-creation'
    __selectors__ = (kwargs_selector, etype_form_selector)
    expected_kwargs = ('ptype', 'peid', 'rtype')

    EDITION_BODY = u'''\
<div id="div-%(parenteid)s-%(rtype)s-%(eid)s" class="inlinedform">
 <div class="iformBody">
 <div class="iformTitle"><span>%(title)s</span> #<span class="icounter">1</span> [<a href="javascript: removeInlineForm('%(parenteid)s', '%(rtype)s', '%(eid)s'); noop();">%(removemsg)s</a>]</div>
 <fieldset class="subentity">
 %(attrform)s
 %(relattrform)s
 </fieldset>
 </div>
 <fieldset class="hidden" id="fs-%(parenteid)s-%(rtype)s-%(eid)s">
%(base)s
 <input type="hidden" value="%(novalue)s" name="edit%(so)s-%(rtype)s:%(parenteid)s" />
 <input id="rel-%(parenteid)s-%(rtype)s-%(eid)s" type="hidden" value="%(eid)s" name="%(rtype)s:%(parenteid)s" />
 </fieldset>
</div>''' # do not insert trailing space or \n here !

    def call(self, etype, ptype, peid, rtype, role='subject', **kwargs):
        """
        :param etype: the entity type being created in the inline form
        :param parent: the parent entity hosting the inline form
        :param rtype: the relation bridging `etype` and `parent`
        :param role: the role played by the `parent` in the relation
        """
        self.req.add_css('cubicweb.form.css')
        try:
            entity = self.vreg.etype_class(etype)(self.req, None, None)
        except Exception:
            # unknown entity type: narrowed from a bare except, which would
            # also swallow SystemExit / KeyboardInterrupt
            self.w(self.req._('no such entity type %s') % etype)
            return
        self.edit_form(entity, ptype, peid, rtype, role, **kwargs)
+    
+    
+
+
class InlineEntityEditionForm(InlineFormMixIn, EditionForm):
    """edition form for an already related sub-entity, embedded inline in
    the parent entity's edition form
    """
    id = 'inline-edition'
    __selectors__ = (accept_selector, kwargs_selector)
    expected_kwargs = ('ptype', 'peid', 'rtype')

    EDITION_BODY = u'''\
<div onclick="restoreInlinedEntity('%(parenteid)s', '%(rtype)s', '%(eid)s')" id="div-%(parenteid)s-%(rtype)s-%(eid)s" class="inlinedform">   
<div id="notice-%(parenteid)s-%(rtype)s-%(eid)s" class="notice">%(notice)s</div>
<div class="iformTitle"><span>%(title)s</span>  #<span class="icounter">%(count)s</span> [<a href="javascript: removeInlinedEntity('%(parenteid)s', '%(rtype)s', '%(eid)s'); noop();">%(removemsg)s</a>]</div>
 <div class="iformBody">
 <fieldset class="subentity">
 %(attrform)s
 </fieldset>
 %(relattrform)s
 </div>
 <fieldset id="fs-%(parenteid)s-%(rtype)s-%(eid)s">
%(base)s
 <input type="hidden" value="%(eid)s" name="edit%(so)s-%(rtype)s:%(parenteid)s" />
 %(rinput)s
 </fieldset>
</div>''' # do not insert trailing space or \n here !

    # hidden input materializing the relation to the parent; omitted when
    # the entity was unlinked in a previous erroneous post
    rel_input = u'''<input id="rel-%(parenteid)s-%(rtype)s-%(eid)s" type="hidden" value="%(eid)s" name="%(rtype)s:%(parenteid)s" />'''

    def call(self, **kwargs):
        """redefine default View.call() method to avoid automatic
        insertions of <div class="section"> between each row of
        the resultset
        """
        self.req.add_css('cubicweb.form.css')
        rset = self.rset
        for i in xrange(len(rset)):
            self.wview(self.id, rset, row=i, **kwargs)

    def cell_call(self, row, col, ptype, peid, rtype, role='subject', **kwargs):
        """
        :param parent: the parent entity hosting the inline form
        :param rtype: the relation bridging `etype` and `parent`
        :param role: the role played by the `parent` in the relation
        """
        entity = self.entity(row, col)
        self.edit_form(entity, ptype, peid, rtype, role, **kwargs)

    def form_context(self, entity, kwargs):
        """add to the template context the relation input (or the
        'to delete' checkbox state when the entity was unlinked in an
        erroneous post) and the 1-based position of the entity in the
        result set
        """
        ctx = super(InlineEntityEditionForm, self).form_context(entity, kwargs)
        if self.keep_entity(entity):
            # entity is still linked to its parent: emit the relation input
            ctx['rinput'] = self.rel_input % ctx
            ctx['todelete'] = u''
        else:
            ctx['rinput'] = u''
            ctx['todelete'] = u'checked="checked"'
        ctx['count'] = entity.row + 1
        return ctx
+    
+    
+
class CopyEditionForm(EditionForm):
    """edition form pre-filled from a (shallow) copy of an existing
    entity; only attributes are copied, not relations
    """
    id = 'copy'
    title = _('copy edition')

    def cell_call(self, row, col, **kwargs):
        self.req.add_js(('cubicweb.ajax.js', 'cubicweb.edition.js'))
        self.req.add_css('cubicweb.form.css')
        entity = self.complete_entity(row, col, skip_bytes=True)
        # make a copy of entity to avoid altering the entity in the
        # request's cache.
        self.newentity = copy(entity)
        # remember the eid of the entity being copied, then clear the
        # copy's eid so the form creates a new entity on submission
        self.copying = self.newentity.eid
        self.newentity.eid = None
        self.edit_form(self.newentity, kwargs)
        del self.newentity

    def action_title(self, entity):
        """form's title"""
        msg = super(CopyEditionForm, self).action_title(entity)
        return msg + (u'<script type="text/javascript">updateMessage("%s");</script>\n'
                      % self.req._('Please note that this is only a shallow copy'))
        # XXX above message should have style of a warning

    @property
    def formid(self):
        # submit to the regular edition controller
        return 'edition'

    def relations_form(self, entity, kwargs):
        # relations are not copied, hence nothing to display here
        return u''

    def reset_url(self, entity):
        # on cancel, go back to the entity being copied
        return self.build_url('view', rql='Any X WHERE X eid %s' % self.copying)

    def attributes_form(self, entity, kwargs, include_eid=True):
        # we don't want __cloned_eid on inlined edited entities, only on
        # the copied entity itself
        # NOTE(review): this relies on self.newentity.eid being None here;
        # confirm inlined entities being created (also eid None) can never
        # reach this comparison
        if entity.eid == self.newentity.eid:
            self._hiddens.append((eid_param('__cloned_eid', entity.eid), self.copying, ''))
        return EditionForm.attributes_form(self, entity, kwargs, include_eid)

    def submited_message(self):
        """translated message displayed after the copy has been created"""
        return self.req._('element copied')
+       
+    
+
class TableEditForm(EntityForm):
    """form to edit multiple entities of the same type, displayed as a
    table with one row per entity and a checkbox selecting which rows
    are submitted
    """
    id = 'muledit'
    title = _('multiple edit')

    EDITION_BODY = u'''<form method="post" id="entityForm" onsubmit="return validateForm('entityForm', null);" action="%(action)s">
  %(error)s
  <div id="progress">%(progress)s</div>
  <fieldset>
  <input type="hidden" name="__errorurl" value="%(url)s" />
  <input type="hidden" name="__form_id" value="%(formid)s" />
  <input type="hidden" name="__redirectvid" value="%(redirectvid)s" />
  <input type="hidden" name="__redirectrql" value="%(redirectrql)s" />
  <table class="listing">
    <tr class="header">
      <th align="left"><input type="checkbox" onclick="setCheckboxesState('eid', this.checked)" value="" title="toggle check boxes" /></th>
      %(attrheaders)s
    </tr>
    %(lines)s
  </table>
  <table width="100%%">
    <tr>
      <td align="left">
        <input class="validateButton" type="submit"  value="%(okvalue)s" title="%(oktitle)s" />
        <input class="validateButton" type="reset" name="__action_cancel" value="%(cancelvalue)s" title="%(canceltitle)s" />
      </td>
    </tr>
  </table>
  </fieldset>    
</form>
'''

    WIDGET_CELL = u'''\
<td%(csscls)s>
  %(error)s
  <div>%(widget)s</div>
</td>'''

    def call(self, **kwargs):
        """a view to edit multiple entities of the same type
        the first column should be the eid
        """
        req = self.req
        form = req.form
        req.add_js('cubicweb.edition.js')
        req.add_css('cubicweb.form.css')
        _ = req._
        # use the first entity as a sample to build the column headers
        sampleentity = self.complete_entity(0)
        attrheaders = [u'<th>%s</th>' % rdef[0].display_name(req, rdef[-1])
                       for rdef in sampleentity.relations_by_category('primary', 'add')
                       if rdef[0].type != 'eid']
        ctx = {'action' : self.build_url('edit'),
               'error': self.error_message(),
               'progress': _('validating...'),
               'url': html_escape(req.url()),
               'formid': self.id,
               'redirectvid': html_escape(form.get('__redirectvid', 'list')),
               'redirectrql': html_escape(form.get('__redirectrql', self.rset.printable_rql())),
               'attrheaders': u'\n'.join(attrheaders),
               'lines': u'\n'.join(self.edit_form(ent) for ent in self.rset.entities()),
               'okvalue': _('button_ok').capitalize(),
               'oktitle': _('validate modifications on selected items').capitalize(),
               'cancelvalue': _('button_reset').capitalize(),
               'canceltitle': _('revert changes').capitalize(),
               }
        self.w(self.EDITION_BODY % ctx)

    def reset_url(self, entity=None):
        """url to redirect to on form reset: redisplay the edited result
        set (bug fix: the url was computed but never returned)
        """
        return self.build_url('view', rql=self.rset.printable_rql())

    def edit_form(self, entity):
        """return the html of one table row editing `entity`"""
        html = []
        w = html.append
        entity.complete()
        eid = entity.eid
        values = self.req.data.get('formvalues', ())
        qeid = eid_param('eid', eid)
        checked = qeid in values
        w(u'<tr class="%s">' % (entity.row % 2 and u'even' or u'odd'))
        w(u'<td>%s<input type="hidden" name="__type:%s" value="%s" /></td>'
          % (checkbox('eid', eid, checked=checked), eid, entity.e_schema))
        # attribute relations (skip eid which is handled by the checkbox)
        wdg = entity.get_widget
        wdgfactories = [wdg(rschema, x) for rschema, _, x in entity.relations_by_category('primary', 'add')
                        if rschema.type != 'eid'] # XXX both (add, delete)
        seid = html_escape(dumps(eid))
        for wobj in wdgfactories:
            # check the row's checkbox as soon as one of its widgets is edited
            if isinstance(wobj, ComboBoxWidget):
                wobj.attrs['onchange'] = "setCheckboxesState2('eid', %s, 'checked')" % seid
            elif isinstance(wobj, InputWidget):
                wobj.attrs['onkeypress'] = "setCheckboxesState2('eid', %s, 'checked')" % seid
            error = wobj.render_error(entity)
            if error:
                csscls = u' class="error"'
            else:
                csscls = u''
            w(self.WIDGET_CELL % {'csscls': csscls, 'error': error,
                                  'widget': wobj.edit_render(entity)})
        w(u'</tr>')
        return '\n'.join(html)
+        
+
class UnrelatedDivs(EntityView):
    """generate the <div> holding a <select> of candidate (unrelated)
    entities that may be linked to the edited entity through the relation
    given by the 'relation' form parameter ('<rtype>_<target>')
    """
    id = 'unrelateddivs'
    __selectors__ = (req_form_params_selector,)
    form_params = ('relation',)

    @property
    def limit(self):
        # maximum number of proposed entities (+1 so callers can detect
        # truncation), unless __force_display asks for everything
        if self.req.form.get('__force_display'):
            return None
        return self.req.property_value('navigation.related-limit') + 1

    def cell_call(self, row, col):
        entity = self.entity(row, col)
        # 'relation' is of the form '<rtype>_<target role>'
        relname, target = self.req.form.get('relation').rsplit('_', 1)
        rschema = self.schema.rschema(relname)
        hidden = 'hidden' in self.req.form
        is_cell = 'is_cell' in self.req.form
        self.w(self.build_unrelated_select_div(entity, rschema, target,
                                               is_cell=is_cell, hidden=hidden))

    def build_unrelated_select_div(self, entity, rschema, target,
                                   is_cell=False, hidden=True):
        """return a <div> with a <select> listing vocabulary entities,
        search options and (when available) basket options for the
        relation
        """
        options = []
        divid = 'div%s_%s_%s' % (rschema.type, target, entity.eid)
        selectid = 'select%s_%s_%s' % (rschema.type, target, entity.eid)
        if rschema.symetric or target == 'subject':
            targettypes = rschema.objects(entity.e_schema)
            etypes = '/'.join(sorted(etype.display_name(self.req) for etype in targettypes))
        else:
            targettypes = rschema.subjects(entity.e_schema)
            etypes = '/'.join(sorted(etype.display_name(self.req) for etype in targettypes))
        etypes = cut(etypes, self.req.property_value('navigation.short-line-size'))
        options.append('<option>%s %s</option>' % (self.req._('select a'), etypes))
        options += self._get_select_options(entity, rschema, target)
        options += self._get_search_options(entity, rschema, target, targettypes)
        if 'Basket' in self.schema: # XXX
            options += self._get_basket_options(entity, rschema, target, targettypes)
        # NOTE(review): this rebinds `target`, shadowing the method's
        # parameter with the value from the request form -- confirm both
        # always agree
        relname, target = self.req.form.get('relation').rsplit('_', 1)
        return u"""\
<div class="%s" id="%s">
  <select id="%s" onchange="javascript: addPendingInsert(this.options[this.selectedIndex], %s, %s, '%s');">
    %s
  </select>
</div>
""" % (hidden and 'hidden' or '', divid, selectid, html_escape(dumps(entity.eid)),
       is_cell and 'true' or 'null', relname, '\n'.join(options))

    def _get_select_options(self, entity, rschema, target):
        """build <option>s from the entity's vocabulary for the relation,
        skipping insertions already pending in the session
        """
        options = []
        eid = entity.eid
        pending_inserts = self.req.get_pending_inserts(eid)
        rtype = rschema.type
        for eview, reid in entity.vocabulary(rschema, target, self.limit):
            if reid is None:
                # a None eid denotes a separator line in the vocabulary
                options.append('<option class="separator">-- %s --</option>' % html_escape(eview))
            else:
                optionid = relation_id(eid, rtype, target, reid)
                if optionid not in pending_inserts:
                    # prefix option's id with letters to make valid XHTML wise
                    options.append('<option id="id%s" value="%s">%s</option>' %
                                   (optionid, reid, html_escape(eview)))
        return options

    def _get_search_options(self, entity, rschema, target, targettypes):
        """add options to search among all entities of each possible type"""
        options = []
        _ = self.req._
        for eschema in targettypes:
            mode = '%s:%s:%s:%s' % (target, entity.eid, rschema.type, eschema)
            url = self.build_url(entity.rest_path(), vid='search-associate',
                                 __mode=mode)
            options.append((eschema.display_name(self.req),
                            '<option value="%s">%s %s</option>' % (
                html_escape(url), _('Search for'), eschema.display_name(self.req))))
        # sort by the displayed entity type name
        return [o for l, o in sorted(options)]

    def _get_basket_options(self, entity, rschema, target, targettypes):
        """add one option per user basket whose items may be linked to the
        edited entity
        """
        options = []
        rtype = rschema.type
        _ = self.req._
        for basketeid, basketname in self._get_basket_links(self.req.user.eid,
                                                            target, targettypes):
            optionid = relation_id(entity.eid, rtype, target, basketeid)
            options.append('<option id="%s" value="%s">%s %s</option>' % (
                optionid, basketeid, _('link to each item in'), html_escape(basketname)))
        return options

    def _get_basket_links(self, ueid, target, targettypes):
        """yield (eid, name) for each basket of user `ueid` whose content
        is compatible with the acceptable target types
        """
        targettypes = set(targettypes)
        for basketeid, basketname, elements in self._get_basket_info(ueid):
            baskettypes = elements.column_types(0)
            # if every elements in the basket can be attached to the
            # edited entity
            # NOTE(review): the test below checks a non-empty intersection,
            # not that *every* element matches as the comment above claims
            if baskettypes & targettypes:
                yield basketeid, basketname

    def _get_basket_info(self, ueid):
        """return a list of (eid, name, items rset) for each basket owned
        by user `ueid`
        """
        basketref = []
        basketrql = 'Any B,N WHERE B is Basket, B owned_by U, U eid %(x)s, B name N'
        basketresultset = self.req.execute(basketrql, {'x': ueid}, 'x')
        for result in basketresultset:
            basketitemsrql = 'Any X WHERE X in_basket B, B eid %(x)s'
            rset = self.req.execute(basketitemsrql, {'x': result[0]}, 'x')
            basketref.append((result[0], result[1], rset))
        return basketref
+
+
class ComboboxView(EntityView):
    """render an entity the way it should appear inside a combobox
    listing unrelated entities

    THIS IS A TEXT VIEW. DO NOT HTML_ESCAPE
    """
    id = 'combobox'
    accepts = ('Any',)
    title = None

    def cell_call(self, row, col):
        """by default, render the entity exactly as the text
        out-of-context view does
        """
        delegated_vid = 'textoutofcontext'
        self.wview(delegated_vid, self.rset, row=row, col=col)
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/basetemplates.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,516 @@
+# -*- coding: utf-8 -*-
+"""default templates for CubicWeb web client
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from logilab.mtconverter import html_escape
+
+from cubicweb import NoSelectableObject, ObjectNotFound
+from cubicweb.common.view import Template, MainTemplate,  NOINDEX, NOFOLLOW
+from cubicweb.common.selectors import nfentity_selector
+from cubicweb.common.utils import make_uid
+
+from cubicweb.web.views.baseviews import vid_from_rset
+
+# main templates ##############################################################
+
+
class LogInOutTemplate(MainTemplate):
    """base template for the login and logged-out pages: a minimal html
    document whose body content is delegated to subclasses through
    `content(w)`
    """

    def call(self):
        self.set_request_content_type()
        w = self.w
        self.write_doctype()
        # removed unused local: lang = self.req.lang
        self.template_header('text/html', self.req._('login_action'))
        w(u'<body>\n')
        self.content(w)
        w(u'</body>')

    def template_header(self, content_type, view=None, page_title='', additional_headers=()):
        """minimal html header, explicitly excluded from search engine
        indexing (NOINDEX/NOFOLLOW)
        """
        w = self.whead
        # explictly close the <base> tag to avoid IE 6 bugs while browsing DOM
        w(u'<base href="%s"></base>' % html_escape(self.req.base_url()))
        w(u'<meta http-equiv="content-type" content="%s; charset=%s"/>\n'
          % (content_type, self.req.encoding))
        w(NOINDEX)
        w(NOFOLLOW)
        w(u'\n'.join(additional_headers) + u'\n')
        self.template('htmlheader', rset=self.rset)
        w(u'<title>%s</title>\n' % html_escape(page_title))
+        
+
class LogInTemplate(LogInOutTemplate):
    """page displaying the login form"""
    id = 'login'
    title = 'log in'

    def content(self, w):
        # body content: delegate to the 'logform' template, outside any box
        self.template('logform', rset=self.rset, id='loginBox', klass='')
+        
+
class LoggedOutTemplate(LogInOutTemplate):
    """page displayed after the user logged out"""
    id = 'loggedout'
    title = 'logged out'

    def content(self, w):
        # confirmation message, plus a link back to the index page when
        # anonymous access is enabled
        msg = self.req._('you have been logged out')
        w(u'<h1 class="noborder">%s</h1>\n' % msg)
        if self.config['anonymous-user']:
            indexurl = self.build_url('view', vid='index', __message=msg)
            w(u'<p><a href="%s">%s</a><p>' % (
                html_escape(indexurl),
                self.req._('go back to the index page')))
+
+        
+class TheMainTemplate(MainTemplate):
+    """default main template :
+    
+    - call header / footer templates
+    - build result set
+    - guess and call an appropriate view through the view manager
+    """
+    id = 'main'
+
+    def _select_view_and_rset(self):
+        req = self.req
+        if self.rset is None and not hasattr(req, '_rql_processed'):
+            req._rql_processed = True
+            rset = self.process_rql(req.form.get('rql'))
+        else:
+            rset = self.rset
+        # handle special "method" param when necessary
+        # XXX this should probably not be in the template (controller ?), however:
+        #     * we need to have the displayed rset
+        #     * we don't want to handle it in each view
+        if rset and rset.rowcount == 1 and '__method' in req.form:
+            entity = rset.get_entity(0, 0)
+            try:
+                method = getattr(entity, req.form.pop('__method'))
+                method()
+            except Exception, ex:
+                self.exception('while handling __method')
+                req.set_message(req._("error while handling __method: %s") % req._(ex))
+        vid = req.form.get('vid') or vid_from_rset(req, rset, self.schema)
+        try:
+            view = self.vreg.select_view(vid, req, rset)
+        except ObjectNotFound:
+            self.warning("the view %s could not be found", vid)
+            req.set_message(req._("The view %s could not be found") % vid)
+            vid = vid_from_rset(req, rset, self.schema)
+            view = self.vreg.select_view(vid, req, rset)
+        except NoSelectableObject:
+            if rset:
+                req.set_message(req._("The view %s can not be applied to this query") % vid)
+            else:
+                req.set_message(req._("You have no access to this view or it's not applyable to current data"))
+            self.warning("the view %s can not be applied to this query", vid)
+            vid = vid_from_rset(req, rset, self.schema)
+            view = self.vreg.select_view(vid, req, rset)
+        return view, rset
+    
+    def call(self):
+        view, rset = self._select_view_and_rset()
+        req = self.req
+        # update breadcrumps **before** validating cache, unless the view
+        # specifies explicitly it should not be added to breadcrumb or the
+        # view is a binary view
+        if view.add_to_breadcrumbs and not view.binary:
+            req.update_breadcrumbs()
+        view.set_http_cache_headers()
+        req.validate_cache()
+        with_templates = not view.binary and view.templatable and \
+                         not req.form.has_key('__notemplate')
+        if not with_templates:
+            view.set_request_content_type()
+            self.set_stream(templatable=False)
+        else:
+            self.set_request_content_type()
+            content_type = self.content_type
+            self.template_header(content_type, view)
+        if view.binary:
+            # have to replace our unicode stream using view's binary stream
+            view.dispatch()
+            assert self._stream, 'duh, template used as a sub-view ?? (%s)' % self._stream
+            self._stream = view._stream
+        else:
+            view.dispatch(w=self.w)
+        if with_templates:
+            self.template_footer(view)
+
+            
+    def process_rql(self, rql):
+        """execute rql if specified"""
+        if rql:
+            self.ensure_ro_rql(rql)
+            if not isinstance(rql, unicode):
+                rql = unicode(rql, self.req.encoding)
+            pp = self.vreg.select_component('magicsearch', self.req)
+            self.rset = pp.process_query(rql, self.req)
+            return self.rset
+        return None
+
+    def template_header(self, content_type, view=None, page_title='', additional_headers=()):
+        page_title = page_title or view.page_title()
+        additional_headers = additional_headers or view.html_headers()
+        self.template_html_header(content_type, page_title, additional_headers)
+        self.template_body_header(view)
+        # display entity type restriction component
+        etypefilter = self.vreg.select_component('etypenavigation',
+                                                 self.req, self.rset)
+        if etypefilter and etypefilter.propval('visible'):
+            etypefilter.dispatch(w=self.w)
+        self.pagination(self.req, self.rset, self.w, not view.need_navigation)
+        self.w(u'<div id="contentmain">\n')
+    
+    def template_html_header(self, content_type, page_title, additional_headers=()):
+        w = self.whead
+        lang = self.req.lang
+        self.write_doctype()
+        w(u'<base href="%s" />' % html_escape(self.req.base_url()))
+        w(u'<meta http-equiv="content-type" content="%s; charset=%s"/>\n'
+          % (content_type, self.req.encoding))
+        w(u'\n'.join(additional_headers) + u'\n')
+        self.template('htmlheader', rset=self.rset)
+        if page_title:
+            w(u'<title>%s</title>\n' % html_escape(page_title))
+
+    def template_body_header(self, view):
+        w = self.w
+        w(u'<body>\n')
+        self.template('header', rset=self.rset, view=view)
+        w(u'<div id="page"><table width="100%" border="0" id="mainLayout"><tr>\n')
+        self.nav_column(view, 'left')
+        w(u'<td id="contentcol">\n')
+        rqlcomp = self.vreg.select_component('rqlinput', self.req, self.rset)
+        if rqlcomp:
+            rqlcomp.dispatch(w=self.w, view=view)
+        msgcomp = self.vreg.select_component('applmessages', self.req, self.rset)
+        if msgcomp:
+            msgcomp.dispatch(w=self.w)
+        self.content_header(view)
+        w(u'<div id="pageContent">\n')
+        vtitle = self.req.form.get('vtitle')
+        if vtitle:
+            w(u'<h1 class="vtitle">%s</h1>\n' % vtitle)
+            
+    def template_footer(self, view=None):
+        self.w(u'</div>\n') # close id=contentmain
+        self.w(u'</div>\n') # closes id=pageContent
+        self.content_footer(view)
+        self.w(u'</td>\n')
+        self.nav_column(view, 'right')
+        self.w(u'</tr></table></div>\n')
+        self.template('footer', rset=self.rset)
+        self.w(u'</body>')
+
+    def nav_column(self, view, context):
+        boxes = list(self.vreg.possible_vobjects('boxes', self.req, self.rset,
+                                                 view=view, context=context))
+        if boxes:
+            self.w(u'<td class="navcol"><div class="navboxes">\n')
+            for box in boxes:
+                box.dispatch(w=self.w, view=view)
+            self.w(u'</div></td>\n')
+
+    def content_header(self, view=None):
+        """by default, display informal messages in content header"""
+        self.template('contentheader', rset=self.rset, view=view)
+            
+    def content_footer(self, view=None):
+        self.template('contentfooter', rset=self.rset, view=view)
+
+
class ErrorTemplate(TheMainTemplate):
    """fallback template if an internal error occured during displaying the
    main template. This template may be called for authentication error,
    which means that req.cnx and req.user may not be set.
    """
    id = 'error'

    def call(self):
        """display an unexpected error"""
        self.set_request_content_type()
        self.req.reset_headers()
        view = self.vreg.select_view('error', self.req, self.rset)
        # NB: 'an error occured' is kept as-is (typo included): it is a
        # translation catalog key
        self.template_header(self.content_type, view, self.req._('an error occured'),
                             [NOINDEX, NOFOLLOW])
        view.dispatch(w=self.w)
        self.template_footer(view)

    def template_header(self, content_type, view=None, page_title='', additional_headers=()):
        """minimal header: no boxes, no rql input, no pagination; the page
        must be renderable even without an established connection
        """
        w = self.whead
        # removed unused local: lang = self.req.lang
        self.write_doctype()
        w(u'<meta http-equiv="content-type" content="%s; charset=%s"/>\n'
          % (content_type, self.req.encoding))
        w(u'\n'.join(additional_headers))
        self.template('htmlheader', rset=self.rset)
        w(u'<title>%s</title>\n' % html_escape(page_title))
        self.w(u'<body>\n')

    def template_footer(self, view=None):
        # only close the body opened by template_header
        self.w(u'</body>')
+
+
class SimpleMainTemplate(TheMainTemplate):
    """main template without the top application header: only a left
    column holding the logo and boxes, next to the page content
    """
    id = 'main-no-top'

    def template_header(self, content_type, view=None, page_title='', additional_headers=()):
        page_title = page_title or view.page_title()
        additional_headers = additional_headers or view.html_headers()
        whead = self.whead
        # removed unused local: lang = self.req.lang
        self.write_doctype()
        whead(u'<meta http-equiv="content-type" content="%s; charset=%s"/>\n'
              % (content_type, self.req.encoding))
        whead(u'\n'.join(additional_headers) + u'\n')
        self.template('htmlheader', rset=self.rset)
        w = self.w
        w(u'<title>%s</title>\n' % html_escape(page_title))
        w(u'<body>\n')
        w(u'<div id="page">')
        w(u'<table width="100%" height="100%" border="0"><tr>\n')
        w(u'<td class="navcol">\n')
        self.topleft_header()
        boxes = list(self.vreg.possible_vobjects('boxes', self.req, self.rset,
                                                 view=view, context='left'))
        if boxes:
            w(u'<div class="navboxes">\n')
            for box in boxes:
                box.dispatch(w=w)
            # use the locally bound writer consistently (was self.w, which
            # is the same bound method)
            w(u'</div>\n')
        w(u'</td>')
        w(u'<td id="contentcol" rowspan="2">')
        w(u'<div id="pageContent">\n')
        vtitle = self.req.form.get('vtitle')
        if vtitle:
            w(u'<h1 class="vtitle">%s</h1>' % (vtitle))

    def topleft_header(self):
        """display the logo component in the top-left corner"""
        self.w(u'<table id="header"><tr>\n')
        self.w(u'<td>')
        self.vreg.select_component('logo', self.req, self.rset).dispatch(w=self.w)
        self.w(u'</td>\n')
        self.w(u'</tr></table>\n')
+
+# page parts templates ########################################################
+
+class HTMLHeader(Template):
+    """default html headers: favicon, stylesheets, javascripts, alternate
+    links (rss) and the page id javascript variable
+    """
+    id = 'htmlheader'
+    
+    def call(self, **kwargs):
+        self.favicon()
+        self.stylesheets()
+        self.javascripts()
+        self.alternates()
+        self.pageid()
+
+    def favicon(self):
+        """emit a shortcut icon link if a FAVICON resource is configured"""
+        favicon = self.req.external_resource('FAVICON', None)
+        if favicon:
+            self.whead(u'<link rel="shortcut icon" href="%s"/>\n' % favicon)
+            
+    def stylesheets(self):
+        """register the configured screen, print and IE-only stylesheets"""
+        req = self.req
+        add_css = req.add_css
+        for css in req.external_resource('STYLESHEETS'):
+            add_css(css, localfile=False)
+        for css in req.external_resource('STYLESHEETS_PRINT'):
+            add_css(css, u'print', localfile=False)
+        for css in req.external_resource('IE_STYLESHEETS'):
+            add_css(css, localfile=False, ieonly=True)
+        
+    def javascripts(self):
+        """register the configured javascript files"""
+        for jscript in self.req.external_resource('JAVASCRIPTS'):
+            self.req.add_js(jscript, localfile=False)
+            
+    def alternates(self):
+        """emit an rss alternate link when the result set is suitable"""
+        # nfentity_selector is used by the rss icon box as well
+        if nfentity_selector(self, self.req, self.rset):
+            url = self.build_url(rql=self.limited_rql(), vid='rss')
+            self.whead(u'<link rel="alternate" type="application/rss+xml" title="RSS feed" href="%s"/>\n'
+                   % html_escape(url))
+
+    def pageid(self):
+        """generate a unique page id and expose it to javascript"""
+        req = self.req
+        pid = make_uid(id(req))
+        req.pageid = pid
+        req.html_headers.define_var('pageid', pid);
+
+
+class HTMLPageHeader(Template):
+    """default html page header"""
+    id = 'header'
+    
+    def call(self, view, **kwargs):
+        self.main_header(view)
+        self.w(u'''
+  <div id="stateheader">''')
+        self.state_header()
+        self.w(u'''
+  </div>
+  ''')
+        
+    def main_header(self, view):
+        """build the top menu with authentication info and the rql box"""
+        self.w(u'<table id="header"><tr>\n')
+        self.w(u'<td id="firstcolumn">')
+        self.vreg.select_component('logo', self.req, self.rset).dispatch(w=self.w)
+        self.w(u'</td>\n')
+        # appliname and breadcrumbs
+        self.w(u'<td id="headtext">')
+        comp = self.vreg.select_component('appliname', self.req, self.rset)
+        if comp and comp.propval('visible'):
+            comp.dispatch(w=self.w)
+        comp = self.vreg.select_component('breadcrumbs', self.req, self.rset, view=view)
+        if comp and comp.propval('visible'):
+            comp.dispatch(w=self.w, view=view)
+        self.w(u'</td>')
+        # logged user and help
+        self.w(u'<td>\n')
+        comp = self.vreg.select_component('loggeduserlink', self.req, self.rset)
+        comp.dispatch(w=self.w)
+        self.w(u'</td><td>')
+        helpcomp = self.vreg.select_component('help', self.req, self.rset)
+        if helpcomp: # may not be available if Card is not defined in the schema
+            helpcomp.dispatch(w=self.w)
+        self.w(u'</td>')
+        # lastcolumn
+        self.w(u'<td id="lastcolumn">')
+        self.w(u'</td>\n')
+        self.w(u'</tr></table>\n')
+        # hidden login box, toggled by the loggeduserlink component
+        self.template('logform', rset=self.rset, id='popupLoginBox', klass='hidden',
+                      title=False, message=False)
+        
+    def state_header(self):
+        """display a message while in link-search mode, nothing otherwise"""
+        state = self.req.search_state
+        if state[0] == 'normal':
+            return
+        _ = self.req._
+        # state[1] holds (eid, ..., rtype, etype) describing the pending
+        # "search entity to link" operation
+        value = self.view('oneline', self.req.eid_rset(state[1][1]))
+        msg = ' '.join((_("searching for"),
+                        display_name(self.req, state[1][3]),
+                        _("to associate with"), value,
+                        _("by relation"), '"', 
+                        display_name(self.req, state[1][2], state[1][0]),
+                        '"'))
+        return self.w(u'<div class="stateMessage">%s</div>' % msg)
+
+
+
+class HTMLPageFooter(Template):
+    """default html page footer: include logo if any, and close the HTML body
+    """
+    id = 'footer'
+    
+    def call(self, **kwargs):
+        """render changelog / about links and the copyright notice"""
+        req = self.req
+        self.w(u'<div class="footer">')
+        # XXX Take object from the registry if in there? would be
+        #     better anyway
+        from cubicweb.web.views.wdoc import ChangeLogView
+        self.w(u'<a href="%s">%s</a> | ' % (req.build_url('changelog'),
+                                            req._(ChangeLogView.title).lower()))
+        self.w(u'<a href="%s">%s</a> | ' % (req.build_url('doc/about'),
+                                            req._('about this site')))
+        self.w(u'© 2001-2008 <a href="http://www.logilab.fr">Logilab S.A.</a>')
+        self.w(u'</div>')
+
+
+class HTMLContentHeader(Template):
+    """default html page content header:
+    * include message component if selectable for this request
+    * include selectable content navigation components
+    """
+    id = 'contentheader'
+    
+    def call(self, view, **kwargs):
+        """by default, display informal messages in content header"""
+        components = self.vreg.possible_vobjects('contentnavigation',
+                                                 self.req, self.rset,
+                                                 view=view, context='navtop')
+        if components:
+            self.w(u'<div id="contentheader">')
+            for comp in components:
+                comp.dispatch(w=self.w, view=view)
+            self.w(u'</div><div class="clear"></div>')
+
+
+class HTMLContentFooter(Template):
+    """default html page content footer: include selectable content navigation
+    components
+    """
+    id = 'contentfooter'
+    
+    def call(self, view, **kwargs):
+        """render 'navbottom' content navigation components, if any"""
+        components = self.vreg.possible_vobjects('contentnavigation',
+                                                 self.req, self.rset,
+                                                 view=view, context='navbottom')
+        if components:
+            self.w(u'<div id="contentfooter">')
+            for comp in components:
+                comp.dispatch(w=self.w, view=view)
+            self.w(u'</div>')
+
+
+class LogFormTemplate(Template):
+    """login form box, either inline (login page) or hidden (popup box)"""
+    id = 'logform'
+    title = 'log in'
+
+    # NOTE: the `id` parameter shadows the builtin but is part of the
+    # template's calling interface (see HTMLPageHeader.main_header)
+    def call(self, id, klass, title=True, message=True):
+        self.req.add_css('cubicweb.login.css')
+        self.w(u'<div id="%s" class="%s">' % (id, klass))
+        if title:
+            self.w(u'<div id="loginTitle">%s</div>'
+                   % self.req.property_value('ui.site-title'))
+        self.w(u'<div id="loginContent">\n')        
+
+        if message:
+            self.display_message()
+        if self.config['auth-mode'] == 'http':
+            # HTTP authentication: the browser handles credentials itself,
+            # no form to render
+            pass
+        else:
+            # Cookie authentication
+            self.login_form(id)
+        self.w(u'</div></div>\n')
+
+    def display_message(self):
+        """display the request's pending message, if any"""
+        message = self.req.message
+        if message:
+            self.w(u'<div class="simpleMessage">%s</div>\n' % message)
+                     
+    def login_form(self, id):
+        """render the login/password form posting to the https url if one
+        is configured
+        """
+        _ = self.req._
+        self.w(u'<form method="post" action="%s" id="login_form">\n'
+               % html_escape(login_form_url(self.config, self.req)))
+        self.w(u'<table>\n')
+        self.w(u'<tr>\n')
+        self.w(u'<td><label for="__login">%s</label></td>' % _('login'))
+        self.w(u'<td><input name="__login" id="__login" class="data" type="text" /></td>')
+        self.w(u'</tr><tr>\n')
+        self.w(u'<td><label for="__password" >%s</label></td>' % _('password'))
+        self.w(u'<td><input name="__password" id="__password" class="data" type="password" /></td>\n')
+        self.w(u'</tr><tr>\n')
+        self.w(u'<td>&nbsp;</td><td><input type="submit" class="loginButton right" value="%s" />\n</td>' % _('log in'))
+        self.w(u'</tr>\n')
+        self.w(u'</table>\n')
+        self.w(u'</form>\n')
+        # XXX doesn't seem to work, rewrite this
+        self.w(u'''<script type="text/javascript">if(document.getElementById("%s").className != "hidden")
+                   {$('login_form').__login.focus()}</script>''' % id)
+
+    
+def login_form_url(config, req):
+    if req.https:
+        return req.url()
+    if config.get('https-url'):
+        return req.url().replace(req.base_url(), config['https-url'])
+    return req.url()
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/baseviews.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,995 @@
+"""Set of HTML generic base views:
+
+* noresult, final
+* primary, sidebox
+* secondary, oneline, incontext, outofcontext, text
+* list
+* xml, rss
+
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from time import timezone
+
+from rql import nodes
+
+from logilab.common.decorators import cached
+from logilab.mtconverter import html_escape, TransformError
+
+from cubicweb import Unauthorized, NoSelectableObject, typed_eid
+from cubicweb.common.selectors import (yes_selector, anyrset_selector, accept_selector,
+                                    onelinerset_selector, searchstate_selector, 
+                                    req_form_params_selector, accept_rset_selector)
+from cubicweb.common.uilib import (cut, printable_value,  UnicodeCSVWriter,
+                                ajax_replace_url, rql_for_eid)
+from cubicweb.common.view import EntityView, AnyRsetView, EmptyRsetView
+from cubicweb.web.httpcache import MaxAgeHTTPCacheManager
+from cubicweb.web.views import vid_from_rset, linksearch_select_url, linksearch_match
+
+_ = unicode
+
+
+class NullView(AnyRsetView):
+    """view rendering nothing at all, selectable whatever the result set"""
+    id = 'null'
+    __select__ = classmethod(yes_selector)
+    def call(self, **kwargs):
+        pass
+    # cell_call behaves exactly like call: always a no-op
+    cell_call = call
+
+
+class NoResultView(EmptyRsetView):
+    """default view when no result has been found"""
+    id = 'noresult'
+    
+    def call(self, **kwargs):
+        """display a 'no result' search message"""
+        self.w(u'<div class="searchMessage"><strong>%s</strong></div>\n'
+               % self.req._('No result matching query'))
+
+
+class FinalView(AnyRsetView):
+    """display values without any transformation (i.e. get a number for
+    entities) 
+    """
+    id = 'final'
+            
+    def cell_call(self, row, col, props=None, displaytime=False):
+        """print the final (attribute) value in the given rset cell"""
+        etype = self.rset.description[row][col]
+        value = self.rset.rows[row][col]
+        if etype == 'String':
+            # if the string is an entity attribute, let the entity format it
+            entity, rtype = self.rset.related_entity(row, col)
+            if entity is not None:
+                # yes !
+                self.w(entity.printable_value(rtype, value))
+                return
+        if etype in ('Time', 'Interval'):
+            _ = self.req._
+            # value is DateTimeDelta but we have no idea about what is the 
+            # reference date here, so we can only approximate years and months
+            if value.days > 730: # 2 years
+                self.w(_('%d years') % (value.days // 365))
+            elif value.days > 60: # 2 months
+                self.w(_('%d months') % (value.days // 30))
+            elif value.days > 14: # 2 weeks
+                self.w(_('%d weeks') % (value.days // 7))
+            elif value.days > 2:
+                self.w(_('%s days') % int(value.days))
+            elif value.hours > 2:
+                self.w(_('%s hours') % int(value.hours))
+            elif value.minutes >= 2:
+                self.w(_('%s minutes') % int(value.minutes))
+            else:
+                self.w(_('%s seconds') % int(value.seconds))
+            return
+        # generic fallback formatting for any other final type
+        self.wdata(printable_value(self.req, etype, value, props, displaytime=displaytime))
+
+
+class EditableFinalView(FinalView):
+    """same as FinalView but enables inplace-edition when possible"""
+    id = 'editable-final'
+                
+    def cell_call(self, row, col, props=None, displaytime=False):
+        etype = self.rset.description[row][col]
+        value = self.rset.rows[row][col]
+        entity, rtype = self.rset.related_entity(row, col)
+        if entity is not None:
+            # attribute of a known entity: use the in-place edition view
+            self.w(entity.view('reledit', rtype=rtype))
+        else:
+            super(EditableFinalView, self).cell_call(row, col, props, displaytime)
+        
+# relations never rendered in the relations sections of the primary view
+# (see PrimaryView.iter_relations)
+PRIMARY_SKIP_RELS = set(['is', 'is_instance_of', 'identity',
+                         'owned_by', 'created_by', 
+                         'in_state', 'wf_info_for', 'require_permission',
+                         'from_entity', 'to_entity',
+                         'see_also'])
+
+class PrimaryView(EntityView):
+    """the full view of a non final entity"""
+    id = 'primary'
+    title = _('primary')
+    # class attributes below are customization hooks for subclasses
+    show_attr_label = True          # display labels in front of attributes
+    show_rel_label = True           # display labels in front of relations
+    skip_none = True                # hide attributes with no value
+    skip_attrs = ('eid', 'creation_date', 'modification_date')
+    skip_rels = ()                  # additional relations to hide
+    main_related_section = True     # render relations in the main column
+
+    def html_headers(self):
+        """return a list of html headers (eg something to be inserted between
+        <head> and </head> of the returned page
+
+        by default primary views are indexed
+        """
+        return []
+    
+    def cell_call(self, row, col):        
+        self.row = row
+        self.render_entity(self.complete_entity(row, col))
+    
+    def render_entity(self, entity):
+        """return html to display the given entity"""
+        siderelations = []
+        self.render_entity_title(entity)
+        self.render_entity_metadata(entity)
+        # entity's attributes and relations, excluding meta data
+        # if the entity isn't meta itself
+        self.w(u'<table border="0" width="100%">')
+        self.w(u'<tr>')
+        self.w(u'<td style="width:75%" valign="top">')
+        self.w(u'<div class="mainInfo">')
+        self.render_entity_attributes(entity, siderelations)
+        self.w(u'</div>')
+        # content navigation components above the main section
+        self.w(u'<div class="navcontenttop">')
+        for comp in self.vreg.possible_vobjects('contentnavigation',
+                                                self.req, self.rset,
+                                                view=self, context='navcontenttop'):
+            comp.dispatch(w=self.w, view=self)
+        self.w(u'</div>')
+        if self.main_related_section:
+            self.render_entity_relations(entity, siderelations)
+        self.w(u'</td>')
+        # side boxes
+        self.w(u'<td valign="top">')
+        self.render_side_related(entity, siderelations)
+        self.w(u'</td>')
+        self.w(u'<td valign="top">')
+        self.w(u'</td>')        
+        self.w(u'</tr>')
+        self.w(u'</table>')        
+        # content navigation components below the main section
+        self.w(u'<div class="navcontentbottom">')
+        for comp in self.vreg.possible_vobjects('contentnavigation',
+                                                self.req, self.rset,
+                                                view=self, context='navcontentbottom'):
+            comp.dispatch(w=self.w, view=self)
+        self.w(u'</div>')
+
+    def iter_attributes(self, entity):
+        """yield (rschema, targetschema) for attributes not in skip_attrs"""
+        for rschema, targetschema in entity.e_schema.attribute_definitions():
+            attr = rschema.type
+            if attr in self.skip_attrs:
+               continue
+            yield rschema, targetschema
+            
+    def iter_relations(self, entity):
+        """yield (rschema, targetschemas, role) for displayable relations"""
+        skip = set(self.skip_rels)
+        skip.update(PRIMARY_SKIP_RELS)
+        for rschema, targetschemas, x in entity.e_schema.relation_definitions():
+            if rschema.type in skip:
+                continue
+            yield rschema, targetschemas, x
+
+    def render_entity_title(self, entity):
+        title = self.content_title(entity) # deprecate content_title?
+        if title:
+            self.w(u'<h1><span class="etype">%s</span> %s</h1>'
+                   % (entity.dc_type().capitalize(), title))
+    
+    def content_title(self, entity):
+        """default implementation return an empty string"""
+        return u''
+            
+    def render_entity_metadata(self, entity):
+        entity.view('metadata', w=self.w)
+        summary = self.summary(entity) # deprecate summary?
+        if summary:
+            self.w(u'<div class="summary">%s</div>' % summary)
+    
+    def summary(self, entity):
+        """default implementation return an empty string"""
+        return u''
+    
+               
+    def render_entity_attributes(self, entity, siderelations):
+        """render the entity's attributes, using its widgets when possible"""
+        for rschema, targetschema in self.iter_attributes(entity):
+            attr = rschema.type
+            if targetschema.type in ('Password', 'Bytes'):
+                continue
+            try:
+                wdg = entity.get_widget(attr)
+            except Exception, ex:
+                # NOTE(review): broad fallback — any widget error silently
+                # degrades to the plain printable value
+                value = entity.printable_value(attr, entity[attr], targetschema.type)
+            else:
+                value = wdg.render(entity)
+            if self.skip_none and (value is None or value == ''):
+                continue
+            if rschema.meta:
+                continue
+            self._render_related_entities(entity, rschema, value)
+
+    def render_entity_relations(self, entity, siderelations):
+        """render non-side relations, deferring side ones into siderelations"""
+        if hasattr(self, 'get_side_boxes_defs'):
+            return
+        eschema = entity.e_schema
+        maxrelated = self.req.property_value('navigation.related-limit')
+        for rschema, targetschemas, x in self.iter_relations(entity):
+            try:
+                # fetch one more than the limit to detect truncation
+                related = entity.related(rschema.type, x, limit=maxrelated+1)
+            except Unauthorized:
+                continue
+            if not related:
+                continue
+            if self.is_side_related(rschema, eschema):
+                siderelations.append((rschema, related, x))
+                continue
+            self._render_related_entities(entity, rschema, related, x)
+
+    def render_side_related(self, entity, siderelations):
+        """display side related relations:
+        non-meta in a first step, meta in a second step
+        """
+        if hasattr(self, 'get_side_boxes_defs'):
+            # subclass-defined side boxes take precedence
+            for label, rset in self.get_side_boxes_defs(entity):
+                if rset:
+                    self.w(u'<div class="sideRelated">')
+                    self.wview('sidebox', rset, title=label)
+                    self.w(u'</div>')
+        elif siderelations:
+            self.w(u'<div class="sideRelated">')
+            for relatedinfos in siderelations:
+                # if not relatedinfos[0].meta:
+                #    continue
+                self._render_related_entities(entity, *relatedinfos)
+            self.w(u'</div>')
+        for box in self.vreg.possible_vobjects('boxes', self.req, entity.rset,
+                                               col=entity.col, row=entity.row,
+                                               view=self, context='incontext'):
+            try:
+                box.dispatch(w=self.w, col=entity.col, row=entity.row)
+            except NotImplementedError:
+                # much probably a context insensitive box, which only implements
+                # .call() and not cell_call()
+                box.dispatch(w=self.w)
+                
+    def is_side_related(self, rschema, eschema):
+        return rschema.meta and \
+               not rschema.schema_relation() == eschema.schema_entity()
+
+    def _render_related_entities(self, entity, rschema, related,
+                                 role='subject'):
+        """render an attribute value or a related result set as a labelled
+        field, picking a sub-view depending on how many rows there are
+        """
+        if rschema.is_final():
+            value = related
+            show_label = self.show_attr_label
+        else:
+            if not related:
+                return
+            show_label = self.show_rel_label
+            # if not too many entities, show them all in a list
+            maxrelated = self.req.property_value('navigation.related-limit')
+            if related.rowcount <= maxrelated:
+                if related.rowcount == 1:
+                    value = self.view('incontext', related, row=0)
+                elif 1 < related.rowcount <= 5:
+                    value = self.view('csv', related)
+                else:
+                    value = '<div>' + self.view('simplelist', related) + '</div>'
+            # else show links to display related entities
+            else:
+                rql = related.printable_rql()
+                related.limit(maxrelated)
+                value = '<div>' + self.view('simplelist', related)
+                value += '[<a href="%s">%s</a>]' % (self.build_url(rql=rql),
+                                                    self.req._('see them all'))
+                value +=  '</div>'
+        label = display_name(self.req, rschema.type, role)
+        self.field(label, value, show_label=show_label, w=self.w, tr=False)
+
+
+class SideBoxView(EntityView):
+    """side box usually displaying some related entities in a primary view"""
+    id = 'sidebox'
+    
+    def call(self, boxclass='sideBox', title=u''):
+        """display a list of entities by calling their <item_vid> view
+        """
+        if title:
+            self.w(u'<div class="sideBoxTitle"><span>%s</span></div>' % title)
+        self.w(u'<div class="%s"><div class="sideBoxBody">' % boxclass)
+        # if not too much entities, show them all in a list
+        maxrelated = self.req.property_value('navigation.related-limit')
+        if self.rset.rowcount <= maxrelated:
+            if len(self.rset) == 1:
+                self.wview('incontext', self.rset, row=0)
+            elif 1 < len(self.rset) < 5:
+                self.wview('csv', self.rset)
+            else:
+                self.wview('simplelist', self.rset)
+        # else show links to display related entities
+        else:
+            self.rset.limit(maxrelated)
+            rql = self.rset.printable_rql(encoded=False)
+            self.wview('simplelist', self.rset)
+            self.w(u'[<a href="%s">%s</a>]' % (self.build_url(rql=rql),
+                                               self.req._('see them all')))
+        self.w(u'</div>\n</div>\n')
+
+
+ 
+class SecondaryView(EntityView):
+    id = 'secondary'
+    title = _('secondary')
+    
+    def cell_call(self, row, col):
+        """the secondary view for an entity
+        secondary = icon + view(oneline)
+        """
+        # NOTE(review): `entity` looks unused; presumably kept to force the
+        # entity fetch/cache — confirm before removing
+        entity = self.entity(row, col)
+        self.w(u'&nbsp;')
+        self.wview('oneline', self.rset, row=row, col=col)
+
+class OneLineView(EntityView):
+    id = 'oneline'
+    title = _('oneline') 
+
+    def cell_call(self, row, col):
+        """the one line view for an entity: linked text view
+        """
+        entity = self.entity(row, col)
+        # both url and inner text are escaped before insertion
+        self.w(u'<a href="%s">' % html_escape(entity.absolute_url()))
+        self.w(html_escape(self.view('text', self.rset, row=row, col=col)))
+        self.w(u'</a>')
+
+class TextView(EntityView):
+    """the simplest text view for an entity
+    """
+    id = 'text'
+    title = _('text')
+    accepts = 'Any',
+    def call(self, **kwargs):
+        """the view is called for an entire result set, by default loop
+        other rows of the result set and call the same view on the
+        particular row
+
+        Views applicable on None result sets have to override this method
+        """
+        rset = self.rset
+        if rset is None:
+            raise NotImplementedError, self
+        for i in xrange(len(rset)):
+            self.wview(self.id, rset, row=i, **kwargs)
+            # separate rows by a newline when there are several
+            if len(rset) > 1:
+                self.w(u"\n")
+    
+    def cell_call(self, row, col=0, **kwargs):
+        """write the entity's title, truncated to the configured line size"""
+        entity = self.entity(row, col)
+        self.w(cut(entity.dc_title(),
+                   self.req.property_value('navigation.short-line-size')))
+
+class MetaDataView(EntityView):
+    """paragraph view of some metadata"""
+    id = 'metadata'
+    accepts = 'Any',
+    # whether to prefix the paragraph with the entity's eid
+    show_eid = True
+    
+    def cell_call(self, row, col):
+        """display eid, creation/modification dates, creator and owners"""
+        _ = self.req._
+        entity = self.entity(row, col)
+        self.w(u'<div class="metadata">')
+        if self.show_eid:
+            self.w(u'#%s - ' % entity.eid)
+        if entity.modification_date != entity.creation_date:
+            self.w(u'<span>%s</span> ' % _('latest update on'))
+            self.w(u'<span class="value">%s</span>,&nbsp;'
+                   % self.format_date(entity.modification_date))
+        # entities from external source may not have a creation date (eg ldap)
+        if entity.creation_date: 
+            self.w(u'<span>%s</span> ' % _('created on'))
+            self.w(u'<span class="value">%s</span>'
+                   % self.format_date(entity.creation_date))
+        if entity.creator:
+            creatoreid = entity.creator.eid
+            self.w(u'&nbsp;<span>%s</span> ' % _('by'))
+            self.w(u'<span class="value">%s</span>' % entity.creator.name())
+        else:
+            creatoreid = None            
+        try:
+            # owners other than the creator (already displayed above)
+            owners = ','.join(u.name() for u in entity.owned_by
+                              if u.eid != creatoreid)
+            if owners:
+                self.w(u',&nbsp;<span>%s</span> ' % _('owned by'))
+                self.w(u'<span class="value">%s</span>' % owners)
+        except Unauthorized:
+            pass
+        self.w(u'</div>')
+
+
+# new default views for finer control in general views, to use instead of
+# oneline / secondary
+
+class InContextTextView(TextView):
+    """plain (non-truncated) title of an entity, used inside a context
+    where the entity type is already known
+    """
+    id = 'textincontext'
+    title = None # not listed as a possible view
+    def cell_call(self, row, col):
+        entity = self.entity(row, col)
+        self.w(entity.dc_title())
+        
+class OutOfContextTextView(InContextTextView):
+    """long title of an entity, for use outside any context"""
+    id = 'textoutofcontext'
+
+    def cell_call(self, row, col):
+        entity = self.entity(row, col)
+        self.w(entity.dc_long_title())
+
+
+class InContextView(EntityView):
+    """link to the entity using its in-context text, with a truncated
+    description as tooltip
+    """
+    id = 'incontext'
+
+    def cell_call(self, row, col):
+        entity = self.entity(row, col)
+        desc = cut(entity.dc_description(), 50)
+        self.w(u'<a href="%s" title="%s">' % (html_escape(entity.absolute_url()),
+                                              html_escape(desc)))
+        self.w(html_escape(self.view('textincontext', self.rset, row=row, col=col)))
+        self.w(u'</a>')
+
+        
+class OutOfContextView(EntityView):
+    id = 'outofcontext'
+
+    def cell_call(self, row, col):
+        self.w(u'<a href="%s">' % self.entity(row, col).absolute_url())
+        self.w(html_escape(self.view('textoutofcontext', self.rset, row=row, col=col)))
+        self.w(u'</a>')
+
+class NotClickableInContextView(EntityView):
+    """in-context view without a link, registered for State entities"""
+    id = 'incontext'
+    accepts = ('State',)
+    def cell_call(self, row, col):
+        self.w(html_escape(self.view('textincontext', self.rset, row=row, col=col)))
+
+## class NotClickableOutOfContextView(EntityView):
+##     id = 'outofcontext'
+##     accepts = ('State',)
+##     def cell_call(self, row, col):
+##         self.w(html_escape(self.view('textoutofcontext', self.rset, row=row)))
+
+            
+# list and table related views ################################################
+    
+class ListView(EntityView):
+    id = 'list'
+    title = _('list')
+    # view used to render each item of the list
+    item_vid = 'listitem'
+        
+    def call(self, klass=None, title=None, subvid=None, listid=None, **kwargs):
+        """display a list of entities by calling their <item_vid> view
+        
+        :param listid: the DOM id to use for the root element
+        """
+        if subvid is None and 'subvid' in self.req.form:
+            subvid = self.req.form.pop('subvid') # consume it
+        if listid:
+            listid = u' id="%s"' % listid
+        else:
+            listid = u''
+        if title:
+            self.w(u'<div%s class="%s"><h4>%s</h4>\n' % (listid, klass or 'section', title))
+            self.w(u'<ul>\n')
+        else:
+            self.w(u'<ul%s class="%s">\n' % (listid, klass or 'section'))
+        for i in xrange(self.rset.rowcount):
+            self.cell_call(row=i, col=0, vid=subvid, **kwargs)
+        self.w(u'</ul>\n')
+        if title:
+            self.w(u'</div>\n')
+
+    def cell_call(self, row, col=0, vid=None, **kwargs):
+        """render one list item, delegating to item_vid"""
+        self.w(u'<li>')
+        self.wview(self.item_vid, self.rset, row=row, col=col, vid=vid, **kwargs)
+        self.w(u'</li>\n')
+
+    def url(self):
+        """overrides url method so that by default, the view list is called
+        with sorted entities
+        """
+        coltypes = self.rset.column_types(0)
+        # don't want to generate the rql if there is some restriction on
+        # something else than the entity type
+        if len(coltypes) == 1:
+            # XXX norestriction is not correct here. For instance, in cases like
+            # Any P,N WHERE P is Project, P name N
+            # norestriction should equal True
+            restr = self.rset.syntax_tree().children[0].where
+            norestriction = (isinstance(restr, nodes.Relation) and
+                             restr.is_types_restriction())
+            if norestriction:
+                etype = iter(coltypes).next()
+                return self.build_url(etype.lower(), vid=self.id)
+        if len(self.rset) == 1:
+            entity = self.rset.get_entity(0, 0)
+            return self.build_url(entity.rest_path(), vid=self.id)
+        return self.build_url(rql=self.rset.printable_rql(), vid=self.id)
+
+ 
+class ListItemView(EntityView):
+    """render a single entity using a given view, falling back to
+    redirect_vid when that view is not selectable
+    """
+    id = 'listitem'
+    
+    @property
+    def redirect_vid(self):
+        """fallback view id, depending on the current search state"""
+        if self.req.search_state[0] == 'normal':
+            return 'outofcontext'
+        return 'outofcontext-search'
+        
+    def cell_call(self, row, col, vid=None, **kwargs):
+        if not vid:
+            vid = self.redirect_vid
+        try:
+            self.wview(vid, self.rset, row=row, col=col, **kwargs)
+        except NoSelectableObject:
+            if vid == self.redirect_vid:
+                # the fallback itself failed, nothing more to try
+                raise
+            kwargs.pop('done', None)
+            self.wview(self.redirect_vid, self.rset, row=row, col=col, **kwargs)
+
+
+class SimpleListView(ListItemView):
+    """list without bullets"""
+    id = 'simplelist'
+    # override the property with a fixed fallback view
+    redirect_vid = 'incontext'
+
+
+class CSVView(SimpleListView):
+    """render entities as a comma separated inline list"""
+    id = 'csv'
+    redirect_vid = 'incontext'
+        
+    def call(self, **kwargs):
+        rset = self.rset
+        for i in xrange(len(rset)):
+            self.cell_call(i, 0, vid=kwargs.get('vid'))
+            # separator after every item but the last
+            if i < rset.rowcount-1:
+                self.w(u", ")
+
+
+class TreeItemView(ListItemView):
+    """item of a tree-like listing, always rendered 'incontext'"""
+    accepts = ('Any',)
+    id = 'treeitem'
+    
+    def cell_call(self, row, col):
+        self.wview('incontext', self.rset, row=row, col=col)
+
+
+# xml and xml/rss views #######################################################
+    
+class XmlView(EntityView):
+    """dump the result set as a raw XML document: one <item_vid> rendering
+    per row, wrapped in a single <xml_root> element
+    """
+    id = 'xml'
+    title = _('xml')
+    templatable = False
+    content_type = 'text/xml'
+    xml_root = 'rset'
+    item_vid = 'xmlitem'
+    
+    def cell_call(self, row, col):
+        self.wview(self.item_vid, self.rset, row=row, col=col)
+        
+    def call(self):
+        """display a list of entities by calling their <item_vid> view
+        """
+        self.w(u'<?xml version="1.0" encoding="%s"?>\n' % self.req.encoding)
+        self.w(u'<%s size="%s">\n' % (self.xml_root, len(self.rset)))
+        for i in xrange(self.rset.rowcount):
+            self.cell_call(i, 0)
+        self.w(u'</%s>\n' % self.xml_root)
+
+
+class XmlItemView(EntityView):
+    id = 'xmlitem'
+
+    def cell_call(self, row, col):
+        """ element as an item for an xml feed """
+        entity = self.complete_entity(row, col)
+        self.w(u'<%s>\n' % (entity.e_schema))
+        for rschema, attrschema in entity.e_schema.attribute_definitions():
+            attr = rschema.type
+            try:
+                value = entity[attr]
+            except KeyError:
+                # Bytes
+                continue
+            if value is not None:
+                if attrschema == 'Bytes':
+                    from base64 import b64encode
+                    # binary content is base64-encoded inside a CDATA section
+                    value = '<![CDATA[%s]]>' % b64encode(value.getvalue())
+                elif isinstance(value, basestring):
+                    # minimal XML escaping for element text content
+                    value = value.replace('&', '&amp;').replace('<', '&lt;')
+                self.w(u'  <%s>%s</%s>\n' % (attr, value, attr))
+        self.w(u'</%s>\n' % (entity.e_schema))
+
+
+class RssView(XmlView):
+    """RSS 1.0 (RDF) feed for the result set: channel metadata followed
+    by one 'rssitem' rendering per row
+    """
+    id = 'rss'
+    title = _('rss')
+    templatable = False
+    content_type = 'text/xml'
+    http_cache_manager = MaxAgeHTTPCacheManager
+    cache_max_age = 60*60*2 # stay in http cache for 2 hours by default 
+    
+    def cell_call(self, row, col):
+        self.wview('rssitem', self.rset, row=row, col=col)
+        
+    def call(self):
+        """display a list of entities by calling their <item_vid> view"""
+        req = self.req
+        self.w(u'<?xml version="1.0" encoding="%s"?>\n' % req.encoding)
+        self.w(u'''<rdf:RDF
+ xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:dc="http://purl.org/dc/elements/1.1/"
+ xmlns="http://purl.org/rss/1.0/"
+>''')
+        self.w(u'  <channel rdf:about="%s">\n' % html_escape(req.url()))
+        self.w(u'    <title>%s RSS Feed</title>\n' % html_escape(self.page_title()))
+        self.w(u'    <description>%s</description>\n' % html_escape(req.form.get('vtitle', '')))
+        # channel link: current url without the vid parameter
+        params = req.form.copy()
+        params.pop('vid', None)
+        self.w(u'    <link>%s</link>\n' % html_escape(self.build_url(**params)))
+        self.w(u'    <items>\n')
+        self.w(u'      <rdf:Seq>\n')
+        for entity in self.rset.entities():
+            self.w(u'      <rdf:li resource="%s" />\n' % html_escape(entity.absolute_url()))
+        self.w(u'      </rdf:Seq>\n')
+        self.w(u'    </items>\n')
+        self.w(u'  </channel>\n')
+        for i in xrange(self.rset.rowcount):
+            self.cell_call(i, 0)
+        self.w(u'</rdf:RDF>')
+
+
+class RssItemView(EntityView):
+    id = 'rssitem'
+    # ISO-8601-like format with the local utc offset interpolated once at
+    # class definition time (the %% escapes survive that interpolation)
+    date_format = '%%Y-%%m-%%dT%%H:%%M%+03i:00' % (timezone / 3600)
+
+    def cell_call(self, row, col):
+        entity = self.complete_entity(row, col)
+        self.w(u'<item rdf:about="%s">\n' % html_escape(entity.absolute_url()))
+        self._marker('title', entity.dc_long_title())
+        self._marker('link', entity.absolute_url())
+        self._marker('description', entity.dc_description())
+        self._marker('dc:date', entity.dc_date(self.date_format))
+        self._marker('author', entity.dc_authors())
+        self.w(u'</item>\n')
+        
+    def _marker(self, marker, value):
+        # write <marker>value</marker>, skipping empty values
+        if value:
+            self.w(u'  <%s>%s</%s>\n' % (marker, html_escape(value), marker))
+
+
+class CSVMixIn(object):
+    """mixin class for CSV views"""
+    templatable = False
+    content_type = "text/comma-separated-values"    
+    binary = True # avoid unicode assertion
+    # default csv.writer formatting parameters
+    csv_params = {'dialect': 'excel',
+                  'quotechar': '"',
+                  'delimiter': ';',
+                  'lineterminator': '\n'}
+    
+    def set_request_content_type(self):
+        """overriden to set a .csv filename"""
+        self.req.set_content_type(self.content_type, filename='cubicwebexport.csv')
+            
+    def csvwriter(self, **kwargs):
+        # kwargs override the class-level csv_params defaults
+        params = self.csv_params.copy()
+        params.update(kwargs)
+        return UnicodeCSVWriter(self.w, self.req.encoding, **params)
+
+    
+class CSVRsetView(CSVMixIn, AnyRsetView):
+    """dumps rset in CSV"""
+    id = 'csvexport'
+    title = _('csv export')
+        
+    def call(self):
+        writer = self.csvwriter()
+        writer.writerow(self.get_headers_labels())
+        descr = self.rset.description
+        for rowindex, row in enumerate(self.rset):
+            csvrow = []
+            for colindex, val in enumerate(row):
+                etype = descr[rowindex][colindex]
+                if val is not None and not self.schema.eschema(etype).is_final():
+                    # entity cell: dump a textual representation, not the eid
+                    # csvrow.append(val) # val is eid in that case
+                    content = self.view('textincontext', self.rset, 
+                                        row=rowindex, col=colindex)
+                else:
+                    content = self.view('final', self.rset, displaytime=True,
+                                        row=rowindex, col=colindex)
+                csvrow.append(content)                    
+            writer.writerow(csvrow)
+    
+    def get_headers_labels(self):
+        """compute a display label for each column of the result set"""
+        rqlstdescr = self.rset.syntax_tree().get_description()[0] # XXX missing Union support
+        labels = []
+        for colindex, attr in enumerate(rqlstdescr):
+            # compute column header
+            if colindex == 0 or attr == 'Any': # find a better label
+                label = ','.join(display_name(self.req, et)
+                                 for et in self.rset.column_types(colindex))
+            else:
+                label = display_name(self.req, attr)
+            labels.append(label)
+        return labels
+
+    
+class CSVEntityView(CSVMixIn, EntityView):
+    """dumps rset's entities (with full set of attributes) in CSV"""
+    id = 'ecsvexport'
+    title = _('csv entities export')
+
+    def call(self):
+        """
+        the generated CSV file will have a table per entity type
+        found in the resultset. ('table' here only means empty
+        lines separation between table contents)
+        """
+        req = self.req
+        rows_by_type = {}
+        writer = self.csvwriter()
+        rowdef_by_type = {}
+        for index in xrange(len(self.rset)):
+            entity = self.complete_entity(index)
+            if entity.e_schema not in rows_by_type:
+                rowdef_by_type[entity.e_schema] = [rs for rs, as in entity.e_schema.attribute_definitions()
+                                                   if as.type != 'Bytes']
+                rows_by_type[entity.e_schema] = [[display_name(req, rschema.type)
+                                                  for rschema in rowdef_by_type[entity.e_schema]]]
+            rows = rows_by_type[entity.e_schema]
+            rows.append([entity.printable_value(rs.type, format='text/plain')
+                         for rs in rowdef_by_type[entity.e_schema]])
+        for etype, rows in rows_by_type.items():
+            writer.writerows(rows)
+            # use two empty lines as separator
+            writer.writerows([[], []])        
+    
+
+## Work in progress ###########################################################
+
+class SearchForAssociationView(EntityView):
+    """view called by the edition view when the user asks
+    to search for something to link to the edited eid
+    """
+    id = 'search-associate'
+    title = _('search for association')
+    __selectors__ = (onelinerset_selector, searchstate_selector, accept_selector)
+    accepts = ('Any',)
+    search_states = ('linksearch',)
+
+    def cell_call(self, row, col):
+        rset, vid, divid, paginate = self.filter_box_context_info()
+        self.w(u'<div id="%s">' % divid)
+        self.pagination(self.req, rset, w=self.w)
+        self.wview(vid, rset)
+        self.w(u'</div>')
+
+    @cached
+    def filter_box_context_info(self):
+        # link-search context: role/eid/rtype/etype of the edited relation
+        entity = self.entity(0, 0)
+        role, eid, rtype, etype = self.req.search_state[1]
+        assert entity.eid == typed_eid(eid)
+        # the default behaviour is to fetch all unrelated entities and display
+        # them. Use fetch_order and not fetch_unrelated_order as sort method
+        # since the latter is mainly there to select relevant items in the combo
+        # box, it doesn't give interesting result in this context
+        rql = entity.unrelated_rql(rtype, etype, role,
+                                   ordermethod='fetch_order',
+                                   vocabconstraints=False)
+        rset = self.req.execute(rql, {'x' : entity.eid}, 'x')
+        #vid = vid_from_rset(self.req, rset, self.schema)
+        return rset, 'list', "search-associate-content", True
+
+
+class OutOfContextSearch(EntityView):
+    """out-of-context rendering decorated with a 'select this entity' link
+    when the request is in link-search state for this entity
+    """
+    id = 'outofcontext-search'
+    def cell_call(self, row, col):
+        entity = self.entity(row, col)
+        erset = entity.as_rset()
+        if linksearch_match(self.req, erset):
+            self.w(u'<a href="%s" title="%s">%s</a>&nbsp;<a href="%s" title="%s">[...]</a>' % (
+                html_escape(linksearch_select_url(self.req, erset)),
+                self.req._('select this entity'),
+                html_escape(entity.view('textoutofcontext')),
+                html_escape(entity.absolute_url(vid='primary')),
+                self.req._('view detail for this entity')))
+        else:
+            # not a candidate for the current link search: plain rendering
+            entity.view('outofcontext', w=self.w)
+            
+            
+class EditRelationView(EntityView):
+    """Note: This is work in progress
+
+    This view is part of the edition view refactoring.
+    It is still too big and cluttered with strange logic, but it's a start
+
+    The main idea is to be able to call an edition view for a specific
+    relation. For example :
+       self.wview('editrelation', person_rset, rtype='firstname')
+       self.wview('editrelation', person_rset, rtype='works_for')
+    """
+    id = 'editrelation'
+
+    __selectors__ = (req_form_params_selector,)
+    form_params = ('rtype',)
+    
+    # TODO: inlineview, multiple edit, (widget view ?)
+    def cell_call(self, row, col, rtype=None, role='subject', targettype=None,
+                 showlabel=True):
+        self.req.add_js( ('cubicweb.ajax.js', 'cubicweb.edition.js') )
+        entity = self.entity(row, col)
+        rtype = self.req.form.get('rtype', rtype)
+        showlabel = self.req.form.get('showlabel', showlabel)
+        assert rtype is not None, "rtype is mandatory for 'edirelation' view"
+        targettype = self.req.form.get('targettype', targettype)
+        role = self.req.form.get('role', role)
+        mode = entity.rtags.get_mode(rtype, targettype, role)
+        if mode == 'create':
+            return
+        category = entity.rtags.get_category(rtype, targettype, role)
+        if category in ('generated', 'metadata'):
+            return
+        elif category in ('primary', 'secondary'):
+            if hasattr(entity, '%s_format' % rtype):
+                formatwdg = entity.get_widget('%s_format' % rtype, role)
+                self.w(formatwdg.edit_render(entity))
+                self.w(u'<br/>')
+            wdg = entity.get_widget(rtype, role)
+            if showlabel:
+                self.w(u'%s' % wdg.render_label(entity))
+            self.w(u'%s %s %s' %
+                   (wdg.render_error(entity), wdg.edit_render(entity),
+                    wdg.render_help(entity),))
+        elif category == 'generic':
+            self._render_generic_relation(entity, rtype, role)
+        else:
+            self.error("oops, wrong category %s", category)
+
+    def _render_generic_relation(self, entity, relname, role):
+        text = self.req.__('add %s %s %s' % (entity.e_schema, relname, role))
+        # pending operations
+        operations = self.req.get_pending_operations(entity, relname, role)
+        if operations['insert'] or operations['delete'] or 'unfold' in self.req.form:
+            self.w(u'<h3>%s</h3>' % text)
+            self._render_generic_relation_form(operations, entity, relname, role)
+        else:
+            divid = "%s%sreledit" % (relname, role)
+            url = ajax_replace_url(divid, rql_for_eid(entity.eid), 'editrelation',
+                                   {'unfold' : 1, 'relname' : relname, 'role' : role})
+            self.w(u'<a href="%s">%s</a>' % (url, text))
+            self.w(u'<div id="%s"></div>' % divid)
+        
+
+    def _build_opvalue(self, entity, relname, target, role):
+        if role == 'subject':
+            return '%s:%s:%s' % (entity.eid, relname, target)
+        else:
+            return '%s:%s:%s' % (target, relname, entity.eid)
+        
+    
+    def _render_generic_relation_form(self, operations, entity, relname, role):
+        rqlexec = self.req.execute
+        for optype, targets in operations.items():
+            for target in targets:
+                self._render_pending(optype, entity, relname, target, role)
+                opvalue = self._build_opvalue(entity, relname, target, role)
+                self.w(u'<a href="javascript: addPendingDelete(\'%s\', %s);">-</a> '
+                       % (opvalue, entity.eid))
+                rset = rqlexec('Any X WHERE X eid %(x)s', {'x': target}, 'x')
+                self.wview('oneline', rset)
+        # now, unrelated ones
+        self._render_unrelated_selection(entity, relname, role)
+
+    def _render_pending(self, optype, entity, relname, target, role):
+        opvalue = self._build_opvalue(entity, relname, target, role)
+        self.w(u'<input type="hidden" name="__%s" value="%s" />'
+               % (optype, opvalue))
+        if optype == 'insert':
+            checktext = '-'
+        else:
+            checktext = '+'
+        rset = self.req.execute('Any X WHERE X eid %(x)s', {'x': target}, 'x')
+        self.w(u"""[<a href="javascript: cancelPending%s('%s:%s:%s')">%s</a>"""
+               % (optype.capitalize(), relname, target, role,
+                  self.view('oneline', rset)))
+
+    def _render_unrelated_selection(self, entity, relname, role):
+        rschema = self.schema.rschema(relname)
+        if role == 'subject':
+            targettypes = rschema.objects(entity.e_schema)
+        else:
+            targettypes = rschema.subjects(entity.e_schema)
+        self.w(u'<select onselect="addPendingInsert(this.selected.value);">')
+        for targettype in targettypes:
+            unrelated = entity.unrelated(relname, targettype, role) # XXX limit
+            for rowindex, row in enumerate(unrelated):
+                teid = row[0]
+                opvalue = self._build_opvalue(entity, relname, teid, role)
+                self.w(u'<option name="__insert" value="%s>%s</option>'
+                       % (opvalue, self.view('text', unrelated, row=rowindex)))
+        self.w(u'</select>')
+
+
+class TextSearchResultView(EntityView):
+    """this view is used to display full-text search
+
+    It tries to highlight part of data where the search word appears.
+
+    XXX: finish me (fixed line width, fixed number of lines, CSS, etc.)
+    """
+    id = 'tsearch'
+
+
+    def cell_call(self, row, col, **kwargs):
+        entity = self.complete_entity(row, col)
+        self.w(entity.view('incontext'))
+        searched = self.rset.searched_text()
+        if searched is None:
+            return
+        searched = searched.lower()
+        highlighted = '<b>%s</b>' % searched
+        for attr in entity.e_schema.indexable_attributes():
+            try:
+                value = html_escape(entity.printable_value(attr, format='text/plain').lower())
+            except TransformError, ex:
+                continue
+            except:
+                # XXX(review): bare except silently swallows unexpected errors
+                continue
+            if searched in value:
+                contexts = []
+                # keep at most 30 chars of context before each occurrence
+                for ctx in value.split(searched):
+                    if len(ctx) > 30:
+                        contexts.append(u'...' + ctx[-30:])
+                    else:
+                        contexts.append(ctx)
+                value = u'\n' + highlighted.join(contexts)
+                self.w(value.replace('\n', '<br/>'))            
+
+
+class EntityRelationView(EntityView):
+    """abstract view listing entities related to the cell's entity through
+    self.rtype; references self.target/self.rtype/self.title which are not
+    defined here -- presumably provided by concrete subclasses
+    """
+    accepts = ()
+    vid = 'list'
+    
+    def cell_call(self, row, col):
+        # display the entities at the *other* end of the relation
+        if self.target == 'object':
+            role = 'subject'
+        else:
+            role = 'object'
+        rset = self.rset.get_entity(row, col).related(self.rtype, role)
+        self.w(u'<h1>%s</h1>' % self.req._(self.title).capitalize())
+        self.w(u'<div class="mainInfo">')
+        self.wview(self.vid, rset, 'noresult')
+        self.w(u'</div>')
+
+
+class TooltipView(OneLineView):
+    """A entity view used in a tooltip"""
+    id = 'tooltip'
+    title = None # don't display in possible views
+    def cell_call(self, row, col):
+        # simply reuse the 'oneline' rendering
+        self.wview('oneline', self.rset, row=row, col=col)
+
+try:
+    # backward compat: TableView used to be importable from this module
+    from cubicweb.web.views.tableview import TableView
+    from logilab.common.deprecation import class_moved
+    TableView = class_moved(TableView)
+except ImportError:
+    pass # gae has no tableview module (yet)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/bookmark.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,99 @@
+"""Primary view for bookmarks + user's bookmarks box
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from logilab.mtconverter import html_escape
+
+from cubicweb import Unauthorized
+from cubicweb.web.htmlwidgets import BoxWidget, BoxMenu, RawBoxItem
+from cubicweb.web.box import UserRQLBoxTemplate
+from cubicweb.web.views.baseviews import PrimaryView
+
+
+class BookmarkPrimaryView(PrimaryView):
+    accepts = ('Bookmark',)
+        
+    def cell_call(self, row, col):
+        """the primary view for bookmark entity"""
+        entity = self.complete_entity(row, col)
+        self.w(u'&nbsp;')
+        self.w(u"<span class='title'><b>")
+        self.w(u"%s : %s" % (self.req._('Bookmark'), html_escape(entity.title)))
+        self.w(u"</b></span>")
+        # NOTE(review): this anchor is closed immediately with no link text,
+        # so the link is invisible -- probably meant to display the target
+        # url or path; confirm intended rendering
+        self.w(u'<br/><br/><div class="content"><a href="%s">' % (
+            html_escape(entity.actual_url())))
+        self.w(u'</a>')
+        self.w(u'<p>%s%s</p>' % (self.req._('Used by:'), ', '.join(html_escape(u.name())
+                                                                   for u in entity.bookmarked_by)))
+        self.w(u'</div>')
+
+
+class BookmarksBox(UserRQLBoxTemplate):
+    """display a box containing all user's bookmarks"""
+    id = 'bookmarks_box'
+    order = 40
+    title = _('bookmarks')
+    rql = ('Any B,T,P ORDERBY lower(T) '
+           'WHERE B is Bookmark,B title T, B path P, B bookmarked_by U, '
+           'U eid %(x)s')
+    etype = 'Bookmark'
+    rtype = 'bookmarked_by'
+    
+    
+    def call(self, **kwargs):
+        req = self.req
+        ueid = req.user.eid
+        try:
+            rset = req.execute(self.rql, {'x': ueid})
+        except Unauthorized:
+            # can't access to something in the query, forget this box
+            return
+        box = BoxWidget(req._(self.title), self.id)
+        box.listing_class = 'sideBox'
+        rschema = self.schema.rschema(self.rtype)
+        eschema = self.schema.eschema(self.etype)
+        # deletion links only shown when the user may delete the relation
+        candelete = rschema.has_perm(req, 'delete', toeid=ueid)
+        if candelete:
+            req.add_js( ('cubicweb.ajax.js', 'cubicweb.bookmarks.js') )
+        else:
+            dlink = None # NOTE(review): dead assignment, dlink only used when candelete
+        for bookmark in rset.entities():
+            label = '<a href="%s">%s</a>' % (html_escape(bookmark.action_url()),
+                                             html_escape(bookmark.title))
+            if candelete:
+                dlink = u'[<a href="javascript:removeBookmark(%s)" title="%s">-</a>]' % (
+                    bookmark.eid, _('delete this bookmark'))
+                label = '%s %s' % (dlink, label)
+            box.append(RawBoxItem(label, liclass=u'invisible'))
+        if eschema.has_perm(req, 'add') and rschema.has_perm(req, 'add', toeid=ueid):
+            boxmenu = BoxMenu(req._('manage bookmarks'), liclass=u'invisible')
+            linkto = 'bookmarked_by:%s:subject' % ueid
+            # use a relative path so that we can move the application without
+            # loosing bookmarks
+            path = req.relative_path()
+            url = self.create_url(self.etype, __linkto=linkto, path=path)
+            boxmenu.append(self.mk_action(req._('bookmark this page'), url,
+                                          category='manage', id='bookmark'))
+            if rset:
+                if req.user.is_in_group('managers'):
+                    bookmarksrql = 'Bookmark B WHERE B bookmarked_by U, U eid %s' % ueid
+                    erset = rset
+                else:
+                    # we can't edit shared bookmarks we don't own
+                    bookmarksrql = 'Bookmark B WHERE B bookmarked_by U, B owned_by U, U eid %(x)s'
+                    erset = req.execute(bookmarksrql, {'x': ueid}, 'x',
+                                                build_descr=False)
+                    bookmarksrql %= {'x': ueid}
+                if erset:
+                    url = self.build_url(vid='muledit', rql=bookmarksrql)
+                    boxmenu.append(self.mk_action(self.req._('edit bookmarks'), url, category='manage'))
+            url = req.user.absolute_url(vid='xaddrelation', rtype='bookmarked_by',
+                                        target='subject')
+            boxmenu.append(self.mk_action(self.req._('pick existing bookmarks'), url, category='manage'))
+            box.append(boxmenu)
+        if not box.is_empty():
+            box.render(self.w)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/boxes.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,235 @@
+"""
+generic boxes for CubicWeb web client:
+
+* actions box
+* possible views box
+* rss icon
+
+additional (disabled by default) boxes
+* schema box
+* startup views box
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from logilab.mtconverter import html_escape
+
+from cubicweb.common.selectors import rset_selector, nfentity_selector
+from cubicweb.web.htmlwidgets import BoxWidget, BoxMenu, BoxHtml, RawBoxItem
+from cubicweb.web.box import BoxTemplate, ExtResourcesBoxTemplate
+
+_ = unicode
+
+
+class EditBox(BoxTemplate):
+    """
+    box with all actions impacting the entity displayed: edit, copy, delete
+    change state, add related entities
+    """
+    __selectors__ = (rset_selector,) + BoxTemplate.__selectors__
+    id = 'edit_box'
+    title = _('actions')
+    order = 2
+
+    def call(self, **kwargs):
+        _ = self.req._
+        title = _(self.title)
+        if self.rset:
+            # specialize the box title with the displayed entity type when
+            # the result set is homogeneous
+            etypes = self.rset.column_types(0)
+            if len(etypes) == 1:
+                plural = self.rset.rowcount > 1 and 'plural' or ''
+                etypelabel = display_name(self.req, iter(etypes).next(), plural)
+                title = u'%s - %s' % (title, etypelabel.lower())
+        box = BoxWidget(title, self.id, _class="greyBoxFrame")
+        # build list of actions
+        actions = self.vreg.possible_actions(self.req, self.rset)
+        add_menu = BoxMenu(_('add')) # 'addrelated' category
+        other_menu = BoxMenu(_('more actions')) # 'moreactions' category
+        searchstate = self.req.search_state[0]
+        for category, menu in (('mainactions', box),
+                               ('addrelated', add_menu),
+                               ('moreactions', other_menu)):
+            for action in actions.get(category, ()):
+                menu.append(self.box_action(action))
+        # single non-final entity displayed in normal search state:
+        # add schema-derived "add related" actions and workflow entries
+        if self.rset and self.rset.rowcount == 1 and \
+               not self.schema[self.rset.description[0][0]].is_final() and \
+               searchstate == 'normal':
+            entity = self.rset.get_entity(0, 0)
+            #entity.complete()
+            if add_menu.items:
+                self.info('explicit actions defined, ignoring potential rtags for %s',
+                          entity.e_schema)
+            else:
+                # some addrelated actions may be specified but no one is selectable
+                # in which case we should not fallback to schema_actions. The proper
+                # way to avoid this is to override add_related_schemas() on the
+                # entity class to return an empty list
+                for action in self.schema_actions(entity):
+                    add_menu.append(action)            
+            if 'in_state' in entity.e_schema.subject_relations() and entity.in_state:
+                state = entity.in_state[0]
+                transitions = list(state.transitions(entity))
+                if transitions:
+                    menu_title = u'%s: %s' % (_('state'), state.view('text'))
+                    menu_items = []
+                    for tr in state.transitions(entity):
+                        url = entity.absolute_url(vid='statuschange', treid=tr.eid)
+                        menu_items.append(self.mk_action(_(tr.name), url))
+                    state_menu = BoxMenu(menu_title, menu_items)
+                    box.append(state_menu)
+                # when there are no possible transition, put state if the menu if
+                # there are some other actions
+                elif not box.is_empty():
+                    menu_title = u'<a title="%s">%s: <i>%s</i></a>' % (
+                        _('no possible transition'), _('state'), state.view('text'))
+                    box.append(RawBoxItem(menu_title, 'boxMainactions'))
+        # promote 'more actions' entries to the top level when they are
+        # all the box would contain
+        if box.is_empty() and not other_menu.is_empty():
+            box.items = other_menu.items
+            other_menu.items = []
+        self.add_submenu(box, add_menu, _('add'))
+        self.add_submenu(box, other_menu)
+        if not box.is_empty():
+            box.render(self.w)
+
+    def add_submenu(self, box, submenu, label_prefix=None):
+        # inline single-entry submenus instead of nesting them
+        if len(submenu.items) == 1:
+            boxlink = submenu.items[0]
+            if label_prefix:
+                boxlink.label = u'%s %s' % (label_prefix, boxlink.label)
+            box.append(boxlink)
+        elif submenu.items:
+            box.append(submenu)
+        
+    def schema_actions(self, entity):
+        """actions to create entities related to `entity`, derived from the schema"""
+        user = self.req.user
+        actions = []
+        _ = self.req._
+        eschema = entity.e_schema
+        for rschema, teschema, x in entity.add_related_schemas():
+            if x == 'subject':
+                label = 'add %s %s %s %s' % (eschema, rschema, teschema, x)
+                url = self.linkto_url(entity, rschema, teschema, 'object')
+            else:
+                label = 'add %s %s %s %s' % (teschema, rschema, eschema, x)
+                url = self.linkto_url(entity, rschema, teschema, 'subject')
+            actions.append(self.mk_action(_(label), url))
+        return actions
+
+
+    def linkto_url(self, entity, rtype, etype, target):
+        # creation form url pre-linking the new entity to `entity`
+        return self.build_url(vid='creation', etype=etype,
+                              __linkto='%s:%s:%s' % (rtype, entity.eid, target),
+                              __redirectpath=entity.rest_path(), # should not be url quoted!
+                              __redirectvid=self.req.form.get('vid', ''))
+
+
+class SearchBox(BoxTemplate):
+    """display a box with a simple search form"""
+    id = 'search_box'
+    visible = True # enabled by default
+    title = _('search')
+    order = 0
+    need_resources = 'SEARCH_GO'
+    formdef = u"""<form action="%s">
+<table id="tsearch"><tr><td>
+<input id="norql" type="text" accesskey="q" tabindex="%s" title="search text" value="%s" name="rql" />
+<input type="hidden" name="__fromsearchbox" value="1" />
+<input type="hidden" name="subvid" value="tsearch" />
+</td><td>
+<input tabindex="%s" type="submit" id="rqlboxsubmit" value="" />
+</td></tr></table>
+</form>"""
+
+
+    def call(self, view=None, **kwargs):
+        req = self.req
+        # refill the input only when the query came from this box
+        if req.form.pop('__fromsearchbox', None):
+            rql = req.form.get('rql', '')
+        else:
+            rql = ''
+        form = self.formdef % (req.build_url('view'), req.next_tabindex(),
+                               html_escape(rql), req.next_tabindex())
+        title = u"""<span onclick="javascript: toggleVisibility('rqlinput')">%s</span>""" % req._(self.title)
+        box = BoxWidget(title, self.id, _class="searchBoxFrame", islist=False, escape=False)
+        box.append(BoxHtml(form))
+        box.render(self.w)            
+
+
+# boxes disabled by default ###################################################
+
+class PossibleViewsBox(BoxTemplate):
+    """display a box containing links to all possible views"""
+    id = 'possible_views_box'
+    
+    
+    title = _('possible views')
+    order = 10
+    require_groups = ('users', 'managers')
+    visible = False
+
+    def call(self, **kwargs):
+        box = BoxWidget(self.req._(self.title), self.id)
+        # startup views have their own box (StartupViewsBox)
+        actions = [v for v in self.vreg.possible_views(self.req, self.rset)
+                   if v.category != 'startupview']
+        # NOTE(review): loop variable rebinds 'actions'; harmless but confusing
+        for category, actions in self.sort_actions(actions):
+            menu = BoxMenu(category)
+            for action in actions:
+                menu.append(self.box_action(action))
+            box.append(menu)
+        if not box.is_empty():
+            box.render(self.w)
+
+
+class RSSIconBox(ExtResourcesBoxTemplate):
+    """just display the RSS icon on uniform result set"""
+    __selectors__ = ExtResourcesBoxTemplate.__selectors__ + (nfentity_selector,)
+    
+    id = 'rss'
+    order = 999
+    need_resources = 'RSS_LOGO',
+    visible = False
+    
+    def call(self, **kwargs):
+        # link the rss icon to the current rql rendered with the 'rss' view
+        url = html_escape(self.build_url(rql=self.limited_rql(), vid='rss'))
+        rss = self.req.external_resource('RSS_LOGO')
+        self.w(u'<a href="%s"><img src="%s" border="0" /></a>\n' % (url, rss))
+
+
+## warning("schemabox ne marche plus pour le moment")
+## class SchemaBox(BoxTemplate):
+##     """display a box containing link to list of entities by type"""
+##     id = 'schema_box'
+##     visible = False # disabled by default
+##     title = _('entity list')
+##     order = 60
+        
+##     def call(self, **kwargs):
+##         box = BoxWidget(self.req._(title), self.id)
+##         for etype in self.config.etypes(self.req.user, 'read'):
+##             view = self.vreg.select_view('list', self.req, self.etype_rset(etype))
+##             box.append(self.mk_action(display_name(self.req, etype, 'plural'),
+##                                       view.url(), etype=etype))
+##         if not box.is_empty():
+##             box.render(self.w)
+
+
+class StartupViewsBox(BoxTemplate):
+    """display a box containing links to all startup views"""
+    id = 'startup_views_box'
+    visible = False # disabled by default
+    title = _('startup views')
+    order = 70
+
+    def call(self, **kwargs):
+        box = BoxWidget(self.req._(self.title), self.id)
+        # only keep views from the 'startupview' category
+        for view in self.vreg.possible_views(self.req, None):
+            if view.category == 'startupview':
+                box.append(self.box_action(view))
+        
+        if not box.is_empty():
+            box.render(self.w)
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/calendar.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,549 @@
+"""html calendar views
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+
+from mx.DateTime import DateTime, RelativeDateTime, today, ISO
+from datetime import datetime
+
+from vobject import iCalendar, icalendar
+
+from logilab.mtconverter import html_escape
+
+from cubicweb.interfaces import ICalendarable
+from cubicweb.common.utils import date_range
+from cubicweb.common.uilib import ajax_replace_url
+from cubicweb.common.selectors import interface_selector, anyrset_selector
+from cubicweb.common.registerers import priority_registerer
+from cubicweb.common.view import EntityView
+
+
+# For backward compatibility
+from cubicweb.interfaces import ICalendarViews, ITimetableViews
+try:
+    from cubicweb.web.views.old_calendar import _CalendarView, AMPMWeekCalendarView
+except ImportError:
+    import logging
+    logger = logging.getLogger('cubicweb.registry')
+    logger.info("old calendar views could not be found and won't be registered")
+
+_ = unicode
+
+# useful constants & functions
+def mkdt(mxdate):
+    """
+    Build a stdlib datetime object from an mx.DateTime one
+    """
+    d = mxdate
+    return datetime(d.year, d.month, d.day, d.hour, d.minute,
+                    tzinfo=icalendar.utc)
+def iso(mxdate):
+    """
+    Format an mx datetime as an ISO 8601 string
+    """
+    # XXX What about timezone?
+    return ISO.str(mxdate)
+
+# mx.DateTime and ustrftime could be used to build WEEKDAYS
+WEEKDAYS = (_("monday"), _("tuesday"), _("wednesday"), _("thursday"),
+            _("friday"), _("saturday"), _("sunday"))
+
+# used by i18n tools
+MONTHNAMES = ( _('january'), _('february'), _('march'), _('april'), _('may'),
+               _('june'), _('july'), _('august'), _('september'), _('october'),
+               _('november'), _('december')
+               )
+
+#################
+# In calendar views (views used as calendar cell item) 
+
+
+class CalendarItemView(EntityView):
+    id = 'calendaritem'
+
+    def cell_call(self, row, col, dates=False):
+        task = self.complete_entity(row)
+        task.view('oneline', w=self.w)
+        if dates:
+            if task.start and task.stop:
+                self.w('<br/>from %s'%self.format_date(task.start))
+                self.w('<br/>to %s'%self.format_date(task.stop))
+                
+class CalendarLargeItemView(CalendarItemView):
+    id = 'calendarlargeitem'
+        
+#################
+# Calendar views
+
+class iCalView(EntityView):
+    """A calendar view that generates a iCalendar file (RFC 2445)
+
+    Does apply to ICalendarable compatible entities
+    """
+    __registerer__ = priority_registerer
+    __selectors__ = (interface_selector,)
+    accepts_interfaces = (ICalendarable,)
+    need_navigation = False
+    content_type = 'text/calendar'
+    title = _('iCalendar')
+    templatable = False
+    id = 'ical'
+
+    def call(self):
+        ical = iCalendar()
+        for i in range(len(self.rset.rows)):
+            task = self.complete_entity(i)
+            event = ical.add('vevent')
+            event.add('summary').value = task.dc_title()
+            event.add('description').value = task.dc_description()
+            if task.start:
+                event.add('dtstart').value = mkdt(task.start)
+            if task.stop:
+                event.add('dtend').value = mkdt(task.stop)
+
+        buff = ical.serialize()
+        if not isinstance(buff, unicode):
+            buff = unicode(buff, self.req.encoding)
+        self.w(buff)
+
+class hCalView(EntityView):
+    """A calendar view that generates a hCalendar file
+
+    Does apply to ICalendarable compatible entities
+    """
+    __registerer__ = priority_registerer
+    __selectors__ = (interface_selector,)
+    accepts_interfaces = (ICalendarable,)
+    need_navigation = False
+    title = _('hCalendar')
+    templatable = False
+    id = 'hcal'
+
+    def call(self):
+        self.w(u'<div class="hcalendar">')
+        for i in range(len(self.rset.rows)):
+            task = self.complete_entity(i)
+            self.w(u'<div class="vevent">')
+            self.w(u'<h3 class="summary">%s</h3>' % html_escape(task.dc_title()))
+            self.w(u'<div class="description">%s</div>' % html_escape(task.dc_description()))
+            if task.start:
+                self.w(u'<abbr class="dtstart" title="%s">%s</abbr>' % (iso(task.start), self.format_date(task.start)))
+            if task.stop:
+                self.w(u'<abbr class="dtstop" title="%s">%s</abbr>' % (iso(task.stop), self.format_date(task.stop)))
+            self.w(u'</div>')
+        self.w(u'</div>')
+
+    
+class _TaskEntry(object):
+    def __init__(self, task, color, index=0):
+        self.task = task
+        self.color = color
+        self.index = index
+        self.length = 1
+
+class OneMonthCal(EntityView):
+    """At some point, this view will probably replace ampm calendars"""
+    __registerer__ = priority_registerer
+    __selectors__ = (interface_selector, anyrset_selector)
+    accepts_interfaces = (ICalendarable,)
+    need_navigation = False
+    id = 'onemonthcal'
+    title = _('one month')
+
+    def call(self):
+        self.req.add_js('cubicweb.ajax.js')
+        self.req.add_css('cubicweb.calendar.css')
+        # XXX: restrict courses directly with RQL
+        _today =  today()
+
+        if 'year' in self.req.form:
+            year = int(self.req.form['year'])
+        else:
+            year = _today.year
+        if 'month' in self.req.form:
+            month = int(self.req.form['month'])
+        else:
+            month = _today.month
+
+        first_day_of_month = DateTime(year, month, 1)
+        lastday = first_day_of_month + RelativeDateTime(months=1,weekday=(6,1))
+        firstday= first_day_of_month + RelativeDateTime(months=-1,weekday=(0,-1))
+        month_dates = list(date_range(firstday, lastday))
+        dates = {}
+        users = []
+        task_max = 0
+        for row in xrange(self.rset.rowcount):
+            task = self.rset.get_entity(row,0)
+            if len(self.rset[row]) > 1 and self.rset.description[row][1] == 'EUser':
+                user = self.rset.get_entity(row,1)
+            else:
+                user = None
+            the_dates = []
+            if task.start:
+                if task.start > lastday:
+                    continue
+                the_dates = [task.start]
+            if task.stop:
+                if task.stop < firstday:
+                    continue
+                the_dates = [task.stop]
+            if task.start and task.stop:
+                if task.start.absdate == task.stop.absdate:
+                    date = task.start
+                    if firstday<= date <= lastday:
+                        the_dates = [date]
+                else:
+                    the_dates = date_range(max(task.start,firstday),
+                                           min(task.stop,lastday))
+            if not the_dates:
+                continue
+            
+            for d in the_dates:
+                d_tasks = dates.setdefault((d.year, d.month, d.day), {})
+                t_users = d_tasks.setdefault(task,set())
+                t_users.add( user )
+                if len(d_tasks)>task_max:
+                    task_max = len(d_tasks)
+
+        days = []
+        nrows = max(3,task_max)
+        # colors here are class names defined in cubicweb.css
+        colors = [ "col%x"%i for i in range(12) ]
+        next_color_index = 0
+
+        visited_tasks = {} # holds a description of a task
+        task_colors = {}   # remember a color assigned to a task
+        for date in month_dates:
+            d_tasks = dates.get((date.year, date.month, date.day), {})
+            rows = [None] * nrows
+            # every task that is "visited" for the first time
+            # require a special treatment, so we put them in
+            # 'postpone'
+            postpone = []
+            for task in d_tasks:
+                if task in visited_tasks:
+                    task_descr = visited_tasks[ task ]
+                    rows[task_descr.index] = task_descr
+                else:
+                    postpone.append(task)
+            for task in postpone:
+                # every 'new' task must be assigned a color
+                # (which must be the same for every user concerned
+                # by the task)
+                for i,t in enumerate(rows):
+                    if t is None:
+                        if task in task_colors:
+                            color = task_colors[task]
+                        else:
+                            color = colors[next_color_index]
+                            next_color_index = (next_color_index+1)%len(colors)
+                            task_colors[task] = color
+                        task_descr = _TaskEntry(task, color, i)
+                        rows[i] = task_descr
+                        visited_tasks[task] = task_descr
+                        break
+                else:
+                    raise RuntimeError("is it possible we got it wrong?")
+
+            days.append( rows )
+
+        curdate = first_day_of_month
+        self.w(u'<div id="onemonthcalid">')
+        # build schedule
+        self.w(u'<table class="omcalendar">')
+        prevlink, nextlink = self._prevnext_links(curdate)  # XXX
+        self.w(u'<tr><th><a href="%s">&lt;&lt;</a></th><th colspan="5">%s %s</th>'
+               u'<th><a href="%s">&gt;&gt;</a></th></tr>' %
+               (html_escape(prevlink), self.req._(curdate.strftime('%B').lower()),
+                curdate.year, html_escape(nextlink)))
+
+        # output header
+        self.w(u'<tr><th>%s</th><th>%s</th><th>%s</th><th>%s</th><th>%s</th><th>%s</th><th>%s</th></tr>' %
+               tuple(self.req._(day) for day in WEEKDAYS))
+        
+        # build calendar
+        for date, task_rows in zip(month_dates, days):
+            if date.day_of_week == 0:
+                self.w(u'<tr>')
+            self._build_calendar_cell(date, task_rows, curdate)
+            if date.day_of_week == 6:
+                self.w(u'</tr>')
+        self.w(u'</table></div>')
+
+    def _prevnext_links(self, curdate):
+        prevdate = curdate - RelativeDateTime(months=1)
+        nextdate = curdate + RelativeDateTime(months=1)
+        rql = self.rset.rql
+        prevlink = ajax_replace_url('onemonthcalid', rql, 'onemonthcal',
+                                    year=prevdate.year, month=prevdate.month)
+        nextlink = ajax_replace_url('onemonthcalid', rql, 'onemonthcal',
+                                    year=nextdate.year, month=nextdate.month)
+        return prevlink, nextlink
+
+    def _build_calendar_cell(self, date, rows, curdate):
+        curmonth = curdate.month
+        classes = ""
+        if date.month != curmonth:
+            classes += " outOfRange"
+        if date == today():
+            classes += " today"
+        self.w(u'<td class="cell%s">' % classes)
+        self.w(u'<div class="calCellTitle%s">' % classes)
+        self.w(u'<div class="day">%s</div>' % date.day)
+        
+        if len(self.rset.column_types(0)) == 1:
+            etype = list(self.rset.column_types(0))[0]
+            url = self.build_url(vid='creation', etype=etype,
+                                 schedule=True,
+                                 start=self.format_date(date), stop=self.format_date(date),
+                                 __redirectrql=self.rset.rql,
+                                 __redirectparams=self.req.build_url_params(year=curdate.year, month=curmonth),
+                                 __redirectvid=self.id
+                                 )
+            self.w(u'<div class="cmd"><a href="%s">%s</a></div>' % (html_escape(url), self.req._(u'add')))
+            self.w(u'&nbsp;')
+        self.w(u'</div>')
+        self.w(u'<div class="cellContent">')
+        for task_descr in rows:
+            if task_descr:
+                task = task_descr.task
+                self.w(u'<div class="task %s">' % task_descr.color)
+                task.view('calendaritem', w=self.w )
+                url = task.absolute_url(vid='edition',
+                                        __redirectrql=self.rset.rql,
+                                        __redirectparams=self.req.build_url_params(year=curdate.year, month=curmonth),
+                                        __redirectvid=self.id
+                                        )
+
+                self.w(u'<div class="tooltip" ondblclick="stopPropagation(event); window.location.assign(\'%s\'); return false;">' % html_escape(url))
+                task.view('tooltip', w=self.w )
+                self.w(u'</div>')
+            else:
+                self.w(u'<div class="task">')
+                self.w(u"&nbsp;")
+            self.w(u'</div>')
+        self.w(u'</div>')
+        self.w(u'</td>')
+
+
+class OneWeekCal(EntityView):
+    """At some point, this view will probably replace ampm calendars"""
+    __registerer__ = priority_registerer
+    __selectors__ = (interface_selector, anyrset_selector)
+    accepts_interfaces = (ICalendarable,)
+    need_navigation = False
+    id = 'oneweekcal'
+    title = _('one week')
+    
+    def call(self):
+        self.req.add_js( ('cubicweb.ajax.js', 'cubicweb.calendar.js') )
+        self.req.add_css('cubicweb.calendar.css')
+        # XXX: restrict courses directly with RQL
+        _today =  today()
+
+        if 'year' in self.req.form:
+            year = int(self.req.form['year'])
+        else:
+            year = _today.year
+        if 'week' in self.req.form:
+            week = int(self.req.form['week'])
+        else:
+            week = _today.iso_week[1]        
+
+        first_day_of_week = ISO.ParseWeek("%s-W%s-1"%(year, week))
+        lastday = first_day_of_week + RelativeDateTime(days=6)
+        firstday= first_day_of_week
+        dates = [[] for i in range(7)]
+        task_max = 0
+        task_colors = {}   # remember a color assigned to a task
+        # colors here are class names defined in cubicweb.css
+        colors = [ "col%x"%i for i in range(12) ]
+        next_color_index = 0
+        done_tasks = []
+        for row in xrange(self.rset.rowcount):
+            task = self.rset.get_entity(row,0)
+            if task in done_tasks:
+                continue
+            done_tasks.append(task)
+            the_dates = []
+            if task.start:
+                if task.start > lastday:
+                    continue
+                the_dates = [task.start]
+            if task.stop:
+                if task.stop < firstday:
+                    continue
+                the_dates = [task.stop]
+            if task.start and task.stop:
+                the_dates = date_range(max(task.start,firstday),
+                                       min(task.stop,lastday))
+            if not the_dates:
+                continue
+                
+            if task not in task_colors:
+                task_colors[task] = colors[next_color_index]
+                next_color_index = (next_color_index+1)%len(colors)
+            
+            for d in the_dates:
+                day = d.day_of_week
+                task_descr = _TaskEntry(task, task_colors[task])  
+                dates[day].append(task_descr)
+            
+        self.w(u'<div id="oneweekcalid">')
+        # build schedule
+        self.w(u'<table class="omcalendar" id="week">')
+        prevlink, nextlink = self._prevnext_links(first_day_of_week)  # XXX
+        self.w(u'<tr><th class="transparent"></th>')
+        self.w(u'<th><a href="%s">&lt;&lt;</a></th><th colspan="5">%s %s %s</th>'
+               u'<th><a href="%s">&gt;&gt;</a></th></tr>' %
+               (html_escape(prevlink), first_day_of_week.year,
+                self.req._(u'week'), first_day_of_week.iso_week[1],
+                html_escape(nextlink)))
+
+        # output header
+        self.w(u'<tr>')
+        self.w(u'<th class="transparent"></th>') # column for hours
+        _today = today()
+        for i, day in enumerate(WEEKDAYS):
+            date = first_day_of_week + i
+            if date.absdate == _today.absdate:
+                self.w(u'<th class="today">%s<br/>%s</th>' % (self.req._(day), self.format_date(date)))
+            else:
+                self.w(u'<th>%s<br/>%s</th>' % (self.req._(day), self.format_date(date)))
+        self.w(u'</tr>')
+
+        
+        # build week calendar
+        self.w(u'<tr>')
+        self.w(u'<td style="width:5em;">') # column for hours
+        extra = ""
+        for h in range(8, 20):
+            self.w(u'<div class="hour" %s>'%extra)
+            self.w(u'%02d:00'%h)
+            self.w(u'</div>')            
+        self.w(u'</td>')
+        
+        for i, day in enumerate(WEEKDAYS):
+            date = first_day_of_week + i
+            classes = ""
+            if date.absdate == _today.absdate:
+                classes = " today"
+            self.w(u'<td class="column %s" id="%s">'%(classes, day))
+            if len(self.rset.column_types(0)) == 1:
+                etype = list(self.rset.column_types(0))[0]
+                url = self.build_url(vid='creation', etype=etype,
+                                     schedule=True,
+                                     __redirectrql=self.rset.rql,
+                                     __redirectparams=self.req.build_url_params(year=year, week=week),
+                                     __redirectvid=self.id
+                                     )
+                extra = ' ondblclick="addCalendarItem(event, hmin=%s, hmax=%s, year=%s, month=%s, day=%s, duration=%s, baseurl=\'%s\')"' % (8,20,date.year, date.month, date.day, 2, html_escape(url))
+            else:
+                extra = ""
+            self.w(u'<div class="columndiv"%s>'% extra)
+            for h in range(8, 20):
+                self.w(u'<div class="hourline" style="top:%sex;">'%((h-7)*8))
+                self.w(u'</div>')            
+            if dates[i]:
+                self._build_calendar_cell(date, dates[i])
+            self.w(u'</div>')
+            self.w(u'</td>')
+        self.w(u'</tr>')
+        self.w(u'</table></div>')
+        self.w(u'<div id="coord"></div>')
+        self.w(u'<div id="debug">&nbsp;</div>')
+ 
+    def _one_day_task(self, task):
+        """
+        Return true if the task is a "one day" task, i.e. it has a start and a stop on the same day
+        """
+        if task.start and task.stop:
+            if task.start.absdate ==  task.stop.absdate:
+                return True
+        return False
+        
+    def _build_calendar_cell(self, date, task_descrs):
+        inday_tasks = [t for t in task_descrs if self._one_day_task(t.task) and  t.task.start.hour<20 and t.task.stop.hour>7]
+        wholeday_tasks = [t for t in task_descrs if not self._one_day_task(t.task)]
+
+        inday_tasks.sort(key=lambda t:t.task.start)
+        sorted_tasks = []
+        for i, t in enumerate(wholeday_tasks):
+            t.index = i
+        ncols = len(wholeday_tasks)
+        while inday_tasks:
+            t = inday_tasks.pop(0)
+            for i, c in enumerate(sorted_tasks):
+                if not c or c[-1].task.stop <= t.task.start:
+                    c.append(t)
+                    t.index = i+ncols
+                    break
+            else:
+                t.index = len(sorted_tasks) + ncols
+                sorted_tasks.append([t])
+        ncols += len(sorted_tasks)
+        if ncols == 0:
+            return
+
+        inday_tasks = []
+        for tasklist in sorted_tasks:
+            inday_tasks += tasklist
+        width = 100.0/ncols
+        for task_desc in wholeday_tasks + inday_tasks:
+            task = task_desc.task
+            start_hour = 8
+            start_min = 0
+            stop_hour = 20
+            stop_min = 0
+            if task.start:
+                if date < task.start < date + 1:
+                    start_hour = max(8, task.start.hour)
+                    start_min = task.start.minute
+            if task.stop:
+                if date < task.stop < date + 1:
+                    stop_hour = min(20, task.stop.hour)
+                    if stop_hour < 20:
+                        stop_min = task.stop.minute
+                    
+            height = 100.0*(stop_hour+stop_min/60.0-start_hour-start_min/60.0)/(20-8)
+            top = 100.0*(start_hour+start_min/60.0-8)/(20-8)
+            left = width*task_desc.index
+            style = "height: %s%%; width: %s%%; top: %s%%; left: %s%%; " % \
+                (height, width, top, left)
+            self.w(u'<div class="task %s" style="%s">' % \
+                       (task_desc.color, style))
+            task.view('calendaritem', dates=False, w=self.w)
+            url = task.absolute_url(vid='edition',
+                                    __redirectrql=self.rset.rql,
+                                    __redirectparams=self.req.build_url_params(year=date.year, week=date.iso_week[1]),
+                                    __redirectvid=self.id
+                                 )
+
+            self.w(u'<div class="tooltip" ondblclick="stopPropagation(event); window.location.assign(\'%s\'); return false;">' % html_escape(url))
+            task.view('tooltip', w=self.w)
+            self.w(u'</div>')
+            if task.start is None:
+                self.w(u'<div class="bottommarker">')
+                self.w(u'<div class="bottommarkerline" style="margin: 0px 3px 0px 3px; height: 1px;">')
+                self.w(u'</div>')
+                self.w(u'<div class="bottommarkerline" style="margin: 0px 2px 0px 2px; height: 1px;">')
+                self.w(u'</div>')
+                self.w(u'<div class="bottommarkerline" style="margin: 0px 1px 0px 1px; height: 3ex; color: white; font-size: x-small; vertical-align: center; text-align: center;">')
+                self.w(u'end')
+                self.w(u'</div>')
+                self.w(u'</div>')
+            self.w(u'</div>')
+
+            
+    def _prevnext_links(self, curdate):
+        prevdate = curdate - RelativeDateTime(days=7)
+        nextdate = curdate + RelativeDateTime(days=7)
+        rql = self.rset.rql
+        prevlink = ajax_replace_url('oneweekcalid', rql, 'oneweekcal',
+                                    year=prevdate.year, week=prevdate.iso_week[1])
+        nextlink = ajax_replace_url('oneweekcalid', rql, 'oneweekcal',
+                                    year=nextdate.year, week=nextdate.iso_week[1])
+        return prevlink, nextlink
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/card.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,36 @@
+"""Specific views for cards
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from cubicweb.web.views import baseviews
+from logilab.mtconverter import html_escape
+
+_ = unicode
+
+class CardPrimaryView(baseviews.PrimaryView):
+    accepts = ('Card',)
+    skip_attrs = baseviews.PrimaryView.skip_attrs + ('title', 'synopsis', 'wikiid')
+    show_attr_label = False
+
+    def content_title(self, entity):
+        return html_escape(entity.dc_title())
+    
+    def summary(self, entity):
+        return html_escape(entity.dc_description())
+
+
+class CardInlinedView(CardPrimaryView):
+    """hide card title and summary"""
+    id = 'inlined'
+    title = _('inlined view')
+    main_related_section = False
+    
+    def render_entity_title(self, entity):
+        pass
+    
+    def render_entity_metadata(self, entity):
+        pass
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/debug.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,59 @@
+"""management and error screens
+
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from time import strftime, localtime
+
+from logilab.mtconverter import html_escape
+
+from cubicweb.common.view import StartupView
+
+def dict_to_html(w, dict):
+    # XHTML doesn't allow empty <ul> nodes
+    if dict:
+        w(u'<ul>')
+        for key in sorted(dict):
+            w(u'<li><span class="label">%s</span>: <span>%s</span></li>' % (
+                html_escape(str(key)), html_escape(repr(dict[key]))))
+        w(u'</ul>')
+    
+class DebugView(StartupView):
+    id = 'debug'
+    title = _('server debug information')
+    require_groups = ('managers',)
+
+    def call(self, **kwargs):
+        """display server information"""
+        w = self.w
+        w(u'<h1>server sessions</h1>')
+        sessions = self.req.cnx._repo._sessions.items()
+        if sessions:
+            w(u'<ul>')
+            for sid, session in sessions:
+                w(u'<li>%s  (last usage: %s)<br/>' % (html_escape(str(session)),
+                                                      strftime('%Y-%m-%d %H:%M:%S',
+                                                               localtime(session.timestamp))))
+                dict_to_html(w, session.data)
+                w(u'</li>')
+            w(u'</ul>')
+        else:
+            w(u'<p>no server sessions found</p>')
+        from cubicweb.web.application import SESSION_MANAGER
+        w(u'<h1>web sessions</h1>')
+        sessions = SESSION_MANAGER.current_sessions()
+        if sessions:
+            w(u'<ul>')
+            for session in sessions:
+                w(u'<li>%s (last usage: %s)<br/>' % (session.sessionid,
+                                                     strftime('%Y-%m-%d %H:%M:%S',
+                                                              localtime(session.last_usage_time))))
+                dict_to_html(w, session.data)
+                w(u'</li>')
+            w(u'</ul>')
+        else:
+            w(u'<p>no web sessions found</p>')
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/dynimages.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,182 @@
+"""dynamically generated image views
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+import os
+from tempfile import mktemp
+
+from logilab.common.graph import escape, GraphGenerator, DotBackend
+from yams import schema2dot as s2d
+
+from cubicweb.common.view import EntityView, StartupView
+
+
+class RestrictedSchemaDotPropsHandler(s2d.SchemaDotPropsHandler):
+    def __init__(self, req):
+        self.req = req
+        
+    def display_attr(self, rschema):
+        return not rschema.meta and (rschema.has_local_role('read')
+                                     or rschema.has_perm(self.req, 'read'))
+    
+    # XXX remove this method once yams > 0.20 is out
+    def node_properties(self, eschema):
+        """return default DOT drawing options for an entity schema"""
+        label = ['{',eschema.type,'|']
+        label.append(r'\l'.join(rel.type for rel in eschema.subject_relations()
+                                if rel.final and self.display_attr(rel)))
+        label.append(r'\l}') # trailing \l ensures alignment of the last one
+        return {'label' : ''.join(label), 'shape' : "record",
+                'fontname' : "Courier", 'style' : "filled"}
+    
+class RestrictedSchemaVisitorMiIn:
+    def __init__(self, req, *args, **kwargs):
+        # hack hack hack
+        assert len(self.__class__.__bases__) == 2
+        self.__parent = self.__class__.__bases__[1]
+        self.__parent.__init__(self, *args, **kwargs)
+        self.req = req
+        
+    def nodes(self):
+        for etype, eschema in self.__parent.nodes(self):
+            if eschema.has_local_role('read') or eschema.has_perm(self.req, 'read'):
+                yield eschema.type, eschema
+            
+    def edges(self):
+        for setype, oetype, rschema in self.__parent.edges(self):
+            if rschema.has_local_role('read') or rschema.has_perm(self.req, 'read'):
+                yield setype, oetype, rschema
+
+class FullSchemaVisitor(RestrictedSchemaVisitorMiIn, s2d.FullSchemaVisitor):
+    pass
+
+class OneHopESchemaVisitor(RestrictedSchemaVisitorMiIn, s2d.OneHopESchemaVisitor):
+    pass
+
+class OneHopRSchemaVisitor(RestrictedSchemaVisitorMiIn, s2d.OneHopRSchemaVisitor):
+    pass
+                
+        
+class TmpFileViewMixin(object):
+    binary = True
+    content_type = 'application/octet-stream'
+    cache_max_age = 60*60*2 # stay in http cache for 2 hours by default 
+    
+    def call(self):
+        self.cell_call()
+        
+    def cell_call(self, row=0, col=0):
+        self.row, self.col = row, col # in case one need it
+        tmpfile = mktemp('.png')
+        try:
+            self._generate(tmpfile)
+            self.w(open(tmpfile).read())
+        finally:
+            os.unlink(tmpfile)
+    
+class SchemaImageView(TmpFileViewMixin, StartupView):
+    id = 'schemagraph'
+    content_type = 'image/png'
+    skip_rels = ('owned_by', 'created_by', 'identity', 'is', 'is_instance_of')
+    def _generate(self, tmpfile):
+        """display global schema information"""
+        skipmeta = not int(self.req.form.get('withmeta', 0))
+        visitor = FullSchemaVisitor(self.req, self.schema, skiprels=self.skip_rels, skipmeta=skipmeta)
+        s2d.schema2dot(outputfile=tmpfile, visitor=visitor,
+                       prophdlr=RestrictedSchemaDotPropsHandler(self.req))
+
+class EETypeSchemaImageView(TmpFileViewMixin, EntityView):
+    id = 'eschemagraph'
+    content_type = 'image/png'
+    accepts = ('EEType',)
+    skip_rels = ('owned_by', 'created_by', 'identity', 'is', 'is_instance_of')
+    
+    def _generate(self, tmpfile):
+        """display schema information for an entity"""
+        entity = self.entity(self.row, self.col)
+        eschema = self.vreg.schema.eschema(entity.name)
+        visitor = OneHopESchemaVisitor(self.req, eschema, skiprels=self.skip_rels)
+        s2d.schema2dot(outputfile=tmpfile, visitor=visitor,
+                       prophdlr=RestrictedSchemaDotPropsHandler(self.req))
+
+class ERTypeSchemaImageView(EETypeSchemaImageView):
+    accepts = ('ERType',)
+    
+    def _generate(self, tmpfile):
+        """display schema information for an entity"""
+        entity = self.entity(self.row, self.col)
+        rschema = self.vreg.schema.rschema(entity.name)
+        visitor = OneHopRSchemaVisitor(self.req, rschema)
+        s2d.schema2dot(outputfile=tmpfile, visitor=visitor,
+                       prophdlr=RestrictedSchemaDotPropsHandler(self.req))
+
+
+
+class WorkflowDotPropsHandler(object):
+    def __init__(self, req):
+        self._ = req._
+        
+    def node_properties(self, stateortransition):
+        """return default DOT drawing options for a state or transition"""
+        props = {'label': stateortransition.name, 
+                 'fontname': 'Courier'}
+        if hasattr(stateortransition, 'state_of'):
+            props['shape'] = 'box'
+            props['style'] = 'filled'
+            if stateortransition.reverse_initial_state:
+                props['color'] = '#88CC88'
+        else:
+            props['shape'] = 'ellipse'
+            descr = []
+            tr = stateortransition
+            if tr.require_group:
+                descr.append('%s %s'% (
+                    self._('groups:'),
+                    ','.join(g.name for g in tr.require_group)))
+            if tr.condition:
+                descr.append('%s %s'% (self._('condition:'), tr.condition))
+            if descr:
+                props['label'] += escape('\n'.join(descr))
+        return props
+    
+    def edge_properties(self, transition, fromstate, tostate):
+        return {'label': '', 'dir': 'forward',
+                'color': 'black', 'style': 'filled'}
+
+class WorkflowVisitor:
+    def __init__(self, entity):
+        self.entity = entity
+
+    def nodes(self):
+        for state in self.entity.reverse_state_of:
+            state.complete()
+            yield state.eid, state
+            
+        for transition in self.entity.reverse_transition_of:
+            transition.complete()
+            yield transition.eid, transition
+            
+    def edges(self):
+        for transition in self.entity.reverse_transition_of:
+            for incomingstate in transition.reverse_allowed_transition:
+                yield incomingstate.eid, transition.eid, transition
+            yield transition.eid, transition.destination().eid, transition
+
+
+class EETypeWorkflowImageView(TmpFileViewMixin, EntityView):
+    id = 'ewfgraph'
+    content_type = 'image/png'
+    accepts = ('EEType',)
+    
+    def _generate(self, tmpfile):
+        """display schema information for an entity"""
+        entity = self.entity(self.row, self.col)
+        visitor = WorkflowVisitor(entity)
+        prophdlr = WorkflowDotPropsHandler(self.req)
+        generator = GraphGenerator(DotBackend('workflow', 'LR',
+                                              ratio='compress', size='30,12'))
+        return generator.generate(visitor, prophdlr, tmpfile)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/edit_attributes.pt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,13 @@
+  <!-- attribute edition table: one row per attribute widget, showing the
+       label, an optional error message, the input widget and help text -->
+  <table class="attributeForm" style="width:100%;"
+	 tal:attributes="id tab_id | nothing;
+			 class tab_class | nothing;">
+    <tr tal:iter="widget lines">
+      <th class="labelCol" tal:content="structure python:widget.render_label(entity)">attrname</th>
+      <td tal:define="error python:widget.render_error(entity)" style="width:100%;"
+          tal:attributes="class python:error and 'error' or nothing">
+	<div tal:replace="structure error">error message if any</div>
+	<div tal:replace="structure python:widget.edit_render(entity)" >widget (input, textarea, etc.)</div>
+	<div tal:replace="structure python:widget.render_help(entity)">format help if any</div>
+      </td>
+    </tr>
+  </table>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/edit_multiple.pt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,38 @@
+<!-- multiple entities edition form: one table row per edited entity, a
+     checkbox column, hidden redirection parameters, ok/cancel buttons -->
+<!-- rows are precomputed first to consume error messages if necessary -->
+<form method="post" id="entityForm" onsubmit="return validateForm('entityForm', null);"
+      action="%(action)s"
+      tal:define="rows python:[self.edit_form(e) for e in rset.entities()]"
+      >
+  <div tal:replace="structure self/error_message"/>
+  <div id="progress" tal:content="progress">validating changes...</div>
+  <fieldset>
+  <input type="hidden" name="__errorurl" value="#"
+         tal:attributes="value req/url;" />
+  <input type="hidden" name="__form_id" value="#"
+	 tal:attributes="value python:self.id"/>
+  <input type="hidden" name="__redirectvid" value="primary"
+	 tal:attributes="value python:req.form.get('__redirectvid', 'list');"/>
+  <input type="hidden" name="__redirectrql" value="#"
+	 tal:attributes="value python:req.form.get('__redirectrql', rset.printable_rql());"/>
+  <table class="listing">
+    <tr class="header">
+      <th align="left"><input type="checkbox" onclick="setCheckboxesState('eid', this.checked)" value="" title="toggle check boxes" /></th>
+      <tal:th tal:iter="rdef python:sampleentity.relations_by_category('primary', 'add')">
+	<th tal:condition="python: rdef[0].type != 'eid'"
+            tal:content="python: rdef[0].display_name(req, rdef[-1])"/>
+      </tal:th>
+    </tr>
+    <tr tal:iter="row rows" tal:attributes="class python: repeat['row'].getOdd() and 'even' or 'odd'" tal:content="structure row"/>
+  </table>
+  <table width="100%%">
+    <tr>
+      <td align="left">
+	<input class="validateButton" type="submit"  value="#"
+	       tal:attributes="value okbuttonmsg; title okbuttontitle;"/>
+	<input class="validateButton" type="reset" name="__action_cancel" value="#"
+	       tal:attributes="value  cancelbuttonmsg; title cancelbuttontitle;"/>
+      </td>
+    </tr>
+  </table>
+  </fieldset>
+</form>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/edit_relations.pt	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,51 @@
+<!-- generic relations edition widget: existing related entities (with
+     delete handles), pending inserts, and a relation selector to add
+     new relations -->
+<fieldset class="subentity">
+<legend class="iformTitle" tal:content="python: label">relations</legend>
+<table id="relatedEntities"
+       tal:define="pendings python: list(self.restore_pending_inserts(entity))">
+  <span tal:iter="row python: self.relations_table(entity)" tal:omit-tag="python: True">
+    <tr tal:condition="python: row[2]">
+      <th class="labelCol" tal:content="python: display_name(req, row[0].type, row[1])">relation name</th>
+      <td>
+	<ul>
+	  <li tal:iter="viewparams python: row[2]" class="invisible">
+	    <span tal:replace="structure python:viewparams[1]">[del it if you can]</span>
+	    <div tal:attributes="id python: 'span'+viewparams[0]; class python: viewparams[2]"
+                 tal:content="structure python: viewparams[3]">related entity view</div>
+	  </li>
+	  <li class="invisible"
+	      tal:condition="python: not self.force_display and self.maxrelitems &lt; len(row[2])"
+	      tal:content="structure python:self.force_display_link()"/>
+	</ul>
+      </td>
+    </tr>
+  </span>
+  <tr tal:iter="row pendings"
+      tal:attributes="id python: 'tr' + row[1]">
+    <!-- row: (relname, nodeid, js, url, eview) -->
+    <th tal:content="python: row[3]">relation name</th>
+    <td>
+      <a class="handle" title="cancel this insert"
+	 tal:attributes="href python: row[2]">[x]</a>
+      <a class="editionPending"
+	 tal:attributes="href python: row[4]; id python: 'a' + row[1]"
+	 tal:content="python: row[5]">entity\'s text_view</a>
+    </td>
+  </tr>
+  <tr tal:condition="not:pendings"><th>&nbsp;</th><td>&nbsp;</td></tr>
+  <tr class="separator" tal:attributes="id string: relationSelectorRow_$eid;">
+    <th class="labelCol">
+      <span i18n:content="add relation"></span>
+      <select tal:attributes="id string: relationSelector_${eid};
+                              tabindex req/next_tabindex;
+			      onchange string: javascript:showMatchingSelect(this.options[this.selectedIndex].value,${eid});">
+	<option value="" i18n:content="select a relation">select a relation</option>
+	<option tal:iter="rel python: entity.srelations_by_category(('generic', 'metadata'), 'add')" 
+                tal:attributes="value python: '%s_%s' % (rel[1], rel[2])"
+		tal:content="python: rel[0]">rel</option>
+      </select>
+    </th>
+    <td tal:attributes="id string: unrelatedDivs_$eid">
+    </td>
+  </tr>
+</table>
+</fieldset>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/editcontroller.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,347 @@
+"""The edit controller, handling form submission.
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+from decimal import Decimal
+
+from rql.utils import rqlvar_maker
+
+from cubicweb import Binary, ValidationError, typed_eid
+from cubicweb.web import INTERNAL_FIELD_VALUE, RequestError, NothingToEdit
+from cubicweb.web.controller import parse_relations_descr
+from cubicweb.web.views.basecontrollers import ViewController
+
+
+class ToDoLater(Exception):
+    """exception used in the edit controller to indicate that a relation
+    can't be handled right now and has to be handled later (e.g. because
+    its target entity has not been created yet)
+    """
+
+class EditController(ViewController):
+    """controller handling edition form submission: entity creation,
+    modification, copy and deletion, plus relation insertion / removal
+    """
+    id = 'edit'
+
+    def publish(self, rset=None, fromjson=False):
+        """edit / create / copy / delete entity / relations"""
+        self.fromjson = fromjson
+        req = self.req
+        form = req.form
+        for key in form:
+            # There should be 0 or 1 action
+            if key.startswith('__action_'):
+                # dispatch to the matching _action_* method below
+                cbname = key[1:]
+                try:
+                    callback = getattr(self, cbname)
+                except AttributeError:
+                    raise ValidationError(None,
+                                          {None: req._('invalid action %r' % key)})
+                else:
+                    return callback()
+        self._default_publish()
+        self.reset()
+
+    def _default_publish(self):
+        """generic edition: process each edited entity, then deferred
+        relations and pending insert / delete operations
+        """
+        req = self.req
+        form = req.form
+        # no specific action, generic edition
+        self._to_create = req.data['eidmap'] = {}
+        self._pending_relations = []
+        todelete = self.req.get_pending_deletes()
+        toinsert = self.req.get_pending_inserts()
+        try:
+            methodname = form.pop('__method', None)
+            for eid in req.edited_eids():
+                formparams = req.extract_entity_params(eid)
+                if methodname is not None:
+                    # let the entity preprocess submitted parameters
+                    entity = req.eid_rset(eid).get_entity(0, 0)
+                    method = getattr(entity, methodname)
+                    method(formparams)
+                eid = self.edit_entity(formparams)
+        except (RequestError, NothingToEdit):
+            if '__linkto' in form and 'eid' in form:
+                self.execute_linkto()
+            elif not ('__delete' in form or '__insert' in form or todelete or toinsert):
+                raise ValidationError(None, {None: req._('nothing to edit')})
+        # handle relations in newly created entities
+        if self._pending_relations:
+            for rschema, formparams, x, entity in self._pending_relations:
+                self.handle_relation(rschema, formparams, x, entity, True)
+            
+        # XXX this processes *all* pending operations of *all* entities
+        if form.has_key('__delete'):
+            todelete += req.list_form_param('__delete', form, pop=True)
+        if todelete:
+            self.delete_relations(parse_relations_descr(todelete))
+        if form.has_key('__insert'):
+            toinsert = req.list_form_param('__insert', form, pop=True)
+        if toinsert:
+            self.insert_relations(parse_relations_descr(toinsert))
+        self.req.remove_pending_operations()
+        
+    def edit_entity(self, formparams, multiple=False):
+        """edit / create / copy an entity and return its eid"""
+        etype = formparams['__type']
+        entity = self.vreg.etype_class(etype)(self.req, None, None)
+        entity.eid = eid = self._get_eid(formparams['eid'])
+        edited = self.req.form.get('__maineid') == formparams['eid']
+        # let a chance to do some entity specific stuff.
+        entity.pre_web_edit() 
+        # create a rql query from parameters
+        self.relations = []
+        self.restrictions = []
+        # process inlined relations at the same time as attributes
+        # this is required by some external source such as the svn source which
+        # needs some information provided by those inlined relation. Moreover
+        # this will generate less write queries.
+        for rschema in entity.e_schema.subject_relations():
+            if rschema.is_final():
+                self.handle_attribute(entity, rschema, formparams)
+            elif rschema.inlined:
+                self.handle_inlined_relation(rschema, formparams, entity)
+        execute = self.req.execute
+        if eid is None: # creation or copy
+            if self.relations: 
+                rql = 'INSERT %s X: %s' % (etype, ','.join(self.relations))
+            else:
+                rql = 'INSERT %s X' % etype
+            if self.restrictions:
+                rql += ' WHERE %s' % ','.join(self.restrictions)
+            try:
+                # get the new entity (in some cases, the type might have 
+                # changed as for the File --> Image mutation)
+                entity = execute(rql, formparams).get_entity(0, 0)
+                eid = entity.eid
+            except ValidationError, ex:
+                # ex.entity may be an int or an entity instance
+                self._to_create[formparams['eid']] = ex.entity
+                if self.fromjson:
+                    ex.entity = formparams['eid']
+                raise
+            self._to_create[formparams['eid']] = eid
+        elif self.relations: # edition of an existing entity
+            # pick a rql variable name not clashing with form parameters
+            varmaker = rqlvar_maker()
+            var = varmaker.next()
+            while var in formparams:
+                var = varmaker.next()
+            rql = 'SET %s WHERE X eid %%(%s)s' % (','.join(self.relations), var)
+            if self.restrictions:
+                rql += ', %s' % ','.join(self.restrictions)
+            formparams[var] = eid
+            execute(rql, formparams)
+        # now handle non-inlined relations
+        for rschema in entity.e_schema.subject_relations():
+            if rschema.is_final() or rschema.inlined:
+                continue
+            self.handle_relation(rschema, formparams, 'subject', entity)
+        for rschema in entity.e_schema.object_relations():
+            if rschema.is_final():
+                continue
+            self.handle_relation(rschema, formparams, 'object', entity)
+        if edited:
+            self.notify_edited(entity)
+        if formparams.has_key('__delete'):
+            todelete = self.req.list_form_param('__delete', formparams, pop=True)
+            self.delete_relations(parse_relations_descr(todelete))
+        if formparams.has_key('__cloned_eid'):
+            entity.copy_relations(formparams['__cloned_eid'])
+        if formparams.has_key('__insert'):
+            toinsert = self.req.list_form_param('__insert', formparams, pop=True)
+            self.insert_relations(parse_relations_descr(toinsert))
+        if edited: # only execute linkto for the main entity
+            self.execute_linkto(eid)
+        return eid
+
+    def _action_apply(self):
+        # '__action_apply' button: save changes, then redisplay
+        self._default_publish()
+        self.reset()
+            
+    def _action_cancel(self):
+        # '__action_cancel' button: forget pending changes and redirect
+        errorurl = self.req.form.get('__errorurl')
+        if errorurl:
+            self.req.cancel_edition(errorurl)
+        return self.reset()
+
+    def _action_delete(self):
+        # '__action_delete' button: delete all edited entities
+        self.delete_entities(self.req.edited_eids(withtype=True))
+        return self.reset()
+
+    def _needs_edition(self, rtype, formparams):
+        """return (True, newvalue) if `rtype` was edited and modified,
+        (False, None) otherwise
+        """
+        editkey = 'edits-%s' % rtype
+        if not editkey in formparams:
+            return False, None # not edited
+        value = formparams.get(rtype) or None
+        if (formparams.get(editkey) or None) == value:
+            return False, None # not modified
+        if value == INTERNAL_FIELD_VALUE:
+            value = None        
+        return True, value
+
+    def handle_attribute(self, entity, rschema, formparams):
+        """append to `relations` part of the rql query to edit the
+        attribute described by the given schema if necessary
+        """
+        attr = rschema.type
+        edition_needed, value = self._needs_edition(attr, formparams)
+        if not edition_needed:
+            return
+        # test if entity class defines a special handler for this attribute
+        custom_edit = getattr(entity, 'custom_%s_edit' % attr, None)
+        if custom_edit:
+            custom_edit(formparams, value, self.relations)
+            return
+        attrtype = rschema.objects(entity.e_schema)[0].type
+        # on checkbox or selection, the field may not be in params
+        if attrtype == 'Boolean':
+            value = bool(value)
+        elif attrtype == 'Decimal':
+            value = Decimal(value)
+        elif attrtype == 'Bytes':
+            # if it is a file, transport it using a Binary (StringIO)
+            if formparams.has_key('__%s_detach' % attr):
+                # drop current file value
+                value = None
+            # no need to check value when neither an explicit detach nor a new
+            # file is submitted, since the attribute is then seen as unmodified
+            elif isinstance(value, unicode):
+                # file modified using a text widget
+                value = Binary(value.encode(entity.text_encoding(attr)))
+            else:
+                # (filename, mimetype, stream)
+                val = Binary(value[2].read())
+                if not val.getvalue(): # usually a nonexistent file
+                    value = None
+                else:
+                    # XXX suppose a File compatible schema
+                    val.filename = value[0]
+                    if entity.has_format(attr):
+                        key = '%s_format' % attr
+                        formparams[key] = value[1]
+                        self.relations.append('X %s_format %%(%s)s'
+                                              % (attr, key))
+                    if entity.e_schema.has_subject_relation('name') \
+                           and not formparams.get('name'):
+                        formparams['name'] = value[0]
+                        self.relations.append('X name %(name)s')
+                    value = val
+        elif value is not None:
+            if attrtype in ('Date', 'Datetime', 'Time'):
+                try:
+                    value = self.parse_datetime(value, attrtype)
+                except ValueError:
+                    raise ValidationError(entity.eid,
+                                          {attr: self.req._("invalid date")})
+            elif attrtype == 'Password':
+                # check confirmation (see PasswordWidget for confirmation field name)
+                confirmval = formparams.get(attr + '-confirm')
+                if confirmval != value:
+                    raise ValidationError(entity.eid,
+                                          {attr: self.req._("password and confirmation don't match")})
+                # password should *always* be utf8 encoded
+                value = value.encode('UTF8')
+            else:
+                # strip strings
+                value = value.strip()
+        elif attrtype == 'Password':
+            # skip None password
+            return # unset password
+        formparams[attr] = value
+        self.relations.append('X %s %%(%s)s' % (attr, attr))
+
+    def _relation_values(self, rschema, formparams, x, entity, late=False):
+        """return (values, origvalues) eid sets for the (rschema, x) relation
+        of the given entity, or None when the relation was not edited or has
+        been deferred to the pending relations list
+        """
+        rtype = rschema.type
+        editkey = 'edit%s-%s' % (x[0], rtype)
+        if not editkey in formparams:
+            return # not edited
+        try:
+            values = self._linked_eids(self.req.list_form_param(rtype, formparams), late)
+        except ToDoLater:
+            # some target entity is not created yet, retry once it is
+            self._pending_relations.append((rschema, formparams, x, entity))
+            return
+        origvalues = set(typed_eid(eid) for eid in self.req.list_form_param(editkey, formparams))
+        return values, origvalues
+
+    def handle_inlined_relation(self, rschema, formparams, entity, late=False):
+        """handle edition for the inlined (rschema, subject) relation of the
+        given entity
+        """
+        try:
+            # TypeError raised by unpacking None: not edited / to do later
+            values, origvalues = self._relation_values(rschema, formparams,
+                                                       'subject', entity, late)
+        except TypeError:
+            return # not edited / to do later
+        if values == origvalues:
+            return # not modified
+        attr = str(rschema)
+        if values:
+            formparams[attr] = iter(values).next()
+            self.relations.append('X %s %s' % (attr, attr.upper()))
+            self.restrictions.append('%s eid %%(%s)s' % (attr.upper(), attr))
+        elif entity.has_eid():
+            # all values removed: fall back to generic handling (deletion)
+            self.handle_relation(rschema, formparams, 'subject', entity, late)
+        
+    def handle_relation(self, rschema, formparams, x, entity, late=False):
+        """handle edition for the (rschema, x) relation of the given entity
+        """
+        try:
+            # TypeError raised by unpacking None: not edited / to do later
+            values, origvalues = self._relation_values(rschema, formparams, x,
+                                                       entity, late)
+        except TypeError:
+            return # not edited / to do later
+        etype = entity.e_schema
+        if values == origvalues:
+            return # not modified
+        if x == 'subject':
+            desttype = rschema.objects(etype)[0]
+            # NOTE(review): card is computed but never used below
+            card = rschema.rproperty(etype, desttype, 'cardinality')[0]
+            subjvar, objvar = 'X', 'Y'
+        else:
+            desttype = rschema.subjects(etype)[0]
+            card = rschema.rproperty(desttype, etype, 'cardinality')[1]
+            subjvar, objvar = 'Y', 'X'
+        eid = entity.eid
+        if x == 'object' or not rschema.inlined or not values:
+            # this is not an inlined relation or no values specified,
+            # explicty remove relations
+            for reid in origvalues.difference(values):
+                rql = 'DELETE %s %s %s WHERE X eid %%(x)s, Y eid %%(y)s' % (
+                    subjvar, rschema, objvar)
+                self.req.execute(rql, {'x': eid, 'y': reid}, ('x', 'y'))
+        rql = 'SET %s %s %s WHERE X eid %%(x)s, Y eid %%(y)s' % (
+            subjvar, rschema, objvar)
+        for reid in values.difference(origvalues):
+            self.req.execute(rql, {'x': eid, 'y': reid}, ('x', 'y'))
+    
+    def _get_eid(self, eid):
+        # should be either an int (existing entity) or a variable (to be
+        # created entity)
+        assert eid or eid == 0, repr(eid) # 0 is a valid eid
+        try:
+            return typed_eid(eid)
+        except ValueError:
+            # not created yet: return its eid if already known, else record
+            # it in _to_create and return None
+            try:
+                return self._to_create[eid]
+            except KeyError:
+                self._to_create[eid] = None
+                return None
+
+    def _linked_eids(self, eids, late=False):
+        """return a set of eids if they are all known, else raise ToDoLater
+        """
+        result = set()
+        for eid in eids:
+            if not eid: # AutoCompletionWidget
+                continue
+            eid = self._get_eid(eid)
+            if eid is None:
+                if not late:
+                    raise ToDoLater()
+                # eid is still None while it's already a late call
+                # this mean that the associated entity has not been created
+                raise Exception('duh')
+            result.add(eid)
+        return result
+        
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/emailaddress.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,119 @@
+"""Specific views for email addresses entities
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from logilab.mtconverter import html_escape
+
+from cubicweb.common import Unauthorized
+from cubicweb.schema import display_name
+from cubicweb.web.views import baseviews
+
+class EmailAddressPrimaryView(baseviews.PrimaryView):
+    """primary view for EmailAddress entities: address, canonical form or
+    identical addresses, owners and related emails
+    """
+    accepts = ('EmailAddress',)
+    
+    def cell_call(self, row, col, skipeids=None):
+        # skipeids: eids of related emails not to display in
+        # render_entity_relations
+        self.skipeids = skipeids
+        super(EmailAddressPrimaryView, self).cell_call(row, col)
+        
+    def render_entity_attributes(self, entity, siderelations):
+        # NOTE(review): display_name (used twice below) is not imported in
+        # this module -- NameError at runtime unless imported from
+        # cubicweb.schema as done in euser.py
+        self.w(u'<h3>')
+        entity.view('oneline', w=self.w)
+        if not entity.canonical:
+            canonemailaddr = entity.canonical_form()
+            if canonemailaddr:
+                self.w(u'&nbsp;(<i>%s</i>)' % canonemailaddr.view('oneline'))
+            self.w(u'</h3>')
+        elif entity.identical_to:
+            self.w(u'</h3>')
+            identicaladdr = [e.view('oneline') for e in entity.identical_to]
+            self.field('identical_to', ', '.join(identicaladdr))
+        else:
+            self.w(u'</h3>')
+        try:
+            persons = entity.reverse_primary_email
+        except Unauthorized:
+            persons = []
+        if persons:
+            emailof = persons[0]
+            self.field(display_name(self.req, 'primary_email', 'object'), emailof.view('oneline'))
+            pemaileid = emailof.eid
+        else:
+            pemaileid = None
+        try:
+            # don't display the primary email owner a second time
+            emailof = 'use_email' in self.schema and entity.reverse_use_email or ()
+            emailof = [e for e in emailof if not e.eid == pemaileid]
+        except Unauthorized:
+            emailof = []
+        if emailof:
+            emailofstr = ', '.join(e.view('oneline') for e in emailof)
+            self.field(display_name(self.req, 'use_email', 'object'), emailofstr)
+
+    def render_entity_relations(self, entity, siderelations):
+        # alternate even/odd row styling for related emails
+        for i, email in enumerate(entity.related_emails(self.skipeids)):
+            self.w(u'<div class="%s">' % (i%2 and 'even' or 'odd'))
+            email.view('oneline', w=self.w, contexteid=entity.eid)
+            self.w(u'</div>')
+
+
+class EmailAddressShortPrimaryView(EmailAddressPrimaryView):
+    """short variant of the primary view, only displaying the address"""
+    accepts = ('EmailAddress',)
+    id = 'shortprimary'
+    title = None # hidden view
+    def render_entity_attributes(self, entity, siderelations):
+        """only render the address itself, as a <h5> title"""
+        self.w(u'<h5>')
+        entity.view('oneline', w=self.w)
+        self.w(u'</h5>')
+
+    
+class EmailAddressOneLineView(baseviews.OneLineView):
+    accepts = ('EmailAddress',)
+    
+    def cell_call(self, row, col, **kwargs):
+        entity = self.entity(row, col)
+        if entity.reverse_primary_email:
+            self.w(u'<b>')
+        if entity.alias:
+            self.w(u'%s &lt;' % html_escape(entity.alias))
+        self.w('<a href="%s">%s</a>' % (html_escape(entity.absolute_url()),
+                                        html_escape(entity.display_address())))
+        if entity.alias:
+            self.w(u'&gt;\n')
+        if entity.reverse_primary_email:
+            self.w(u'</b>')
+
+class EmailAddressMailToView(baseviews.OneLineView):
+    """A one line view that builds a user clickable URL for an email with
+    'mailto:'"""
+
+    id = 'mailto'
+    accepts = ('EmailAddress',)
+    
+    def cell_call(self, row, col, **kwargs):
+        entity = self.entity(row, col)
+        if entity.reverse_primary_email:
+            self.w(u'<b>')
+        if entity.alias:
+            mailto = u'%s <%s>' % (entity.alias, entity.display_address())
+        elif entity.reverse_use_email:
+            mailto = "mailto:%s <%s>" % \
+                (entity.reverse_use_email[0].dc_title(),
+                 entity.display_address())
+        else:
+            mailto = "mailto:%s" % entity.display_address()
+        self.w(u'<a href="%s">%s</a>' % (html_escape(mailto),
+                                         html_escape(entity.display_address())))
+            
+        if entity.alias:
+            self.w(u'&gt;\n')
+        if entity.reverse_primary_email:
+            self.w(u'</b>')
+
+    
+class EmailAddressTextView(baseviews.TextView):
+    """text view of an email address: the bare address only"""
+    accepts = ('EmailAddress',)
+    
+    def cell_call(self, row, col, **kwargs):
+        self.w(self.entity(row, col).display_address())
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/embedding.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,164 @@
+"""Objects interacting together to provide the external page embedding
+functionality.
+
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+import re
+from urlparse import urljoin
+from urllib2 import urlopen, Request, HTTPError
+
+from logilab.mtconverter import guess_encoding
+
+from cubicweb import urlquote # XXX should use view.url_quote method
+from cubicweb.interfaces import IEmbedable
+from cubicweb.common.uilib import soup2xhtml
+from cubicweb.common.selectors import (onelinerset_selector, score_entity_selector,
+                                    searchstate_selector, interface_selector)
+from cubicweb.common.view import NOINDEX, NOFOLLOW
+from cubicweb.web.controller import Controller
+from cubicweb.web.action import Action
+from cubicweb.web.views import basetemplates
+
+
+class ExternalTemplate(basetemplates.TheMainTemplate):
+    """template embedding an external web page into the CubicWeb web
+    interface: standard header / footer rendered around the given `body`
+    """
+    id = 'external'
+    
+    def call(self, body):
+        # XXX fallback to HTML 4 mode when embeding ?
+        self.set_request_content_type()
+        self.process_rql(self.req.form.get('rql'))
+        # force "normal" search state for the embedded page
+        self.req.search_state = ('normal',)
+        self.template_header(self.content_type, None, self.req._('external page'),
+                             [NOINDEX, NOFOLLOW])
+        self.content_header()
+        self.w(body)
+        self.content_footer()
+        self.template_footer()
+
+
+class EmbedController(Controller):
+    """controller fetching an external page (given by the 'url' form
+    parameter) and rendering it through the 'external' template, with its
+    links rewritten to stay in embed mode
+    """
+    id = 'embed'
+    template = 'external'
+
+    def publish(self, rset=None):
+        req = self.req
+        if 'custom_css' in req.form:
+            req.add_css(req.form['custom_css'])
+        embedded_url = req.form['url']
+        # embed-allowed: None or an object with a .match method (regexp)
+        # restricting which urls may be embedded
+        allowed = self.config['embed-allowed']
+        _ = req._
+        if allowed is None or not allowed.match(embedded_url):
+            body = '<h2>%s</h2><h3>%s</h3>' % (
+                _('error while embedding page'),
+                _('embedding this url is forbidden'))
+        else:
+            prefix = req.build_url(self.id, url='')
+            # forward the Authorization header to the embedded site if any
+            authorization = req.get_header('Authorization')
+            if authorization:
+                headers = {'Authorization' : authorization}
+            else:
+                headers = {}
+            try:
+                body = embed_external_page(embedded_url, prefix,
+                                           headers, req.form.get('custom_css'))
+                body = soup2xhtml(body, self.req.encoding)
+            except HTTPError, err:
+                body = '<h2>%s</h2><h3>%s</h3>' % (
+                    _('error while embedding page'), err)
+        return self.vreg.main_template(req, self.template, body=body)
+
+
+class EmbedAction(Action):
+    """display an 'embed' link on entities implementing the `embeded_url`
+    method, if the returned url matches the embedding configuration
+    """
+    id = 'embed'
+    controller = 'embed'
+    __selectors__ = (onelinerset_selector, searchstate_selector,
+                     interface_selector, score_entity_selector)
+    accepts_interfaces = (IEmbedable,)
+    
+    title = _('embed')
+        
+    @classmethod
+    def score_entity(cls, entity):
+        """return a score telling how well I can display the given 
+        entity instance (required by the value_selector)
+        """
+        url = entity.embeded_url()
+        if not url or not url.strip():
+            return 0
+        allowed = cls.config['embed-allowed']
+        if allowed is None or not allowed.match(url):
+            return 0
+        return 1
+    
+    def url(self, row=0):
+        """return the url to the embed controller for the entity at `row`,
+        propagating the current rql if any
+        """
+        entity = self.rset.get_entity(row, 0)
+        url = urljoin(self.req.base_url(), entity.embeded_url())
+        if self.req.form.has_key('rql'):
+            return self.build_url(url=url, rql=self.req.form['rql'])
+        return self.build_url(url=url)
+
+
+
+# functions doing necessary substitutions to embed an external html page ######
+
+
+BODY_RGX = re.compile('<body.*?>(.*?)</body>', re.I | re.S | re.U)
+HREF_RGX = re.compile('<a\s+href="([^"]*)"', re.I | re.S | re.U)
+SRC_RGX = re.compile('<img\s+src="([^"]*)"', re.I | re.S | re.U)
+
+
+class replace_href:
+    """regexp substitution callback rewriting <a href="..."> links so they
+    go through the embed controller (`prefix`), propagating `custom_css`
+    as an extra query parameter when set
+    """
+    def __init__(self, prefix, custom_css=None):
+        self.prefix = prefix
+        self.custom_css = custom_css
+        
+    def __call__(self, match):
+        original_url = match.group(1)
+        url = self.prefix + urlquote(original_url, safe='')
+        if self.custom_css is not None:
+            if '?' in url:
+                url = '%s&amp;custom_css=%s' % (url, self.custom_css)
+            else:
+                url = '%s?custom_css=%s' % (url, self.custom_css)
+        return '<a href="%s"' % url
+
+class absolutize_links:
+    """regexp substitution callback making relative urls absolute using the
+    embedded page's url; urls containing '://' are considered already
+    absolute and left unchanged
+    """
+    def __init__(self, embedded_url, tag, custom_css=None):
+        self.embedded_url = embedded_url
+        self.tag = tag
+        self.custom_css = custom_css
+    
+    def __call__(self, match):
+        original_url = match.group(1)
+        if '://' in original_url:
+            return match.group(0) # leave it unchanged
+        return '%s="%s"' % (self.tag, urljoin(self.embedded_url, original_url))
+
+
+def prefix_links(body, prefix, embedded_url, custom_css=None):
+    """rewrite links in `body`: make href and src urls absolute relative to
+    `embedded_url`, then redirect hrefs through the embed controller
+    (`prefix`)
+    """
+    filters = ((HREF_RGX, absolutize_links(embedded_url, '<a href', custom_css)),
+               (SRC_RGX, absolutize_links(embedded_url, '<img src')),
+               (HREF_RGX, replace_href(prefix, custom_css)))
+    for rgx, repl in filters:
+        body = rgx.sub(repl, body)
+    return body
+    
+def embed_external_page(url, prefix, headers=None, custom_css=None):
+    req = Request(url, headers=(headers or {}))
+    content = urlopen(req).read()
+    page_source = unicode(content, guess_encoding(content), 'replace')
+    page_source =page_source
+    match = BODY_RGX.search(page_source)
+    if match is None:
+        return page_source
+    return prefix_links(match.group(1), prefix, url, custom_css)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/eproperties.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,14 @@
+"""Specific views for EProperty
+
+
+:organization: Logilab
+:copyright: 2007-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from cubicweb.web.views import baseviews
+
+class EPropertyPrimaryView(baseviews.PrimaryView):
+    """primary view for EProperty entities"""
+    accepts = ('EProperty',)
+    # also display attributes whose value is None
+    skip_none = False
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/error.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,28 @@
+"""Set of HTML errors views. Error view are generally implemented
+as startup views and are used for standard error pages (404, 500, etc.)
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from cubicweb.common.view import StartupView
+
+class FourOhFour(StartupView):
+    """standard '404 not found' error page"""
+    id = '404'
+
+    def call(self):
+        _ = self.req._
+        self.w(u"<h1>%s</h1>" % _('this resource does not exist'))
+
+
+class ErrorOccured(StartupView):
+    """standard '500 internal error' page
+
+    NOTE(review): 'occured' is misspelled ('occurred') in both the class
+    name and the message, but the message is an i18n msgid and the class
+    name may be referenced elsewhere, so fixing the spelling requires a
+    coordinated change (translation catalogs included).
+    """
+    id = '500'
+
+    def call(self):
+        _ = self.req._
+        self.w(u"<h1>%s</h1>" %
+               _('an error occured, the request cannot be fulfilled'))
+    
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/euser.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,97 @@
+"""Specific views for users
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from logilab.common.decorators import cached
+
+from cubicweb.schema import display_name
+from cubicweb.web import INTERNAL_FIELD_VALUE
+from cubicweb.web.form import EntityForm
+from cubicweb.web.views.baseviews import PrimaryView
+
+class EUserPrimaryView(PrimaryView):
+    """primary view for EUser entities"""
+    accepts = ('EUser',)
+    # firstname/surname are skipped -- presumably because they already
+    # appear through content_title (entity.name()); confirm
+    skip_attrs = ('firstname', 'surname')
+    
+    def iter_relations(self, entity):
+        # don't want to display user's entities
+        for rschema, targetschemas, x in super(EUserPrimaryView, self).iter_relations(entity):
+            if x == 'object' and rschema.type in ('owned_by', 'for_user'):
+                continue
+            yield rschema, targetschemas, x
+
+    def content_title(self, entity):
+        # user's (full) name as main title
+        return entity.name()
+
+    def is_side_related(self, rschema, eschema):
+        # relations listed here are rendered in side boxes rather than inline
+        return  rschema.type in ['interested_in', 'tags', 
+                                 'todo_by', 'bookmarked_by',
+                                 ]
+
+
+class EditGroups(EntityForm):
+    """displays a simple euser / egroups editable table"""
+    
+    id = 'editgroups'
+    accepts = ('EUser',)
+    
+    def call(self):
+        self.req.add_css('cubicweb.acl.css')            
+        _ = self.req._
+        self.w(u'<form id="editgroup" method="post" action="edit">')
+        self.w(u'<table id="groupedit">\n')
+        self.w(u'<tr>')
+        self.w(u'<th>%s</th>' % display_name(self.req, 'EUser'))
+        self.w(u''.join(u'<th>%s</th>' % _(gname) for geid, gname in self.egroups))
+        self.w(u'</tr>')
+        for row in xrange(len(self.rset)):
+            self.build_table_line(row)
+        self.w(u'</table>')
+        self.w(u'<fieldset>')
+        self.w(self.button_cancel())
+        self.w(self.button_ok())
+        self.w(u'</fieldset>')
+        self.w(u'</form>')
+
+
+    def build_table_line(self, row):
+        euser = self.entity(row)
+        euser_groups = [group.name for group in euser.in_group]
+        if euser_groups:
+            self.w(u'<tr>')
+        else:
+            self.w(u'<tr class="nogroup">')
+        self.w(u'<th><fieldset>')
+        self.w(u'<input type="hidden" name="eid" value="%s" />' % euser.eid)
+        self.w(u'<input type="hidden" name="__type:%s" value="EUser" />' % euser.eid)
+        # this should not occur (for now) since in_group relation is mandatory
+        if not euser_groups:
+            self.w(u'<input type="hidden" name="edits-in_group:%s" value="%s">' %
+                   (euser.eid, INTERNAL_FIELD_VALUE))
+        self.w(euser.dc_title())
+        self.w(u'</fieldset></th>')
+        for geid, gname in self.egroups:
+            self.w(u'<td><fieldset>')
+            if gname in euser_groups:
+                self.w(u'<input type="hidden" name="edits-in_group:%s" value="%s" />' %
+                       (euser.eid, geid))
+                self.w(u'<input type="checkbox" name="in_group:%s" value="%s" checked="checked" />' %
+                       (euser.eid, geid))
+            else:
+                self.w(u'<input type="checkbox" name="in_group:%s" value="%s" />' %
+                       (euser.eid, geid))
+            self.w(u'</fieldset></td>')
+        self.w(u'</tr>\n')
+
+        
+    @property
+    @cached
+    def egroups(self):
+        groups = self.req.execute('Any G, N ORDERBY N WHERE G is EGroup, G name N')
+        return [(geid, gname) for geid, gname in groups.rows if gname != 'owners']
+                
+        
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/facets.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,161 @@
+"""the facets box and some basic facets
+
+:organization: Logilab
+:copyright: 2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from simplejson import dumps
+
+from logilab.mtconverter import html_escape
+
+from cubicweb.common.selectors import (chainfirst, chainall, nfentity_selector,
+                                    twolinerset_selector, contextprop_selector,
+                                    yes_selector, one_has_relation_selector)
+from cubicweb.web.box import BoxTemplate
+from cubicweb.web.facet import (AbstractFacet, VocabularyFacet, FacetStringWidget,
+                             RelationFacet, prepare_facets_rqlst, filter_hiddens)
+
+def contextview_selector(cls, req, rset, row=None, col=None, view=None,
+                         **kwargs):
+    """selector scoring 1 when the current view exposes filter-box context
+    information (a truthy filter_box_context_info()), 0 otherwise
+    """
+    if view and getattr(view, 'filter_box_context_info', lambda: None)():
+        return 1
+    return 0    
+
+
+class FilterBox(BoxTemplate):
+    """filter results of a query"""
+    id = 'filter_box'
+    __selectors__ = (chainfirst(contextview_selector,
+                                chainall(nfentity_selector, twolinerset_selector)),
+                     contextprop_selector)
+    context = 'left'
+    title = _('boxes_filter_box')
+    visible = True # functionality provided by the search box by default
+    order = 1
+
+    def facetargs(self):
+        """this method returns the list of extra arguments that should
+        be used by the facet
+        """
+        return {}
+        
+    def _get_context(self, view):
+        context = getattr(view, 'filter_box_context_info', lambda: None)()
+        if context:
+            rset, vid, divid, paginate = context
+        else:
+            rset = self.rset
+            vid, divid = None, 'pageContent'
+            paginate = view.need_navigation
+        return rset, vid, divid, paginate
+        
+    def call(self, view=None):
+        self.req.add_js( ('cubicweb.ajax.js', 'cubicweb.formfilter.js') )
+        rset, vid, divid, paginate = self._get_context(view)
+        if rset.rowcount < 2: # XXX done by selectors, though maybe necessary when rset has been hijacked
+            return
+        if vid is None:
+            vid = self.req.form.get('vid')
+        rqlst = rset.syntax_tree()
+        rqlst.save_state()
+        try:
+            mainvar, baserql = prepare_facets_rqlst(rqlst, rset.args)
+            widgets = []
+            for facet in self.get_facets(rset, mainvar):
+                if facet.propval('visible'):
+                    wdg = facet.get_widget()
+                    if wdg is not None:
+                        widgets.append(wdg)
+            if not widgets:
+                return
+            w = self.w
+            w(u'<form method="post" id="%sForm" cubicweb:facetargs="%s" action="">'  % (
+                divid, html_escape(dumps([divid, vid, paginate, self.facetargs()]))))
+            w(u'<fieldset>')
+            hiddens = {'facets': ','.join(wdg.facet.id for wdg in widgets),
+                       'baserql': baserql}
+            for param in ('subvid', 'vtitle'):
+                if param in self.req.form:
+                    hiddens[param] = self.req.form[param]
+            filter_hiddens(w, **hiddens)
+            for wdg in widgets:
+                wdg.render(w=self.w)
+            w(u'</fieldset>\n</form>\n')
+        finally:
+            rqlst.recover()
+            print 'after facets', rqlst
+
+    def get_facets(self, rset, mainvar):
+        return self.vreg.possible_vobjects('facets', self.req, rset,
+                                           context='facetbox',
+                                           filtered_variable=mainvar)
+        
+# facets ######################################################################
+
+class CreatedByFacet(RelationFacet):
+    """filter entities on the login of the user who created them"""
+    id = 'created_by-facet'
+    rtype = 'created_by'
+    target_attr = 'login'
+
+class InGroupFacet(RelationFacet):
+    """filter users on the name of the group they belong to"""
+    id = 'in_group-facet'
+    rtype = 'in_group'
+    target_attr = 'name'
+
+class InStateFacet(RelationFacet):
+    """filter entities on the name of their workflow state"""
+    id = 'in_state-facet'
+    rtype = 'in_state'
+    target_attr = 'name'
+
+# inherit from RelationFacet to benefit from its possible_values implementation
+class ETypeFacet(RelationFacet):
+    """facet filtering on the entity type of the result set rows"""
+    id = 'etype-facet'
+    __selectors__ = (yes_selector,)
+    order = 1
+    rtype = 'is'
+    target_attr = 'name'
+
+    @property
+    def title(self):
+        # translated lazily since it needs the request
+        return self.req._('entity type')
+
+    def vocabulary(self):
+        """return vocabulary for this facet, eg a list of 2-uple (label, value)
+        """
+        # types actually present in the first column of the result set
+        etypes = self.rset.column_types(0)
+        return sorted((self.req._(etype), etype) for etype in etypes)
+    
+    def add_rql_restrictions(self):
+        """add restriction for this facet into the rql syntax tree"""
+        value = self.req.form.get(self.id)
+        if not value:
+            return
+        self.rqlst.add_type_restriction(self.filtered_variable, value)
+
+
+class HasTextFacet(AbstractFacet):
+    """full-text search facet, rendered as a free-text input widget"""
+    __selectors__ = (one_has_relation_selector, contextprop_selector)
+    id = 'has_text-facet'
+    rtype = 'has_text'
+    role = 'subject'
+    # displayed first among facets
+    order = 0
+    @property
+    def title(self):
+        return self.req._('has_text')
+    
+    def get_widget(self):
+        """return the widget instance to use to display this facet
+
+        default implentation expects a .vocabulary method on the facet and
+        return a combobox displaying this vocabulary
+        """
+        # free text input rather than a vocabulary-based widget
+        return FacetStringWidget(self)
+
+    def add_rql_restrictions(self):
+        """add restriction for this facet into the rql syntax tree"""
+        value = self.req.form.get(self.id)
+        if not value:
+            return
+        self.rqlst.add_constant_restriction(self.filtered_variable, 'has_text', value, 'String')
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/ibreadcrumbs.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,88 @@
+"""navigation components definition for CubicWeb web client
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from logilab.mtconverter import html_escape
+
+from cubicweb.interfaces import IBreadCrumbs
+from cubicweb.common.selectors import (contextprop_selector, onelinerset_selector, 
+                                    interface_selector)
+from cubicweb.common.view import EntityView
+from cubicweb.common.uilib import cut
+# don't use AnyEntity since this may cause bug with isinstance() due to reloading
+from cubicweb.common.entity import Entity
+from cubicweb.web.component import EntityVComponent
+
+_ = unicode
+
+def bc_title(entity):
+    """return the entity's title, truncated to the user's preferred short
+    line size and HTML-escaped, for use in a breadcrumb"""
+    textsize = entity.req.property_value('navigation.short-line-size')
+    return html_escape(cut(entity.dc_title(), textsize))
+    
+
+class BreadCrumbEntityVComponent(EntityVComponent):
+    """component displaying a breadcrumb path (root > ... > entity) for
+    entities implementing IBreadCrumbs"""
+    id = 'breadcrumbs'
+    # register msg not generated since no entity implements IPrevNext in cubicweb itself
+    title = _('contentnavigation_breadcrumbs')
+    help = _('contentnavigation_breadcrumbs_description')
+    __selectors__ = (onelinerset_selector, contextprop_selector, interface_selector)
+    accepts_interfaces = (IBreadCrumbs,)
+    context = 'navtop'
+    order = 5
+    visible = False
+    separator = u'&nbsp;&gt;&nbsp;'
+
+    def call(self, view=None, first_separator=True):
+        entity = self.entity(0)
+        # path is provided by the IBreadCrumbs interface; items may be
+        # entities, (url, title) tuples or plain strings (see wpath_part)
+        path = entity.breadcrumbs(view)
+        if path:
+            self.w(u'<span class="pathbar">')
+            if first_separator:
+                self.w(self.separator)
+            root = path.pop(0)
+            if isinstance(root, Entity):
+                # link the root to the listing of entities of its type
+                self.w(u'<a href="%s">%s</a>' % (self.req.build_url(root.id),
+                                                 root.dc_type('plural')))
+                self.w(self.separator)
+            self.wpath_part(root, entity, not path)
+            for i, parent in enumerate(path):
+                self.w(self.separator)
+                self.w(u"\n")
+                self.wpath_part(parent, entity, i == len(path) - 1)
+            self.w(u'</span>')
+            
+    def wpath_part(self, part, contextentity, last=False):
+        """write one breadcrumb element; `last` flags the final element,
+        which is rendered as plain text when it is the context entity"""
+        if isinstance(part, Entity):
+            if last and part.eid == contextentity.eid:
+                self.w(bc_title(part))
+            else:
+                part.view('breadcrumbs', w=self.w)
+        elif isinstance(part, tuple):
+            url, title = part
+            textsize = self.req.property_value('navigation.short-line-size')
+            self.w(u'<a href="%s">%s</a>' % (
+                html_escape(url), html_escape(cut(title, textsize))))
+        else:
+            # plain string: truncated text, no link
+            textsize = self.req.property_value('navigation.short-line-size')
+            self.w(cut(unicode(part), textsize))
+        
+
+class BreadCrumbComponent(BreadCrumbEntityVComponent):
+    """same breadcrumb, registered as a generic component (visible by
+    default, no context-property selector)"""
+    __registry__ = 'components'
+    __selectors__ = (onelinerset_selector, interface_selector)
+    visible = True
+
+
+class BreadCrumbView(EntityView):
+    """render one entity as a breadcrumb element: a link titled with its
+    (truncated) description"""
+    id = 'breadcrumbs'
+
+    def cell_call(self, row, col):
+        entity = self.entity(row, col)
+        desc = cut(entity.dc_description(), 50)
+        self.w(u'<a href="%s" title="%s">%s</a>' % (html_escape(entity.absolute_url()),
+                                                    html_escape(desc),
+                                                    bc_title(entity)))
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/idownloadable.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,154 @@
+"""Specific views for entities implementing IDownloadable
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from logilab.mtconverter import BINARY_ENCODINGS, TransformError, html_escape
+
+from cubicweb.interfaces import IDownloadable
+from cubicweb.common.mttransforms import ENGINE
+from cubicweb.common.selectors import (onelinerset_selector, score_entity_selector,
+                                    interface_selector)
+from cubicweb.web.views import baseviews
+
+_ = unicode
+
+
+def download_box(w, entity):
+    w(u'<div class="sideRelated">')
+    w(u'<div class="sideBoxTitle downloadBoxTitle"><span>%s</span></div>' % _('download'))
+    w(u'<div class="sideBox downloadBox"><div class="sideBoxBody">')
+    w(u'<a href="%s"><img src="%s" alt="%s"/> %s</a>'
+      % (html_escape(entity.download_url()),
+         entity.req.external_resource('DOWNLOAD_ICON'),
+         _('download icon'), html_escape(entity.dc_title())))
+    w(u'</div>')
+    w(u'</div>\n</div>\n')
+
+class DownloadView(baseviews.EntityView):
+    """this view is replacing the deprecated 'download' controller and allow downloading
+    of entities providing the necessary interface
+    """
+    id = 'download'
+    __selectors__ = (onelinerset_selector, interface_selector)
+    accepts_interfaces = (IDownloadable,)
+
+    # raw content: no template, binary stream
+    templatable = False
+    content_type = 'application/octet-stream'
+    binary = True
+    add_to_breadcrumbs = False
+
+    def set_request_content_type(self):
+        """overriden to set the correct filetype and filename"""
+        entity = self.complete_entity(0)
+        encoding = entity.download_encoding()
+        if encoding in BINARY_ENCODINGS:
+            # compressed content: serve as application/<encoding> with no
+            # content-encoding so the browser saves the file as-is
+            contenttype = 'application/%s' % encoding
+            encoding = None
+        else:
+            contenttype = entity.download_content_type()
+        self.req.set_content_type(contenttype or self.content_type,
+                                  filename=entity.download_file_name(),
+                                  encoding=encoding)
+
+    def call(self):
+        # write the raw data of the first entity in the result set
+        self.w(self.complete_entity(0).download_data())
+
+
+class DownloadLinkView(baseviews.EntityView):
+    """view displaying a link to download the file"""
+    id = 'downloadlink'
+    title = None # should not be listed in possible views
+    __selectors__ = (interface_selector,)
+
+    accepts_interfaces = (IDownloadable,)
+    
+    def cell_call(self, row, col, title=None, **kwargs):
+        # anchor text defaults to the entity's title
+        entity = self.entity(row, col)
+        url = html_escape(entity.download_url())
+        self.w(u'<a href="%s">%s</a>' % (url, html_escape(title or entity.dc_title())))
+
+
+                                                                                
+class IDownloadablePrimaryView(baseviews.PrimaryView):
+    """primary view for downloadable entities: inlines the content when it
+    is an image or transformable to HTML, and adds a download side box"""
+    __selectors__ = (interface_selector,)
+    #skip_attrs = ('eid', 'data',) # XXX
+    accepts_interfaces = (IDownloadable,)
+
+    def render_entity_title(self, entity):
+        self.w(u'<h1>%s %s</h1>'
+               % (entity.dc_type().capitalize(),
+                  html_escape(entity.dc_title())))
+    
+    def render_entity_attributes(self, entity, siderelations):
+        super(IDownloadablePrimaryView, self).render_entity_attributes(entity, siderelations)
+        self.wview('downloadlink', entity.rset, title=self.req._('download'), row=entity.row)
+        self.w(u'<div class="content">')
+        contenttype = entity.download_content_type()
+        if contenttype.startswith('image/'):
+            self.wview('image', entity.rset, row=entity.row)
+        else:
+            try:
+                # only inline content the transformation engine can render
+                if ENGINE.has_input(contenttype):
+                    self.w(entity.printable_value('data'))
+            except TransformError:
+                # transformation failed: silently skip inline rendering
+                pass
+            except Exception, ex:
+                msg = self.req._("can't display data, unexpected error: %s") % ex
+                self.w('<div class="error">%s</div>' % msg)
+        self.w(u'</div>')
+            
+    def is_side_related(self, rschema, eschema):
+        """display all relations as side related"""
+        return True
+
+
+    def render_side_related(self, entity, siderelations):
+        # prepend the download box to the regular side boxes
+        download_box(self.w, entity)
+        super(IDownloadablePrimaryView, self).render_side_related(entity, siderelations)
+
+class IDownloadableLineView(baseviews.OneLineView):
+    """one-line view: link to the entity plus a [download] link"""
+    __selectors__ = (interface_selector,)
+    # don't kick default oneline view
+    accepts_interfaces = (IDownloadable,)
+    
+
+    def cell_call(self, row, col, title=None, **kwargs):
+        """the secondary view is a link to download the file"""
+        entity = self.entity(row, col)
+        url = html_escape(entity.absolute_url())
+        name = html_escape(entity.download_file_name())
+        durl = html_escape(entity.download_url())
+        self.w(u'<a href="%s">%s</a> [<a href="%s">%s</a>]' %
+               (url, name, durl, self.req._('download')))
+
+
+class ImageView(baseviews.EntityView):
+    """inline display of downloadable entities whose content type is an
+    image (selected through score_entity)"""
+    __selectors__ = (interface_selector, score_entity_selector)
+    id = 'image'
+    title = _('image')
+    accepts_interfaces = (IDownloadable,)
+    
+    def call(self):
+        # render each row through cell_call, each in its own div
+        rset = self.rset
+        for i in xrange(len(rset)):
+            self.w(u'<div class="efile">')
+            self.wview(self.id, rset, row=i, col=0)
+            self.w(u'</div>')
+
+    @classmethod
+    def score_entity(cls, entity):
+        # only selectable when the content type is image/*
+        mt = entity.download_content_type()
+        if not (mt and mt.startswith('image/')):
+            return 0
+        return 1
+    
+    def cell_call(self, row, col):
+        entity = self.entity(row, col)
+        #if entity.data_format.startswith('image/'):
+        self.w(u'<img src="%s" alt="%s"/>' % (html_escape(entity.download_url()),
+                                              html_escape(entity.download_file_name())))
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/igeocodable.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,86 @@
+# -*- coding: utf-8 -*-
+
+import simplejson
+
+from cubicweb.interfaces import IGeocodable
+from cubicweb.common.view import EntityView
+from cubicweb.common.selectors import interface_selector
+
+class GeocodingJsonView(EntityView):
+    id = 'geocoding-json'
+    binary = True
+    templatable = False
+    content_type = 'application/json'
+
+    __selectors__ = (interface_selector,)
+    accepts_interfaces = (IGeocodable,)
+    
+    def call(self):
+        extraparams = self.req.form.copy()
+        extraparams.pop('vid', None)
+        extraparams.pop('rql', None)
+        markers = [self.build_marker_data(rowidx, extraparams)
+                   for rowidx in xrange(len(self.rset))]
+        center = {
+            'latitude': sum(marker['latitude'] for marker in markers) / len(markers),
+            'longitude': sum(marker['longitude'] for marker in markers) / len(markers),
+            }
+        geodata = {
+            'center': center,
+            'markers': markers,
+            }
+        self.w(simplejson.dumps(geodata))
+        
+    def build_marker_data(self, row, extraparams):
+        entity = self.entity(row, 0)
+        return {'latitude': entity.latitude, 'longitude': entity.longitude,
+                'title': entity.dc_long_title(),
+                #icon defines : (icon._url, icon.size,  icon.iconAncho', icon.shadow)
+                'icon': entity.marker_icon() or (self.req.external_resource('GMARKER_ICON'), (20, 34), (4, 34), None), 
+                'bubbleUrl': entity.absolute_url(vid='gmap-bubble', __notemplate=1, **extraparams),
+                }
+
+
+class GoogleMapBubbleView(EntityView):
+    """content of the info bubble attached to a map marker"""
+    id = 'gmap-bubble'
+    
+    __selectors__ = (interface_selector,)
+    accepts_interfaces = (IGeocodable,)
+    
+    def cell_call(self, row, col):
+        entity = self.entity(row, col)
+        self.w(u'<div>%s</div>' % entity.view('oneline'))
+        # FIXME: we should call something like address-view if available
+        
+
+class GoogleMapsView(EntityView):
+    id = 'gmap-view'
+    
+    __selectors__ = (interface_selector,)
+    accepts_interfaces = (IGeocodable,)
+    need_navigation = False
+    
+    def call(self, gmap_key, width=400, height=400, uselabel=True, urlparams=None):
+        self.req.add_js('http://maps.google.com/maps?file=api&amp;v=2&amp;key=%s' % gmap_key,
+                        localfile=False);
+        self.req.add_js( ('cubicweb.widgets.js', 'cubicweb.gmap.js', 'gmap.utility.labeledmarker.js') )
+        rql = self.rset.printable_rql()
+        if urlparams is None:
+            loadurl = self.build_url(rql=rql, vid='geocoding-json')
+        else:
+            loadurl = self.build_url(rql=rql, vid='geocoding-json', **urlparams)
+        self.w(u'<div style="width: %spx; height: %spx;" class="widget gmap" '
+               u'cubicweb:wdgtype="GMapWidget" cubicweb:loadtype="auto" ' 
+               u'cubicweb:loadurl="%s" cubicweb:uselabel="%s"> </div>' % (width, height, loadurl, uselabel))
+
+        
+class GoogeMapsLegend(EntityView):
+    """ordered-list legend matching the map's markers
+
+    NOTE(review): class name is misspelled ('Googe' for 'Google'); renaming
+    is deferred since the class may be referenced by name elsewhere.
+    """
+    id = 'gmap-legend'
+    
+    def call(self):
+        self.w(u'<ol>')
+        for rowidx in xrange(len(self.rset)):
+            self.w(u'<li>')
+            self.wview('listitem', self.rset, row=rowidx, col=0)
+            self.w(u'</li>')
+        self.w(u'</ol>')
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/iprogress.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,195 @@
+"""Specific views for entities implementing IProgress
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+
+__docformat__ = "restructuredtext en"
+
+from logilab.mtconverter import html_escape
+
+from cubicweb.interfaces import IProgress, IMileStone
+from cubicweb.schema import display_name
+from cubicweb.common.view import EntityView
+from cubicweb.common.selectors import interface_selector, accept_selector
+from cubicweb.web.htmlwidgets import ProgressBarWidget
+
+
+class ProgressTableView(EntityView):
+    """The progress table view is able to display progress information
+    of any object implement IMileStone.
+
+    The default layout is composoed of 7 columns : parent task,
+    milestone, state, estimated date, cost, progressbar, and todo_by
+
+    The view accepts an optional ``columns`` paramater that lets you
+    remove or reorder some of those columns.
+
+    To add new columns, you should extend this class, define a new
+    ``columns`` class attribute and implement corresponding
+    build_COLNAME_cell methods
+
+    header_for_COLNAME methods allow to customize header's label
+    """
+    
+    id = 'progress_table_view'
+    title = _('task progression')
+    __selectors__ = (accept_selector, interface_selector)
+
+    accepts_interfaces = (IMileStone,)
+
+    # default columns of the table
+    columns = (_('project'), _('milestone'), _('state'), _('eta_date'),
+               _('cost'), _('progress'), _('todo_by'))
+
+
+    def call(self, columns=None):
+        """displays all versions in a table"""
+        self.req.add_css('cubicweb.iprogress.css')
+        _ = self.req._
+        self.columns = columns or self.columns
+        ecls = self.vreg.etype_class(self.rset.description[0][0])
+        self.w(u'<table class="progress">')
+        self.table_header(ecls)
+        self.w(u'<tbody>')
+        for row in xrange(self.rset.rowcount):
+            self.cell_call(row=row, col=0)
+        self.w(u'</tbody>')
+        self.w(u'</table>')
+
+    def cell_call(self, row, col):
+        _ = self.req._
+        entity = self.entity(row, col)
+        infos = {}
+        for col in self.columns:
+            meth = getattr(self, 'build_%s_cell' % col, None)
+            # find the build method or try to find matching attribute
+            if meth:
+                content = meth(entity)
+            else:
+                content = entity.printable_value(col)
+            infos[col] = content
+        if hasattr(entity, 'progress_class'):
+            cssclass = entity.progress_class()
+        else:
+            cssclass = u''
+        self.w(u"""<tr class="%s" onmouseover="addElementClass(this, 'highlighted');"
+            onmouseout="removeElementClass(this, 'highlighted')">""" % cssclass)
+        line = u''.join(u'<td>%%(%s)s</td>' % col for col in self.columns)
+        self.w(line % infos)
+        self.w(u'</tr>\n')
+
+    ## header management ######################################################
+
+    def header_for_project(self, ecls):
+        """use entity's parent type as label"""
+        return display_name(self.req, ecls.parent_type)
+
+    def header_for_milestone(self, ecls):
+        """use entity's type as label"""
+        return display_name(self.req, ecls.id)
+    
+    def table_header(self, ecls):
+        """builds the table's header"""
+        self.w(u'<thead><tr>')
+        _ = self.req._
+        for column in self.columns:
+            meth = getattr(self, 'header_for_%s' % column, None)
+            if meth:
+                colname = meth(ecls)
+            else:
+                colname = _(column)
+            self.w(u'<th>%s</th>' % html_escape(colname))
+        self.w(u'</tr></thead>\n')
+
+    
+    ## cell management ########################################################
+    def build_project_cell(self, entity):
+        """``project`` column cell renderer"""
+        project = entity.get_main_task()
+        if project:
+            return project.view('incontext')
+        return self.req._('no related project')
+
+    def build_milestone_cell(self, entity):
+        """``milestone`` column cell renderer"""
+        return entity.view('incontext')
+
+    def build_state_cell(self, entity):
+        """``state`` column cell renderer"""
+        return html_escape(self.req._(entity.state))
+    
+    def build_eta_date_cell(self, entity):
+        """``eta_date`` column cell renderer"""
+        if entity.finished():
+            return self.format_date(entity.completion_date())
+        formated_date = self.format_date(entity.initial_prevision_date())
+        if entity.in_progress():
+            eta_date = self.format_date(entity.eta_date())
+            _ = self.req._
+            if formated_date:
+                formated_date += u' (%s %s)' % (_('expected:'), eta_date)
+            else:
+                formated_date = u'%s %s' % (_('expected:'), eta_date)
+        return formated_date
+    
+    def build_todo_by_cell(self, entity):
+        """``todo_by`` column cell renderer"""
+        return u', '.join(p.view('outofcontext') for p in entity.contractors())
+
+    def build_cost_cell(self, entity):
+        """``cost`` column cell renderer"""
+        _ = self.req._
+        pinfo = entity.progress_info()
+        totalcost = pinfo.get('estimatedcorrected', pinfo['estimated'])
+        missing = pinfo.get('notestimatedcorrected', pinfo.get('notestimated', 0))
+        costdescr = []
+        if missing:
+            # XXX: link to unestimated entities
+            costdescr.append(_('%s not estimated') % missing)
+        estimated = pinfo['estimated']
+        if estimated and estimated != totalcost:
+            costdescr.append(_('initial estimation %s') % estimated)
+        if costdescr:
+            return u'%s (%s)' % (totalcost, ', '.join(costdescr))
+        return unicode(totalcost)
+    
+    def build_progress_cell(self, entity):
+        """``progress`` column cell renderer"""
+        progress =  u'<div class="progress_data">%s (%.2f%%)</div>' % (
+            entity.done, entity.progress())
+        return progress + entity.view('progressbar')
+
+
+class InContextProgressTableView(ProgressTableView):
+    """this views redirects to ``progress_table_view`` but removes
+    the ``project`` column
+    """
+    id = 'ic_progress_table_view'
+    
+    def call(self):
+        # delegate to the registered progress_table_view with the
+        # 'project' column filtered out of its column list
+        view = self.vreg.select_view('progress_table_view', self.req, self.rset)
+        columns = list(view.columns)
+        try:
+            columns.remove('project')
+        except ValueError:
+            self.info('[ic_progress_table_view] could not remove project from columns')
+        view.dispatch(w=self.w, columns=columns)
+
+
+class ProgressBarView(EntityView):
+    """displays a progress bar"""
+    id = 'progressbar'
+    title = _('progress bar')
+    __selectors__ = (accept_selector, interface_selector)
+
+    accepts_interfaces = (IProgress,)
+
+    def cell_call(self, row, col):
+        self.req.add_css('cubicweb.iprogress.css')
+        entity = self.entity(row, col)
+        # widget renders done/todo against the revised cost estimate
+        widget = ProgressBarWidget(entity.done, entity.todo,
+                                   entity.revised_cost)
+        self.w(widget.render())
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/magicsearch.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,424 @@
+"""a query preprocesser to handle quick search shortcuts for cubicweb
+
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+
+__docformat__ = "restructuredtext en"
+
+import re
+from logging import getLogger
+
+from rql import RQLSyntaxError, BadRQLQuery, parse
+from rql.nodes import Relation
+
+from cubicweb import Unauthorized
+from cubicweb.common.appobject import Component, SingletonComponent
+
+LOGGER = getLogger('cubicweb.magicsearch')
+
+def _get_approriate_translation(translations_found, eschema):
+    """return the first (should be the only one) possible translation according
+    to the given entity type
+
+    Returns None when none of the candidate translations is an actual
+    relation of `eschema`.
+    """
+    # NOTE(review): function name is misspelled ('approriate'); kept as is
+    # since it is called from several other places in this module
+    # get the list of all attributes / relations for this kind of entity
+    existing_relations = set(eschema.subject_relations())
+    # keep only the candidates which actually exist on this entity type
+    consistent_translations = translations_found & existing_relations
+    if len(consistent_translations) == 0:
+        return None
+    return consistent_translations.pop()
+
+
+def translate_rql_tree(rqlst, translations, schema):
+    """Try to translate each relation in the RQL syntax tree
+
+    :type rqlst: `rql.stmts.Statement`
+    :param rqlst: the RQL syntax tree (modified in place)
+
+    :type translations: dict
+    :param translations: the reverted l10n dict
+
+    :type schema: `cubicweb.schema.Schema`
+    :param schema: the application's schema    
+    """
+    # var_types is used as a map : var_name / var_type
+    vartypes = {}
+    # ambiguous_nodes is used as a map : relation_node / (var_name, available_translations)
+    ambiguous_nodes = {}
+    # For each relation node, check if it's a localized relation name
+    # If it's a localized name, then use the original relation name, else
+    # keep the existing relation name
+    for relation in rqlst.get_nodes(Relation):
+        rtype = relation.r_type
+        lhs, rhs = relation.get_variable_parts()
+        if rtype == 'is':
+            try:
+                etype = translations[rhs.value]
+                rhs.value = etype
+            except KeyError:
+                # If no translation found, leave the entity type as is
+                etype = rhs.value
+            # Memorize variable's type
+            vartypes[lhs.name] = etype
+        else:
+            try:
+                translation_set = translations[rtype]
+            except KeyError:
+                pass # If no translation found, leave the relation type as is
+            else:
+                # Only one possible translation, no ambiguity
+                if len(translation_set) == 1:
+                    relation.r_type = iter(translations[rtype]).next()
+                # More than 1 possible translation => resolve it later
+                else:
+                    ambiguous_nodes[relation] = (lhs.name, translation_set)
+    # second pass: use the recorded variable types to disambiguate relations
+    # whose translation maps to several possible relation types
+    if ambiguous_nodes:
+        resolve_ambiguities(vartypes, ambiguous_nodes, schema)
+
+
+def resolve_ambiguities(var_types, ambiguous_nodes, schema):
+    """Tries to resolve remaining ambiguities for translation
+    /!\ An ambiguity is when two different string can be localized with
+        the same string
+    A simple example:
+      - 'name' in a company context will be localized as 'nom' in French
+      - but ... 'surname' will also be localized as 'nom'
+
+    :type var_types: dict
+    :param var_types: a map : var_name / var_type
+
+    :type ambiguous_nodes: dict
+    :param ambiguous_nodes: a map : relation_node / (var_name, available_translations)
+
+    :type schema: `cubicweb.schema.Schema`
+    :param schema: the application's schema
+    """
+    # Now, try to resolve ambiguous translations
+    for relation, (var_name, translations_found) in ambiguous_nodes.items():
+        try:
+            vartype = var_types[var_name]
+        except KeyError:
+            # the variable's type is unknown (no 'is' relation), give up
+            continue
+        # Get schema for this entity type
+        eschema = schema.eschema(vartype)
+        rtype = _get_approriate_translation(translations_found, eschema)
+        if rtype is None:
+            # no candidate matches an actual relation, leave node untouched
+            continue
+        relation.r_type = rtype
+    
+
+
+# matches an optional prefix, then a single- or double-quoted part
+QUOTED_SRE = re.compile(r'(.*?)(["\'])(.+?)\2')
+
+# lang -> reverted translation map cache, filled lazily by trmap()
+TRANSLATION_MAPS = {}
+def trmap(config, schema, lang):
+    """return (building it if necessary) the reverted translation map for
+    `lang`: localized entity / relation name -> schema name(s)
+    """
+    try:
+        return TRANSLATION_MAPS[lang]
+    except KeyError:
+        assert lang in config.translations, '%s %s' % (lang, config.translations)
+        tr = config.translations[lang]
+        langmap = {}
+        # an entity type maps to a single name, capitalized
+        for etype in schema.entities():
+            etype = str(etype)
+            langmap[tr(etype).capitalize()] = etype
+            langmap[etype.capitalize()] = etype
+        # a translated relation name may correspond to several relation
+        # types, hence values are sets here
+        for rtype in schema.relations():
+            rtype = str(rtype)
+            langmap.setdefault(tr(rtype).lower(), set()).add(rtype)
+            langmap.setdefault(rtype, set()).add(rtype)
+        # NOTE(review): cache is keyed on lang only, so it assumes a single
+        # config/schema per process -- confirm
+        TRANSLATION_MAPS[lang] = langmap
+        return langmap
+
+
+class BaseQueryProcessor(Component):
+    """abstract base class for query processors: subclasses are tried in
+    `priority` order until one of them manages to execute the query
+    """
+    __abstract__ = True
+    id = 'magicsearch_processor'
+    # set something if you want explicit component search facility for the
+    # component
+    name = None
+
+    def process_query(self, uquery, req):
+        # translate the user query into req.execute arguments, then run it
+        args = self.preprocess_query(uquery, req)
+        try:
+            return req.execute(*args)
+        finally:
+            # rollback necessary to avoid leaving the connection in a bad state
+            req.cnx.rollback() 
+
+    def preprocess_query(self, uquery, req):
+        """return the arguments tuple for req.execute; subclasses must
+        implement this
+        """
+        raise NotImplementedError()
+
+
+
+
+class DoNotPreprocess(BaseQueryProcessor):
+    """this one returns the raw query and should be placed in first position
+    of the chain
+    """
+    name = 'rql'
+    priority = 0
+    def preprocess_query(self, uquery, req):
+        # raw RQL, pass it through untouched (1-uple expected by the caller)
+        return uquery,
+    
+
+class QueryTranslator(BaseQueryProcessor):
+    """ parses through rql and translates into schema language entity names 
+    and attributes
+    """
+    priority = 2
+    def preprocess_query(self, uquery, req):
+        try:
+            rqlst = parse(uquery, print_errors=False)
+        except (RQLSyntaxError, BadRQLQuery), err:
+            # not valid RQL: give the query back untouched and let a later
+            # processor in the chain handle it
+            return uquery,
+        schema = self.vreg.schema
+        # rql syntax tree will be modified in place if necessary
+        translate_rql_tree(rqlst, trmap(self.config, schema, req.lang), schema)
+        return rqlst.as_string(),
+
+
+class QSPreProcessor(BaseQueryProcessor):
+    """Quick search preprocessor
+
+    preprocessing query in shortcut form to their RQL form
+    """
+    priority = 4
+    
+    def preprocess_query(self, uquery, req):
+        """dispatch according to the query's shape: quoted part first if
+        any, else on the number of whitespace separated words
+        """
+        args = None
+        self.req = req
+        try:
+            # Process as if there was a quoted part
+            args = self._quoted_words_query(uquery)
+        ## No quoted part  
+        except BadRQLQuery:
+            words = uquery.split()
+            if len(words) == 1:
+                args = self._one_word_query(*words)
+            elif len(words) == 2:
+                args = self._two_words_query(*words)
+            elif len(words) == 3:
+                args = self._three_words_query(*words)
+            else:
+                args = self._multiple_words_query(words)
+        return args
+    
+    def _get_entity_type(self, word):
+        """check if the given word is matching an entity type, return it if
+        it's the case or raise BadRQLQuery if not
+        """
+        etype = word.capitalize()
+        try:
+            return trmap(self.config, self.vreg.schema, self.req.lang)[etype]
+        except KeyError:
+            raise BadRQLQuery('%s is not a valid entity name' % etype)        
+
+    def _get_attribute_name(self, word, eschema):
+        """check if the given word is matching an attribute of the given entity type,
+        return it normalized if found or return it untransformed else
+        """
+        """Returns the attributes's name as stored in the DB"""
+        # Need to convert from unicode to string (could be whatever)
+        rtype = word.lower()
+        # Find the entity name as stored in the DB
+        translations = trmap(self.config, self.vreg.schema, self.req.lang)
+        try:
+            translations = translations[rtype]
+        except KeyError:
+            raise BadRQLQuery('%s is not a valid attribute for %s entity type'
+                              % (word, eschema))
+        # disambiguate when several relation types share this translation
+        rtype = _get_approriate_translation(translations, eschema)
+        if rtype is None:
+            raise BadRQLQuery('%s is not a valid attribute for %s entity type'
+                              % (word, eschema))
+        return rtype
+
+    def _one_word_query(self, word):
+        """Specific process for one word query (case (1) of preprocess_rql)
+        """
+        # if this is an integer, then directly go to eid
+        try:
+            eid = int(word)
+            return 'Any X WHERE X eid %(x)s', {'x': eid}, 'x'
+        except ValueError:
+            etype = self._get_entity_type(word)
+            return '%s %s' % (etype, etype[0]),
+
+    def _complete_rql(self, searchstr, etype, rtype=None, var=None, searchattr=None):
+        # choose the restriction to append: LIKE on a concrete attribute
+        # when the search string holds a '%' wildcard and the target
+        # attribute is unambiguous, full-text 'has_text' otherwise
+        searchop = ''
+        if '%' in searchstr:
+            if rtype:
+                possible_etypes = self.schema.rschema(rtype).objects(etype)
+            else:
+                possible_etypes = [self.schema.eschema(etype)]
+            if searchattr or len(possible_etypes) == 1:
+                searchattr = searchattr or possible_etypes[0].main_attribute()
+                searchop = 'LIKE '
+        searchattr = searchattr or 'has_text'
+        if var is None:
+            var = etype[0]
+        return '%s %s %s%%(text)s' % (var, searchattr, searchop)
+        
+    def _two_words_query(self, word1, word2):
+        """Specific process for two words query (case (2) of preprocess_rql)
+        """
+        etype = self._get_entity_type(word1)
+        # this is a valid RQL query : ("Person X", or "Person TMP1")
+        if len(word2) == 1 and word2.isupper():
+            return '%s %s' % (etype, word2),
+        # else, suppose it's a shortcut like : Person Smith
+        rql = '%s %s WHERE %s' % (etype, etype[0], self._complete_rql(word2, etype))
+        return rql, {'text': word2}
+           
+    def _three_words_query(self, word1, word2, word3):
+        """Specific process for three words query (case (3) of preprocess_rql)
+        """
+        etype = self._get_entity_type(word1)
+        eschema = self.schema.eschema(etype)
+        rtype = self._get_attribute_name(word2, eschema)
+        # expand shortcut if rtype is a non final relation
+        if not self.schema.rschema(rtype).is_final():
+            return self._expand_shortcut(etype, rtype, word3)
+        # NOTE(review): searchop computed below is never used,
+        # _complete_rql recomputes the operator by itself
+        if '%' in word3:
+            searchop = 'LIKE '
+        else:
+            searchop = ''
+        rql = '%s %s WHERE %s' % (etype, etype[0],
+                                  self._complete_rql(word3, etype, searchattr=rtype))
+        return rql, {'text': word3}
+
+    def _multiple_words_query(self, words):
+        """specific process for more than 3 words query"""
+        # suppose the user typed plain RQL, give it back as is
+        return ' '.join(words),
+
+
+    def _expand_shortcut(self, etype, rtype, searchstr):
+        """Expands shortcut queries on a non final relation to use has_text or
+        the main attribute (according to possible entity type) if '%' is used in the
+        search word
+
+        Transforms : 'person worksat IBM' into
+        'Personne P WHERE P worksAt C, C has_text "IBM"'
+        """
+        # check out all possible entity types for the relation represented
+        # by 'rtype'
+        mainvar = etype[0]
+        searchvar = mainvar  + '1'
+        rql =  '%s %s WHERE %s %s %s, %s' % (etype, mainvar,  # Person P
+                                             mainvar, rtype, searchvar, # P worksAt C
+                                             self._complete_rql(searchstr, etype,
+                                                                rtype=rtype, var=searchvar))
+        return rql, {'text': searchstr}
+
+
+    def _quoted_words_query(self, ori_rql):
+        """Specific process when there's a "quoted" part
+        """
+        m = QUOTED_SRE.match(ori_rql)
+        # if there's no quoted part, then no special pre-processing to do
+        if m is None:
+            raise BadRQLQuery("unable to handle request %r" % ori_rql)
+        left_words = m.group(1).split()
+        quoted_part = m.group(3)
+        # Case (1) : Company "My own company"
+        if len(left_words) == 1:
+            try:
+                word1 = left_words[0]
+                return self._two_words_query(word1, quoted_part)
+            except BadRQLQuery, error:
+                raise BadRQLQuery("unable to handle request %r" % ori_rql)
+        # Case (2) : Company name "My own company";
+        elif len(left_words) == 2:
+            word1, word2 = left_words
+            return self._three_words_query(word1, word2, quoted_part)
+            # return ori_rql
+        raise BadRQLQuery("unable to handle request %r" % ori_rql)
+    
+
+ 
+class FullTextTranslator(BaseQueryProcessor):
+    """last-chance processor: full text search on the whole query string"""
+    priority = 10
+    name = 'text'
+    
+    def preprocess_query(self, uquery, req):
+        """suppose it's a plain text query"""
+        return 'Any X WHERE X has_text %(text)s', {'text': uquery}
+
+
+
+class MagicSearchComponent(SingletonComponent):
+    id  = 'magicsearch'
+    def __init__(self, req, rset=None):
+        super(MagicSearchComponent, self).__init__(req, rset)
+        processors = []
+        self.by_name = {}
+        for processorcls in self.vreg.registry_objects('components',
+                                                       'magicsearch_processor'):
+            # instantiation needed
+            processor = processorcls()
+            processors.append(processor)
+            if processor.name is not None:
+                assert not processor.name in self.by_name
+                self.by_name[processor.name.lower()] = processor
+        self.processors = sorted(processors, key=lambda x: x.priority)
+
+    def process_query(self, uquery, req):
+        assert isinstance(uquery, unicode)
+        try:
+            procname, query = uquery.split(':', 1)
+            proc = self.by_name[procname.strip().lower()]
+            uquery = query.strip()
+        except:
+            # use processor chain
+            unauthorized = None
+            for proc in self.processors:
+                try:
+                    return proc.process_query(uquery, req)
+                # FIXME : we don't want to catch any exception type here !
+                except (RQLSyntaxError, BadRQLQuery):
+                    pass
+                except Unauthorized, ex:
+                    unauthorized = ex
+                    continue
+                except Exception, ex:
+                    LOGGER.debug('%s: %s', ex.__class__.__name__, ex)
+                    continue
+            if unauthorized:
+                raise unauthorized
+        else:
+            # let exception propagate
+            return proc.process_query(uquery, req)
+        raise BadRQLQuery(req._('sorry, the server is unable to handle this query'))
+
+
+# Do not make a strong dependency on NlpTools
+try:
+    from NlpTools.rqltools.client import RQLClient
+except ImportError:
+    LOGGER.info('could not import RQLClient (NlpTools)')
+else:
+    try:
+        from Pyro.errors import NamingError
+    except ImportError:
+        LOGGER.warning("pyro is not installed, can't try to connect to nlp server")
+    else:
+        try:
+            # the class statement itself lives in the try block because
+            # RQLClient('ivan') runs at class creation time and may raise
+            # NamingError when no server is registered
+            class NLPProcessor(BaseQueryProcessor):
+                priority = 8
+                nlp_agent = RQLClient('ivan')
+                def preprocess_query(self, uquery, req):
+                    try:
+                        answer = self.nlp_agent.get_translation(uquery)
+                        if not answer:
+                            raise BadRQLQuery(uquery)
+                        return answer or uquery,
+                    except Exception, ex:
+                        # best effort: fall back to the raw query on any error
+                        LOGGER.exception(str(ex))
+                        return uquery,
+
+        except NamingError: # NlpTools available but no server registered
+            LOGGER.warning('could not find any RQLServer object named "ivan"')
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/management.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,535 @@
+"""management and error screens
+
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from logilab.mtconverter import html_escape
+
+from logilab.common.decorators import cached
+
+from cubicweb.common.utils import UStringIO
+from cubicweb.common.view import AnyRsetView, StartupView, EntityView
+from cubicweb.common.uilib import (html_traceback, rest_traceback, html_escape,
+                                toggle_link)
+from cubicweb.common.selectors import (yes_selector, onelinerset_selector,
+                                    accept_rset_selector, norset_selector,
+                                    chainfirst, chainall)
+from cubicweb.web import INTERNAL_FIELD_VALUE, eid_param, stdmsgs
+from cubicweb.web.widgets import StaticComboBoxWidget
+from cubicweb.web.form import FormMixIn
+
+_ = unicode
+            
+def begin_form(w, entity, redirectvid, redirectpath=None, msg=None):
+    """write the opening tags of an edit form for `entity` on the `w`
+    output stream
+
+    :param redirectvid: view id to redirect to once the form is validated
+    :param redirectpath: optional redirect path (default: the entity's own)
+    :param msg: optional message to display after redirection
+    """
+    w(u'<form method="post" action="%s">\n' % entity.req.build_url('edit'))
+    w(u'<fieldset>\n')
+    w(u'<input type="hidden" name="__redirectvid" value="%s"/>\n' % redirectvid)
+    w(u'<input type="hidden" name="__redirectpath" value="%s"/>\n' % (
+        html_escape(redirectpath or entity.rest_path())))
+    w(u'<input type="hidden" name="eid" value="%s"/>\n' % entity.eid)
+    w(u'<input type="hidden" name="%s" value="%s"/>\n' % (
+        eid_param('__type', entity.eid), entity.e_schema))
+    if msg:
+        w(u'<input type="hidden" name="__message" value="%s"/>\n'
+          % html_escape(msg))
+
+
+class SecurityManagementView(EntityView):
+    """display security information for a given entity"""
+    id = 'security'
+    title = _('security')
+        
+    def cell_call(self, row, col):
+        self.req.add_js('cubicweb.edition.js')            
+        self.req.add_css('cubicweb.acl.css')            
+        entity = self.entity(row, col)
+        w = self.w
+        _ = self.req._
+        w(u'<h1><span class="etype">%s</span> <a href="%s">%s</a></h1>'
+          % (entity.dc_type().capitalize(),
+             html_escape(entity.absolute_url()),
+             html_escape(entity.dc_title())))
+        # first show permissions defined by the schema
+        self.w('<h2>%s</h2>' % _('schema\'s permissions definitions'))
+        self.schema_definition(entity)
+        self.w('<h2>%s</h2>' % _('manage security'))
+        # ownership information
+        if self.schema.rschema('owned_by').has_perm(self.req, 'add',
+                                                    fromeid=entity.eid):
+            self.owned_by_edit_form(entity)
+        else:
+            self.owned_by_information(entity)
+        # epermissions
+        if 'require_permission' in entity.e_schema.subject_relations():
+            w('<h3>%s</h3>' % _('permissions for this entity'))
+            reqpermschema = self.schema.rschema('require_permission')
+            self.require_permission_information(entity, reqpermschema)
+            if reqpermschema.has_perm(self.req, 'add', fromeid=entity.eid):
+                self.require_permission_edit_form(entity)
+
+    def schema_definition(self, entity):
+        """display a table of the schema defined permissions (groups and
+        rql expressions) for each access type
+        """
+        w = self.w
+        _ = self.req._
+        w(u'<table class="schemaInfo">')
+        w(u'<tr><th>%s</th><th>%s</th><th>%s</th></tr>' % (
+            _("access type"), _('granted to groups'), _('rql expressions')))
+        for access_type in ('read', 'add', 'update', 'delete'):
+            w(u'<tr>')
+            w(u'<th>%s</th>' % self.req.__('%s_permission' % access_type))
+            groups = entity.e_schema.get_groups(access_type)
+            l = []
+            for group in groups:
+                l.append(u'<a href="%s">%s</a>' % (
+                    self.build_url('egroup/%s' % group), _(group)))
+            w(u'<td>%s</td>' % u', '.join(l))
+            rqlexprs = entity.e_schema.get_rqlexprs(access_type)
+            w(u'<td>%s</td>' % u'<br/>'.join(expr.expression for expr in rqlexprs))
+            w(u'</tr>\n')
+        w(u'</table>')
+        
+    def owned_by_edit_form(self, entity):
+        """form to edit the owners of the entity"""
+        self.w('<h3>%s</h3>' % self.req._('ownership'))
+        # NOTE(review): here _ is the module-level alias for unicode, so the
+        # msg below is not translated -- probably meant to be self.req._
+        begin_form(self.w, entity, 'security', msg= _('ownerships have been changed'))
+        self.w(u'<table border="0">\n')
+        self.w(u'<tr><td>\n')
+        wdg = entity.get_widget('owned_by')
+        self.w(wdg.edit_render(entity))
+        self.w(u'</td><td>\n')
+        self.w(self.button_ok())
+        self.w(u'</td></tr>\n</table>\n')
+        self.w(u'</fieldset></form>\n')
+
+    def owned_by_information(self, entity):
+        """read-only display of the entity's owners"""
+        ownersrset = entity.related('owned_by')
+        if ownersrset:
+            self.w('<h3>%s</h3>' % self.req._('ownership'))
+            self.w(u'<div class="ownerInfo">')
+            self.w(self.req._('this entity is currently owned by') + ' ')
+            self.wview('csv', entity.related('owned_by'), 'null')
+            self.w(u'</div>')
+        # else we don't know if this is because entity has no owner or because
+        # user has no access to owner users entities
+
+    def require_permission_information(self, entity, reqpermschema):
+        """display the EPermission entities linked to the entity, with a
+        delete link when the user is allowed to remove the relation
+        """
+        if entity.require_permission:
+            w = self.w
+            _ = self.req._
+            if reqpermschema.has_perm(self.req, 'delete', fromeid=entity.eid):
+                delurl = self.build_url('edit', __redirectvid='security',
+                                        __redirectpath=entity.rest_path())
+                delurl = delurl.replace('%', '%%')
+                # don't give __delete value to build_url else it will be urlquoted
+                # and this will replace %s by %25s
+                delurl += '&__delete=%s:require_permission:%%s' % entity.eid
+                dellinktempl = u'[<a href="%s" title="%s">-</a>]&nbsp;' % (
+                    html_escape(delurl), _('delete this permission'))
+            else:
+                dellinktempl = None
+            w(u'<table class="schemaInfo">')
+            w(u'<tr><th>%s</th><th>%s</th></tr>' % (_("permission"),
+                                                    _('granted to groups')))
+            for eperm in entity.require_permission:
+                w(u'<tr>')
+                if dellinktempl:
+                    w(u'<td>%s%s</td>' % (dellinktempl % eperm.eid,
+                                          eperm.view('oneline')))
+                else:
+                    w(u'<td>%s</td>' % eperm.view('oneline'))
+                w(u'<td>%s</td>' % self.view('csv', eperm.related('require_group'), 'null'))
+                w(u'</tr>\n')
+            w(u'</table>')
+        else:
+            self.w(self.req._('no associated epermissions'))
+
+    def require_permission_edit_form(self, entity):
+        """form to attach a new EPermission entity to the entity"""
+        w = self.w
+        _ = self.req._
+        newperm = self.vreg.etype_class('EPermission')(self.req, None)
+        newperm.eid = self.req.varmaker.next()
+        w(u'<p>%s</p>' % _('add a new permission'))
+        begin_form(w, newperm, 'security', entity.rest_path())
+        w(u'<input type="hidden" name="%s" value="__cubicweb_internal_field__"/>'
+          % eid_param('edito-require_permission', newperm.eid))
+        w(u'<input type="hidden" name="%s" value="%s"/>'
+          % (eid_param('require_permission', newperm.eid), entity.eid))
+        w(u'<table border="0">\n')
+        w(u'<tr><th>%s</th><th>%s</th><th>%s</th><th>&nbsp;</th></tr>\n'
+               % (_("name"), _("label"), _('granted to groups')))
+        # restrict the 'name' vocabulary when the entity declares the
+        # permission names it supports
+        if getattr(entity, '__permissions__', None):
+            wdg = StaticComboBoxWidget(self.vreg, self.schema['EPermission'],
+                                       self.schema['name'], self.schema['String'],
+                                       vocabfunc=lambda x: entity.__permissions__)
+        else:
+            wdg = newperm.get_widget('name')
+        w(u'<tr><td>%s</td>\n' % wdg.edit_render(newperm))
+        wdg = newperm.get_widget('label')
+        w(u'<td>%s</td>\n' % wdg.edit_render(newperm))
+        wdg = newperm.get_widget('require_group')
+        w(u'<td>%s</td>\n' % wdg.edit_render(newperm))            
+        w(u'<td>%s</td></tr>\n' % self.button_ok())
+        w(u'</table>')
+        w(u'</fieldset></form>\n')
+
+    def button_ok(self):
+        """return an html OK submit button"""
+        return (u'<input class="validateButton" type="submit" name="submit" value="%s"/>'
+                % self.req._(stdmsgs.BUTTON_OK))
+
+        
+class ErrorView(AnyRsetView):
+    """default view when no result has been found"""
+    __selectors__ = (yes_selector,)
+    id = 'error'
+    
+    def page_title(self):
+        """returns a title according to the result set - used for the
+        title in the HTML header
+        """
+        return self.req._('an error occured')
+
+    def call(self):
+        # reset headers so the error page does not inherit headers already
+        # set by the failed view
+        req = self.req.reset_headers()
+        _ = req._
+        ex = req.data.get('ex')#_("unable to find exception information"))
+        excinfo = req.data.get('excinfo')
+        title = _('an error occured')
+        self.w(u'<h2>%s</h2>' % title)
+        if 'errmsg' in req.data:
+            ex = req.data['errmsg']
+        else:
+            ex = exc_message(ex, req.encoding)
+        if excinfo is not None and self.config['print-traceback']:
+            # NOTE(review): ex has been replaced by its message string above,
+            # so exclass is the string type's name, not the original
+            # exception class -- confirm intent
+            exclass = ex.__class__.__name__
+            self.w(u'<div class="tb">%s: %s</div>' % (exclass, html_escape(ex).replace("\n","<br />")))
+            self.w(u'<hr />')
+            self.w(u'<div class="tb">%s</div>' % html_traceback(excinfo, ex, ''))
+        else:
+            self.w(u'<div class="tb">%s</div>' % (html_escape(ex).replace("\n","<br />")))
+        # if excinfo is not None, it's probably not a bug
+        if excinfo is None:
+            return
+        vcconf = self.config.vc_config()
+        self.w(u"<div>")
+        eversion = vcconf.get('cubicweb', _('no version information'))
+        # NOTE: tuple wrapping needed since eversion is itself a tuple
+        self.w(u"<b>CubicWeb version:</b> %s<br/>\n" % (eversion,))
+        for pkg in self.config.cubes():
+            pkgversion = vcconf.get(pkg, _('no version information'))
+            self.w(u"<b>Package %s version:</b> %s<br/>\n" % (pkg, pkgversion))
+        self.w(u"</div>")
+        # creates a bug submission link if SUBMIT_URL is set
+        submiturl = self.config['submit-url']
+        if submiturl:
+            binfo = text_error_description(ex, excinfo, req, eversion,
+                                           [(pkg, vcconf.get(pkg, _('no version information')))
+                                            for pkg in self.config.cubes()])
+            self.w(u'<form action="%s" method="post">\n' % html_escape(submiturl))
+            self.w(u'<fieldset>\n')
+            self.w(u'<textarea class="hidden" name="description">%s</textarea>' % html_escape(binfo))
+            self.w(u'<input type="hidden" name="description_format" value="text/rest"/>')
+            self.w(u'<input type="hidden" name="__bugreporting" value="1"/>')
+            self.w(u'<input type="submit" value="%s"/>' % _('Submit bug report'))
+            self.w(u'</fieldset>\n')
+            self.w(u'</form>\n')
+        # same report, sent by mail, when submit-mail is configured
+        submitmail = self.config['submit-mail']
+        if submitmail:
+            binfo = text_error_description(ex, excinfo, req, eversion,
+                                           [(pkg, vcconf.get(pkg, _('no version information')))
+                                            for pkg in self.config.cubes()])
+            self.w(u'<form action="%s" method="post">\n' % req.build_url('reportbug'))
+            self.w(u'<fieldset>\n')
+            self.w(u'<input type="hidden" name="description" value="%s"/>' % html_escape(binfo))
+            self.w(u'<input type="hidden" name="__bugreporting" value="1"/>')
+            self.w(u'<input type="submit" value="%s"/>' % _('Submit bug report by mail'))
+            self.w(u'</fieldset>\n')
+            self.w(u'</form>\n')
+
+
+def exc_message(ex, encoding):
+    """return a unicode message for exception `ex`, decoding with
+    `encoding`; must never fail since it runs on the error page
+    """
+    try:
+        return unicode(ex)
+    except:
+        # deliberate broad catches: degrade to str() then repr()
+        try:
+            return unicode(str(ex), encoding, 'replace')
+        except:
+            return unicode(repr(ex), encoding, 'replace')
+    
+def text_error_description(ex, excinfo, req, eversion, cubes):
+    """return a ReST formatted description of the error suitable for a bug
+    report: traceback, URL, form params and version information
+
+    :param cubes: list of (package name, version) pairs
+    """
+    binfo = rest_traceback(excinfo, html_escape(ex))
+    binfo += u'\n\n:URL: %s\n' % req.url()
+    # form params are only relevant on the first report, not when the form
+    # itself is a bug report submission
+    if not '__bugreporting' in req.form:
+        binfo += u'\n:form params:\n'
+        binfo += u'\n'.join(u'  * %s = %s' % (k, v) for k, v in req.form.iteritems())
+    binfo += u'\n\n:CubicWeb version: %s\n'  % (eversion,)
+    for pkg, pkgversion in cubes:
+        binfo += u":Package %s version: %s\n" % (pkg, pkgversion)
+    binfo += '\n'
+    return binfo
+
+# some string we want to be internationalizable for nicer display of eproperty
+# groups
+_('navigation')
+_('ui')
+_('actions')
+_('boxes')
+_('components')
+_('contentnavigation')
+
+class SystemEpropertiesForm(FormMixIn, StartupView):
+    controller = 'edit'
+    id = 'systemepropertiesform'
+    title = _('site configuration')
+    require_groups = ('managers',)
+    category = 'startupview'
+    
+    def linkable(self):
+        """this view may be linked to from index / navigation boxes"""
+        return True
+    
+    def url(self):
+        """return the url associated with this view. We can omit rql here
+        since this is a startup view
+        """
+        return self.build_url('view', vid=self.id)
+    
+    def call(self, **kwargs):
+        """The default view representing the application's index"""
+        self.req.add_js('cubicweb.edition.js')
+        self.req.add_css('cubicweb.preferences.css')
+        vreg = self.vreg
+        # NOTE(review): 'values' is computed but never used below
+        values = self.defined_keys
+        groupedopts = {}
+        mainopts = {}
+        # "self.id=='systemepropertiesform'" to skip site wide properties on
+        # user's preference but not site's configuration
+        for key in vreg.user_property_keys(self.id=='systemepropertiesform'):
+            parts = key.split('.')
+            if parts[0] in vreg:
+                # appobject configuration
+                reg, oid, propid = parts
+                groupedopts.setdefault(reg, {}).setdefault(oid, []).append(key)
+            else:
+                mainopts.setdefault(parts[0], []).append(key)
+        # precompute form to consume error message
+        for group, keys in mainopts.items():
+            mainopts[group] = self.form(keys, False)
+        for group, objects in groupedopts.items():
+            for oid, keys in objects.items():
+                groupedopts[group][oid] = self.form(keys, True)
+        
+        w = self.w
+        req = self.req
+        _ = req._
+        w(u'<h1>%s</h1>\n' % _(self.title))
+        w(self.error_message())
+        # simple options first, sorted on the translated group label
+        for label, group, form in sorted((_(g), g, f)
+                                         for g, f in mainopts.iteritems()):
+            w(u'<h2 class="propertiesform">%s</h2>\n' %
+              (toggle_link('fieldset_' + group, label)))
+            w(u'<div id="fieldset_%s" class="hidden">' % group)
+            w(u'<fieldset class="subentity">')
+            w(form)
+            w(u'</fieldset></div>')
+        # then appobject options, one fieldset per object inside each group
+        for label, group, objects in sorted((_(g), g, o)
+                                            for g, o in groupedopts.iteritems()):
+            w(u'<h2 class="propertiesform">%s</h2>\n' %
+              (toggle_link('fieldset_' + group, label)))
+            w(u'<div id="fieldset_%s" class="hidden">' % group)
+            for label, oid, form in sorted((self.req.__('%s_%s' % (group, o)), o, f)
+                                           for o, f in objects.iteritems()):
+                w(u'<fieldset class="subentity">')
+                w(u'<legend class="componentTitle">%s</legend>\n' % label)
+                docmsgid = '%s_%s_description' % (group, oid)
+                doc = _(docmsgid)
+                # only show a description when the msgid has a translation
+                if doc != docmsgid:
+                    w(u'<p class="description">%s</p>' % html_escape(doc))
+                w(form)
+                w(u'</fieldset>')
+            w(u'</div>')
+            
+                
+
    @property
    @cached
    def eprops_rset(self):
        """result set of the site-wide EProperty entities (pkey/value pairs
        not bound to any user); cached for the lifetime of the view"""
        return self.req.execute('Any P,K,V WHERE P is EProperty, P pkey K, P value V, NOT P for_user U')
+    
+    @property
+    def defined_keys(self):
+        values = {}
+        for i, entity in enumerate(self.eprops_rset.entities()):
+            values[entity.pkey] = i
+        return values
+    
+    def entity_for_key(self, key):
+        values = self.defined_keys
+        if key in values:
+            entity = self.eprops_rset.get_entity(values[key], 0)
+        else:
+            entity = self.vreg.etype_class('EProperty')(self.req, None, None)
+            entity.eid = self.req.varmaker.next()
+            entity['value'] = self.vreg.property_value(key)
+        return entity
+
    def form(self, keys, splitlabel=False):
        """render and return (as a unicode string) one <form> editing the
        given property keys

        :param keys: the property keys handled by this form
        :param splitlabel: when true, only the last part of the dotted key
          is displayed as the row label (see form_row)
        """
        stream = UStringIO()
        w = stream.write
        w(u'<form action="%s" method="post">\n' % self.build_url())
        w(u'<fieldset>\n')
        # hidden controller parameters: where to redirect on validation
        # error and on success (current path, query string passed apart)
        w(u'<input type="hidden" name="__errorurl" value="%s"/>\n'
          % html_escape(self.req.url()))
        w(u'<input type="hidden" name="__form_id" value="%s"/>\n' % self.id)
        path = self.req.relative_path()
        if '?' in path:
            path, params = path.split('?', 1)
            w(u'<input type="hidden" name="__redirectparams" value="%s"/>\n'
              % html_escape(params))
        w(u'<input type="hidden" name="__redirectpath" value="%s"/>\n' % path)
        #w(u'<input type="hidden" name="__redirectrql" value=""/>\n') 
        w(u'<input type="hidden" name="__message" value="%s"/>\n'
          % self.req._('changes applied'))
        w(u'<table><tr><td>\n')
       
        w(u'<table>\n')
        # one table row per property key
        for key in keys:
            w(u'<tr>\n')
            self.form_row(w, key, splitlabel)
            w(u'</tr>\n')
        w(u'</table>\n')
        w(u'</td></tr><tr><td>\n')
        w(self.button_ok())
        w(self.button_cancel())
        w(u'</td></tr></table>\n')
        w(u'</fieldset>\n')
        w(u'</form>\n')
        return stream.getvalue()
+        
+    def form_row(self, w, key, splitlabel):
+        entity = self.entity_for_key(key)
+        eid = entity.eid
+        if splitlabel:
+            w(u'<td class="label">%s</td>' % self.req._(key.split('.')[-1]))
+        else:
+            w(u'<td class="label">%s</td>' % self.req._(key))
+        wdg = self.vreg.property_value_widget(key, req=self.req)
+        error = wdg.render_error(entity)
+        w(u'<td class="%s">' % (error and 'error' or ''))
+        w(error)
+        self.form_row_hiddens(w, entity, key)
+        w(wdg.edit_render(entity))
+        w(u'</td>\n')
+        w(u'<td>%s</td>' % wdg.render_help(entity))
+        return entity
+
+    def form_row_hiddens(self, w, entity, key):
+        eid = entity.eid
+        w(u'<input type="hidden" name="eid" value="%s"/>' % eid)
+        w(u'<input type="hidden" name="%s" value="EProperty"/>' % eid_param('__type', eid))
+        w(u'<input type="hidden" name="%s" value="%s"/>' % (eid_param('pkey', eid), key))
+        w(u'<input type="hidden" name="%s" value="%s"/>' % (eid_param('edits-pkey', eid), ''))
+
+        
class EpropertiesForm(SystemEpropertiesForm):
    """user's preferences form: same machinery as the site-wide properties
    form, but operating on the EProperty entities attached to a user
    """
    id = 'epropertiesform'
    title = _('preferences')
    require_groups = ('users', 'managers') # we don't want guests to be able to come here
    # NOTE(review): the trailing comma makes __selectors__ a 1-tuple wrapping
    # the chained selector -- confirm this matches the selector protocol
    # (other classes in this file assign plain tuples of selectors)
    __selectors__ = chainfirst(norset_selector,
                               chainall(onelinerset_selector, accept_rset_selector)),
    accepts = ('EUser',)

    @classmethod
    def accept_rset(cls, req, rset, row, col):
        """only accept the result set when the selected entity is the logged
        user himself, or when the logged user is a manager"""
        if row is None:
            row = 0
        score = super(EpropertiesForm, cls).accept_rset(req, rset, row, col)
        # check current user is the rset user or he is in the managers group
        if score and (req.user.eid == rset[row][col or 0]
                      or req.user.matching_groups('managers')):
            return score
        return 0

    @property
    def user(self):
        """the user whose preferences are edited: the one selected by the
        result set if any, else the logged user"""
        if self.rset is None:
            return self.req.user
        return self.rset.get_entity(self.row or 0, self.col or 0)

    @property
    @cached
    def eprops_rset(self):
        """overridden to fetch the properties bound to the edited user
        instead of the site-wide ones"""
        return self.req.execute('Any P,K,V WHERE P is EProperty, P pkey K, P value V,'
                                'P for_user U, U eid %(x)s', {'x': self.user.eid})

    def form_row_hiddens(self, w, entity, key):
        super(EpropertiesForm, self).form_row_hiddens(w, entity, key)
        # if user is in the managers group and the property is being created,
        # we have to set for_user explicitly
        if not entity.has_eid() and self.user.matching_groups('managers'):
            eid = entity.eid
            w(u'<input type="hidden" name="%s" value="%s"/>'
              % (eid_param('edits-for_user', eid), INTERNAL_FIELD_VALUE))
            w(u'<input type="hidden" name="%s" value="%s"/>'
              % (eid_param('for_user', eid), self.user.eid))
+    
+                   
+    
+
class ProcessInformationView(StartupView):
    """startup view dumping server-side information: application versions,
    configuration paths, and -- when the attributes are reachable, i.e.
    presumably when running under mod_python -- environment, request and
    server details; restricted to managers
    """
    id = 'info'
    title = _('server information')
    require_groups = ('managers',)

    def call(self, **kwargs):
        """display server information"""
        vcconf = self.config.vc_config()
        req = self.req
        _ = req._
        # display main information
        self.w(u'<h3>%s</h3>' % _('Application'))
        self.w(u'<table border="1">')
        self.w(u'<tr><th align="left">%s</th><td>%s</td></tr>' % (
            'CubicWeb', vcconf.get('cubicweb', _('no version information'))))
        # one row per installed cube, with its version when known
        for pkg in self.config.cubes():
            pkgversion = vcconf.get(pkg, _('no version information'))
            self.w(u'<tr><th align="left">%s</th><td>%s</td></tr>' % (
                pkg, pkgversion))
        self.w(u'<tr><th align="left">%s</th><td>%s</td></tr>' % (
            _('home'), self.config.apphome))
        self.w(u'<tr><th align="left">%s</th><td>%s</td></tr>' % (
            _('base url'), req.base_url()))
        self.w(u'<tr><th align="left">%s</th><td>%s</td></tr>' % (
            _('data directory url'), req.datadir_url))
        self.w(u'</table>')
        self.w(u'<br/>')
        # environment and request and server information
        try:
            # need to remove our adapter and then modpython-apache wrapper...
            env = req._areq._req.subprocess_env
        except AttributeError:
            # request does not wrap a mod_python request: stop here
            return
        self.w(u'<h3>%s</h3>' % _('Environment'))
        self.w(u'<table border="1">')
        for attr in env.keys():
            self.w(u'<tr><th align="left">%s</th><td>%s</td></tr>'
                   % (attr, html_escape(env[attr])))
        self.w(u'</table>')
        self.w(u'<h3>%s</h3>' % _('Request'))
        self.w(u'<table border="1">')
        # selected mod_python request attributes -- TODO confirm these all
        # exist on the wrapped request object in every deployment
        for attr in ('filename', 'form', 'hostname', 'main', 'method',
                     'path_info', 'protocol',
                     'search_state', 'the_request', 'unparsed_uri', 'uri'):
            val = getattr(req, attr)
            self.w(u'<tr><th align="left">%s</th><td>%s</td></tr>'
                   % (attr, html_escape(val)))
        self.w(u'</table>')
        server = req.server
        self.w(u'<h3>%s</h3>' % _('Server'))
        self.w(u'<table border="1">')
        # dump every public, non-callable attribute of the server object
        for attr in dir(server):
            val = getattr(server, attr)
            if attr.startswith('_') or callable(val):
                continue
            self.w(u'<tr><th align="left">%s</th><td>%s</td></tr>'
                   % (attr, html_escape(val)))
        self.w(u'</table>')
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/massmailing.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,129 @@
+"""Mass mailing form views
+
+:organization: Logilab
+:copyright: 2007-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+
+import operator
+
+from logilab.mtconverter import html_escape
+
+from cubicweb.interfaces import IEmailable
+from cubicweb.common.view import EntityView
+from cubicweb.common.selectors import interface_selector, in_group_selector
+from cubicweb.web.action import EntityAction
+from cubicweb.web import stdmsgs
+
+
class SendEmailAction(EntityAction):
    """action adding a 'send email' link leading to the mass mailing form
    for entities implementing IEmailable"""
    category = 'mainactions'
    __selectors__ = (interface_selector, in_group_selector)
    accepts_interfaces = (IEmailable,) # XXX should check email is set as well
    require_groups = ('managers', 'users')

    id = 'sendemail'
    title = _('send email')

    def url(self):
        """return the url of the mass mailing form, propagating the current
        rql (if any) so the form targets the same result set"""
        params = {'vid': 'massmailing', '__force_display': 1}
        # `in` test instead of dict.has_key() (deprecated, removed in py3k)
        if 'rql' in self.req.form:
            params['rql'] = self.req.form['rql']
        return self.build_url(self.req.relative_path(includeparams=False),
                              **params)
+
+
class MassMailingForm(EntityView):
    """form view to compose an email sent to the (IEmailable) entities of
    the result set"""
    id = 'massmailing'
    __selectors__ = (interface_selector, in_group_selector)
    accepts_interfaces = (IEmailable,)
    require_groups = ('managers', 'users')

    # %-interpolated form skeleton; the 'emailbody' div is turned into a
    # TemplateTextField widget (presumably by cubicweb.widgets.js, loaded
    # in call() -- confirm)
    form_template = u"""
<div id="compose">
<form id="sendemail" action="sendmail" method="post">
<table class="headersform">
<tr>
  <td class="hlabel">%(from_header)s</td>
  <td class="hvalue">%(from)s</td>
</tr>
<tr>
  <td class="hlabel">%(recipients_header)s</td>
  <td class="hvalue">%(recipients)s</td>
</tr>
<tr>
  <td class="hlabel">%(subject)s</td>
  <td class="hvalue"><input id="mailsubj" name="mailsubject" value="" /></td>
</tr>
</table>
<div id="toolbar">
<ul>
<li><a id="sendbutton" href="javascript: $('sendemail').submit()">
    <img src="%(sendimgpath)s" alt="%(send)s"/>%(send)s</a></li>
<li><a id="cancelbutton" href="javascript: history.back()">
    <img src="%(cancelimgpath)s" alt="%(cancel)s"/>%(cancel)s</a></li>
 </ul>
</div>
<table>
<tr>
  <td>
    <div>
      <div id="emailbody" class="widget" cubicweb:loadtype="auto" cubicweb:wdgtype="TemplateTextField"
           cubicweb:inputid="emailarea" cubicweb:inputname="mailbody" cubicweb:variables="%(variables)s"/>
    </div>
  </td>
  <td>%(substitutions)s</td>
</tr>
</table>
</form>
</div>
    """

    def call(self):
        """render the mail composition form for the current result set"""
        req = self.req
        req.add_js('cubicweb.widgets.js')
        req.add_css('cubicweb.mailform.css')
        from_addr = '%s <%s>' % (req.user.dc_title(), req.user.get_email())
        # note: the 'body' entry is not referenced by form_template
        ctx = {
            'from_header' : req._('From: '),
            'from' : html_escape(from_addr),
            'substitutions' : self._build_substitutions_help(),
            'recipients_header' : req._('Recipients: '),
            'subject' : req._('Subject: '),
            'body' : req._('Email body: '),
            'variables' : ','.join(self._get_allowed_substitutions()),
            'recipients' : self._build_recipients_list(),
            'cancel' : req._(stdmsgs.BUTTON_CANCEL),
            'cancelimgpath' : req.external_resource('CANCEL_EMAIL_ICON'),
            'send' : req._('send email'),
            'sendimgpath' : req.external_resource('SEND_EMAIL_ICON'),
            }
        self.w(self.form_template % ctx)


    def _get_allowed_substitutions(self):
        """return the sorted intersection of the substitution keys supported
        by every entity type present in the result set

        NOTE(review): reduce() raises TypeError on an empty sequence --
        presumably the selectors guarantee a non-empty result set; confirm
        """
        coltypes = self.rset.column_types(0)
        attrs = []
        for coltype in coltypes:
            eclass = self.vreg.etype_class(coltype)
            attrs.append(eclass.allowed_massmail_keys())
        return sorted(reduce(operator.and_, attrs))

    def _build_recipients_list(self):
        """return the html snippet listing one pre-checked checkbox per
        entity of the result set having an email address"""
        emails = ((entity.eid, entity.get_email()) for entity in self.rset.entities())
        checkboxes = (u'<input name="recipient" type="checkbox" value="%s" checked="checked" />%s'
                      % (eid, html_escape(email)) for eid, email in emails if email)
        boxes = (u'<div class="recipient">%s</div>' % cbox for cbox in checkboxes)
        return u'<div id="recipients">%s</div>' % u'\n'.join(boxes)


    def _build_substitutions_help(self):
        """return the html snippet listing the allowed substitution
        variables as links inserting themselves into the mail body"""
        insertLink = u'<a href="javascript: insertText(\'%%(%s)s\', \'emailarea\');">%%(%s)s</a>'
        substs = (u'<div class="substitution">%s</div>' % (insertLink % (subst, subst))
                  for subst in self._get_allowed_substitutions())
        helpmsg = self.req._('You can use any of the following substitutions in your text')
        return u'<div id="substitutions"><span>%s</span>%s</div>' % (
            helpmsg, u'\n'.join(substs))
+
+    
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/navigation.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,216 @@
+"""navigation components definition for CubicWeb web client
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from rql.nodes import VariableRef, Constant
+
+from logilab.mtconverter import html_escape
+
+from cubicweb.interfaces import IPrevNext
+from cubicweb.common.selectors import (largerset_selector, sortedrset_selector,
+                                    primaryview_selector, contextprop_selector,
+                                    onelinerset_selector, interface_selector)
+from cubicweb.common.uilib import cut
+from cubicweb.web.component import EntityVComponent, NavigationComponent
+
+_ = unicode
+
+
class PageNavigation(NavigationComponent):
    """basic pagination component: one numeric link per page of the
    result set"""

    def call(self):
        """displays a resultset by page"""
        params = dict(self.req.form)
        self.clean_params(params)
        basepath = self.req.relative_path(includeparams=False)
        pagesize = self.page_size
        lastrow = self.rset.rowcount - 1
        links = []
        first = 0
        while first <= lastrow:
            last = min(first + pagesize - 1, lastrow)
            links.append(self.page_link(basepath, params, first, last,
                                        u'%s - %s' % (first+1, last+1)))
            first = last + 1
        w = self.w
        w(u'<div class="pagination">')
        w(u'%s&nbsp;' % self.previous_link(params))
        w(u'[&nbsp;%s&nbsp;]' % u'&nbsp;| '.join(links))
        w(u'&nbsp;%s' % self.next_link(params))
        w(u'</div>')
+
+    
class SortedNavigation(NavigationComponent):
    """sorted navigation applies if navigation is needed (according to page
    size) and if the result set is sorted
    """
    __selectors__ = (largerset_selector, sortedrset_selector)

    # number of considered chars to build page links
    nb_chars = 5

    def display_func(self, rset, col, attrname):
        """return a function mapping a row index to the string displayed in
        page links for that row:

        * the printable value of `attrname` when sorting on an attribute
        * the raw column value when the column holds a final type
        * the entity's 'text' view otherwise

        note: the original bound an unused local `req = self.req` here; it
        has been removed
        """
        if attrname is not None:
            def index_display(row):
                entity = rset.get_entity(row, col)
                return entity.printable_value(attrname, format='text/plain')
        elif self.schema.eschema(rset.description[0][col]).is_final():
            def index_display(row):
                return unicode(rset[row][col])
        else:
            def index_display(row):
                return rset.get_entity(row, col).view('text')
        return index_display

    def call(self):
        """displays links to navigate across pages of a result set

        Displayed result is done according to a variable on which the sort
        is done, and looks like:
        [ana - cro] | [cro - ghe] | ... | [tim - zou]
        """
        w = self.w
        rset = self.rset
        page_size = self.page_size
        rschema = self.schema.rschema
        # attrname = the name of attribute according to which the sort
        # is done if any
        for sorterm in rset.syntax_tree().children[0].orderby:
            if isinstance(sorterm.term, Constant):
                # sort on a selected column number: display its raw value
                col = sorterm.term.value - 1
                index_display = self.display_func(rset, col, None)
                break
            # else sort on a variable: inspect its relations to find the
            # attribute (and selected column) the sort relies on
            var = sorterm.term.get_nodes(VariableRef)[0].variable
            col = None
            for ref in var.references():
                rel = ref.relation()
                if rel is None:
                    continue
                attrname = rel.r_type
                if attrname == 'is':
                    continue
                if not rschema(attrname).is_final():
                    col = var.selected_index()
                    attrname = None
                if col is None:
                    # final relation or not selected non final relation
                    if var is rel.children[0]:
                        relvar = rel.children[1].children[0].get_nodes(VariableRef)[0]
                    else:
                        relvar = rel.children[0].variable
                    col = relvar.selected_index()
                if col is not None:
                    break
            else:
                # no relation but maybe usable anyway if selected
                col = var.selected_index()
                attrname = None
            if col is not None:
                index_display = self.display_func(rset, col, attrname)
                break
        else:
            # nothing usable found, use the first column
            index_display = self.display_func(rset, 0, None)
        # build one link per page, labelled with the first/last displayed
        # values of that page
        blocklist = []
        params = dict(self.req.form)
        self.clean_params(params)
        start = 0
        basepath = self.req.relative_path(includeparams=False)
        while start < rset.rowcount:
            stop = min(start + page_size - 1, rset.rowcount - 1)
            cell = self.format_link_content(index_display(start), index_display(stop))
            blocklist.append(self.page_link(basepath, params, start, stop, cell))
            start = stop + 1
        self.write_links(params, blocklist)

    def format_link_content(self, startstr, stopstr):
        """truncate boundary values to `nb_chars` and escape them"""
        text = u'%s - %s' % (startstr.lower()[:self.nb_chars],
                             stopstr.lower()[:self.nb_chars])
        return html_escape(text)

    def write_links(self, params, blocklist):
        """surround the page links with previous/next links and write them"""
        self.w(u'<div class="pagination">')
        self.w(u'%s&nbsp;' % self.previous_link(params))
        self.w(u'[&nbsp;%s&nbsp;]' % u'&nbsp;| '.join(blocklist))
        self.w(u'&nbsp;%s' % self.next_link(params))
        self.w(u'</div>')
+
+
def limit_rset_using_paged_nav(self, req, rset, w, forcedisplay=False, show_all_option=True):
    """truncate `rset` to the current page (in place) and write the
    pagination links through `w`

    does nothing when the whole result set must be shown (`forcedisplay` or
    a '__force_display' form parameter) or when no navigation component
    applies (result set small enough)
    """
    if forcedisplay or req.form.get('__force_display') is not None:
        return
    nav = self.vreg.select_component('navigation', req, rset)
    if not nav:
        return
    # get boundaries before component rendering
    start, stop = nav.page_boundaries()
    nav.dispatch(w=w)
    params = dict(req.form)
    nav.clean_params(params)
    # make a link to see them all
    if show_all_option:
        url = html_escape(self.build_url(__force_display=1, **params))
        w(u'<p><a href="%s">%s</a></p>\n'
          % (url, req._('show %s results') % len(rset)))
    rset.limit(offset=start, limit=stop-start, inplace=True)
+
+
# monkey patch base View class to add a .pagination(req, rset, w, forcedisplay)
# method to be called on view's result set and printing pages index in the view
from cubicweb.common.view import View
# XXX deprecated, use paginate
View.pagination = limit_rset_using_paged_nav

def paginate(view, show_all_option=True, w=None):
    """paginate `view`'s result set unless the view handles navigation by
    itself (`need_navigation`); convenience wrapper installed below as
    View.paginate"""
    limit_rset_using_paged_nav(view, view.req, view.rset, w or view.w,
                               not view.need_navigation, show_all_option)
View.paginate = paginate
+
class NextPrevNavigationComponent(EntityVComponent):
    """component rendering previous/next entity links (and the matching
    <link rel="prev|next"> document headers) for entities implementing
    IPrevNext"""
    id = 'prevnext'
    # register msg not generated since no entity implements IPrevNext in cubicweb
    # itself
    title = _('contentnavigation_prevnext')
    help = _('contentnavigation_prevnext_description')
    __selectors__ = (onelinerset_selector, primaryview_selector,
                     contextprop_selector, interface_selector)
    accepts_interfaces = (IPrevNext,)
    context = 'navbottom'
    order = 10

    def call(self, view=None):
        entity = self.entity(0)
        previous = entity.previous_entity()
        # local renamed from `next` to avoid shadowing the builtin
        following = entity.next_entity()
        if previous or following:
            textsize = self.req.property_value('navigation.short-line-size')
            self.w(u'<div class="prevnext">')
            if previous:
                self.w(u'<div class="previousEntity left">')
                self.w(self.previous_link(previous, textsize))
                self.w(u'</div>')
                self.req.html_headers.add_raw('<link rel="prev" href="%s" />'
                                              % html_escape(previous.absolute_url()))
            if following:
                self.w(u'<div class="nextEntity right">')
                self.w(self.next_link(following, textsize))
                self.w(u'</div>')
                self.req.html_headers.add_raw('<link rel="next" href="%s" />'
                                              % html_escape(following.absolute_url()))
            self.w(u'</div>')
            self.w(u'<div class="clear"></div>')

    def previous_link(self, previous, textsize):
        """return the '<< title' link to the previous entity, title cut to
        `textsize` chars"""
        return u'<a href="%s" title="%s">&lt;&lt; %s</a>' % (
            html_escape(previous.absolute_url()),
            self.req._('i18nprevnext_previous'),
            html_escape(cut(previous.dc_title(), textsize)))

    def next_link(self, next, textsize):
        """return the 'title >>' link to the next entity, title cut to
        `textsize` chars"""
        return u'<a href="%s" title="%s">%s &gt;&gt;</a>' % (
            html_escape(next.absolute_url()),
            self.req._('i18nprevnext_next'),
            html_escape(cut(next.dc_title(), textsize)))
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/old_calendar.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,544 @@
+"""html calendar views
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+
+from mx.DateTime import DateTime, RelativeDateTime, Date, Time, today, Sunday
+
+from logilab.mtconverter import html_escape
+
+from cubicweb.interfaces import ICalendarViews
+from cubicweb.common.utils import date_range
+from cubicweb.common.selectors import interface_selector
+from cubicweb.common.registerers import priority_registerer
+from cubicweb.common.view import EntityView
+
+# Define some useful constants
+ONE_MONTH = RelativeDateTime(months=1)
+TODAY = today()
+THIS_MONTH = TODAY.month
+THIS_YEAR = TODAY.year
+# mx.DateTime and ustrftime could be used to build WEEKDAYS
+WEEKDAYS = [_("monday"), _("tuesday"), _("wednesday"), _("thursday"),
+            _("friday"), _("saturday"), _("sunday")]
+
+# used by i18n tools
+MONTHNAMES = [ _('january'), _('february'), _('march'), _('april'), _('may'),
+               _('june'), _('july'), _('august'), _('september'), _('october'),
+               _('november'), _('december')
+               ]
+
class _CalendarView(EntityView):
    """base calendar view containing helpful methods to build calendar views

    the cell/page templates referenced below (NO_CELL, CELL, EMPTY_CELL,
    WEEKNUM_CELL, CALENDAR, AMPM_CONTENT, AMPM_EMPTY, ...) are module-level
    constants defined elsewhere in this file (not shown here)
    """
    __registerer__ = priority_registerer
    __selectors__ = (interface_selector,)
    accepts_interfaces = (ICalendarViews,)
    need_navigation = False

    # Navigation building methods / views ####################################

    PREV = u'<a href="%s">&lt;&lt;</a>&nbsp;&nbsp;<a href="%s">&lt;</a>'
    NEXT = u'<a href="%s">&gt;</a>&nbsp;&nbsp;<a href="%s">&gt;&gt;</a>'
    NAV_HEADER = u"""<table class="calendarPageHeader">
<tr><td class="prev">%s</td><td class="next">%s</td></tr>
</table>
""" % (PREV, NEXT)

    def nav_header(self, date, smallshift=3, bigshift=9):
        """prints shortcut links to go to previous/next steps (month|week)

        :param date: the date the view is centered on
        :param smallshift: months jumped by the single-arrow links
        :param bigshift: months jumped by the double-arrow links
        """
        prev1 = date - RelativeDateTime(months=smallshift)
        prev2 = date - RelativeDateTime(months=bigshift)
        next1 = date + RelativeDateTime(months=smallshift)
        next2 = date + RelativeDateTime(months=bigshift)
        rql, vid = self.rset.printable_rql(), self.id
        return self.NAV_HEADER % (
            html_escape(self.build_url(rql=rql, vid=vid, year=prev2.year, month=prev2.month)),
            html_escape(self.build_url(rql=rql, vid=vid, year=prev1.year, month=prev1.month)),
            html_escape(self.build_url(rql=rql, vid=vid, year=next1.year, month=next1.month)),
            html_escape(self.build_url(rql=rql, vid=vid, year=next2.year, month=next2.month)))

    # Calendar building methods ##############################################

    def build_calendars(self, schedule, begin, end):
        """build several HTML calendars at once, one for each month
        between begin and end
        """
        return [self.build_calendar(schedule, date)
                for date in date_range(begin, end, incr=ONE_MONTH)]

    def build_calendar(self, schedule, first_day):
        """method responsible for building *one* HTML calendar (one month,
        one table row per week)"""
        # FIXME  iterates between [first_day-first_day.day_of_week ;
        #                          last_day+6-last_day.day_of_week]
        umonth = self.format_date(first_day, '%B %Y') # localized month name
        rows = []
        # pad the first week with blank cells up to the month's first weekday
        current_row = [NO_CELL] * first_day.day_of_week
        for daynum in xrange(0, first_day.days_in_month):
            # build cell day
            day = first_day + daynum
            events = schedule.get(day)
            if events:
                events = [u'\n'.join(event) for event in events.values()]
                current_row.append(CELL % (daynum+1, '\n'.join(events)))
            else:
                current_row.append(EMPTY_CELL % (daynum+1))
            # store & reset current row on Sundays
            if day.day_of_week == Sunday:
                rows.append(u'<tr>%s%s</tr>' % (WEEKNUM_CELL % day.iso_week[1], ''.join(current_row)))
                current_row = []
        # pad and flush the (possibly incomplete) last week
        current_row.extend([NO_CELL] * (Sunday-day.day_of_week))
        rql = self.rset.printable_rql()
        if day.day_of_week != Sunday:
            rows.append(u'<tr>%s%s</tr>' % (WEEKNUM_CELL % day.iso_week[1], ''.join(current_row)))
        url = self.build_url(rql=rql, vid='calendarmonth',
                             year=first_day.year, month=first_day.month)
        monthlink = u'<a href="%s">%s</a>' % (html_escape(url), umonth)
        return CALENDAR(self.req) % (monthlink, '\n'.join(rows))

    def _mk_schedule(self, begin, end, itemvid='calendaritem'):
        """private method that gathers information from resultset
        and builds calendars according to it

        :param begin: begin of date range
        :param end: end of date range
        :param itemvid: which view to call to render elements in cells

        returns { day1 : { hour : [views] },
                  day2 : { hour : [views] } ... }
        """
        # put this here since all sub views are calling this method
        self.req.add_css('cubicweb.calendar.css')
        schedule = {}
        for row in xrange(len(self.rset.rows)):
            entity = self.entity(row)
            infos = u'<div class="event">'
            infos += self.view(itemvid, self.rset, row=row)
            infos += u'</div>'
            for date in entity.matching_dates(begin, end):
                day = Date(date.year, date.month, date.day)
                time = Time(date.hour, date.minute, date.second)
                schedule.setdefault(day, {})
                schedule[day].setdefault(time, []).append(infos)
        return schedule

    @staticmethod
    def get_date_range(day=TODAY, shift=4):
        """returns a couple (begin, end)

        <begin> is the first day of current_month - shift
        <end> is the last day of current_month + (shift+1)
        """
        first_day_in_month = DateTime(day.year, day.month, 1)
        begin = first_day_in_month - RelativeDateTime(months=shift)
        end = (first_day_in_month + RelativeDateTime(months=shift+1)) - 1
        return begin, end

    def _build_ampm_cells(self, daynum, events):
        """create a view without any hourly details.

        :param daynum: day of the built cell
        :param events: dictionary with all events classified by hours"""
        # split events according am/pm
        am_events = [event for e_time, e_list in events.iteritems()
                     if 0 <= e_time.hour < 12
                     for event in e_list]
        pm_events = [event for e_time, e_list in events.iteritems()
                     if 12 <= e_time.hour < 24
                     for event in e_list]
        # format each am/pm cell
        if am_events:
            am_content = AMPM_CONTENT % ("amCell", "am", '\n'.join(am_events))
        else:
            am_content = AMPM_EMPTY % ("amCell", "am")
        if pm_events:
            pm_content = AMPM_CONTENT % ("pmCell", "pm", '\n'.join(pm_events))
        else:
            pm_content = AMPM_EMPTY % ("pmCell", "pm")
        return am_content, pm_content
+
+
+
class YearCalendarView(_CalendarView):
    id = 'calendaryear'
    title = _('calendar (year)')

    def call(self, year=THIS_YEAR, month=THIS_MONTH):
        """this view renders a 3x3 calendars' table

        `year` and `month` defaults may be overridden by form parameters
        """
        year = int(self.req.form.get('year', year))
        month = int(self.req.form.get('month', month))
        center_date = DateTime(year, month)
        begin, end = self.get_date_range(day=center_date)
        schedule = self._mk_schedule(begin, end)
        self.w(self.nav_header(center_date))
        calendars = tuple(self.build_calendars(schedule, begin, end))
        self.w(SMALL_CALENDARS_PAGE % calendars)
+
+
class SemesterCalendarView(_CalendarView):
    """this view renders three semesters as three rows of six columns,
    one column per month
    """
    id = 'calendarsemester'
    title = _('calendar (semester)')

    def call(self, year=THIS_YEAR, month=THIS_MONTH):
        year = int(self.req.form.get('year', year))
        month = int(self.req.form.get('month', month))
        begin = DateTime(year, month) - RelativeDateTime(months=2)
        end = DateTime(year, month) + RelativeDateTime(months=3)
        schedule = self._mk_schedule(begin, end)
        self.w(self.nav_header(DateTime(year, month), 1, 6))
        self.w(u'<table class="semesterCalendar">')
        self.build_calendars(schedule, begin, end)
        self.w(u'</table>')
        self.w(self.nav_header(DateTime(year, month), 1, 6))

    def build_calendars(self, schedule, begin, end):
        """NOTE(review): overrides _CalendarView.build_calendars with write
        semantics (emits table rows directly instead of returning a list of
        calendars) -- confirm this divergence is intended"""
        self.w(u'<tr>')
        rql = self.rset.printable_rql()
        # header row: one linked month name per column
        for cur_month in date_range(begin, end, incr=ONE_MONTH):
            umonth = u'%s&nbsp;%s' % (self.format_date(cur_month, '%B'), cur_month.year)
            url = self.build_url(rql=rql, vid=self.id,
                                 year=cur_month.year, month=cur_month.month)
            self.w(u'<th colspan="2"><a href="%s">%s</a></th>' % (html_escape(url),
                                                                  umonth))
        self.w(u'</tr>')
        _ = self.req._
        # one row per day number (1..31), one (weekday, events) cell pair per
        # month; months shorter than the day number get blank cells
        for day_num in xrange(31):
            self.w(u'<tr>')
            for cur_month in date_range(begin, end, incr=ONE_MONTH):
                if day_num >= cur_month.days_in_month:
                    self.w(u'%s%s' % (NO_CELL, NO_CELL))
                else:
                    day = DateTime(cur_month.year, cur_month.month, day_num+1)
                    events = schedule.get(day)
                    self.w(u'<td>%s&nbsp;%s</td>\n' % (_(WEEKDAYS[day.day_of_week])[0].upper(), day_num+1))
                    self.format_day_events(day, events)
            self.w(u'</tr>')

    def format_day_events(self, day, events):
        """write one cell holding the given day's events, or an empty cell"""
        if events:
            events = ['\n'.join(event) for event in events.values()]
            self.w(WEEK_CELL % '\n'.join(events))
        else:
            self.w(WEEK_EMPTY_CELL)
+        
+
class MonthCalendarView(_CalendarView):
    """renders a 3x1 table of month calendars (the month around the
    requested date, obtained through get_date_range with shift=1)
    """
    id = 'calendarmonth'
    title = _('calendar (month)')

    def call(self, year=THIS_YEAR, month=THIS_MONTH):
        """form parameters 'year' / 'month' override the arguments"""
        form = self.req.form
        anchor = DateTime(int(form.get('year', year)),
                          int(form.get('month', month)))
        start, stop = self.get_date_range(day=anchor, shift=1)
        cals = self.build_calendars(self._mk_schedule(start, stop), start, stop)
        self.w(self.nav_header(anchor, 1, 3))
        self.w(BIG_CALENDARS_PAGE % tuple(cals))
        self.w(self.nav_header(anchor, 1, 3))
+
+        
class WeekCalendarView(_CalendarView):
    """this view renders a calendar for week events: the requested ISO week
    plus the previous and next ones, one section per week"""
    id = 'calendarweek'
    title = _('calendar (week)')

    def call(self, year=THIS_YEAR, week=TODAY.iso_week[1]):
        """form parameters 'year' / 'week' override the arguments"""
        year = int(self.req.form.get('year', year))
        week = int(self.req.form.get('week', week))
        day0 = DateTime(year)
        # monday of the requested ISO week (day0 - weekday rewinds to a monday)
        first_day_of_week = (day0-day0.day_of_week) + 7*week
        begin, end = first_day_of_week-7, first_day_of_week+14
        schedule = self._mk_schedule(begin, end, itemvid='calendarlargeitem')
        self.w(self.nav_header(first_day_of_week))
        self.w(u'<table class="weekCalendar">')
        # (monday, sunday) bounds for previous, current and next weeks
        _weeks = [(first_day_of_week-7, first_day_of_week-1),
                  (first_day_of_week, first_day_of_week+6),
                  (first_day_of_week+7, first_day_of_week+13)]
        self.build_calendar(schedule, _weeks)
        self.w(u'</table>')
        self.w(self.nav_header(first_day_of_week))

    def build_calendar(self, schedule, weeks):
        """for each (monday, sunday) couple in `weeks`, write a title row
        (week number + link to the month view) then one row per day"""
        rql = self.rset.printable_rql()
        _ = self.req._
        for monday, sunday in weeks:            
            umonth = self.format_date(monday, '%B %Y')
            url = self.build_url(rql=rql, vid='calendarmonth',
                                 year=monday.year, month=monday.month)
            monthlink = '<a href="%s">%s</a>' % (html_escape(url), umonth)
            self.w(u'<tr><th colspan="3">%s %s (%s)</th></tr>' \
                  % (_('week'), monday.iso_week[1], monthlink))
            for day in date_range(monday, sunday):
                self.w(u'<tr>')
                self.w(u'<td>%s</td>' % _(WEEKDAYS[day.day_of_week]))
                self.w(u'<td>%s</td>' % (day.strftime('%Y-%m-%d')))
                events = schedule.get(day)
                if events:
                    events = ['\n'.join(event) for event in events.values()]
                    self.w(WEEK_CELL % '\n'.join(events))
                else:
                    self.w(WEEK_EMPTY_CELL)
                self.w(u'</tr>')

    def nav_header(self, date, smallshift=1, bigshift=3):
        """prints shortcut links to go to previous/next steps (month|week)"""
        prev1 = date - RelativeDateTime(weeks=smallshift)
        prev2 = date - RelativeDateTime(weeks=bigshift)
        next1 = date + RelativeDateTime(weeks=smallshift)
        next2 = date + RelativeDateTime(weeks=bigshift)
        rql, vid = self.rset.printable_rql(), self.id
        return self.NAV_HEADER % (
            html_escape(self.build_url(rql=rql, vid=vid, year=prev2.year, week=prev2.iso_week[1])),
            html_escape(self.build_url(rql=rql, vid=vid, year=prev1.year, week=prev1.iso_week[1])),
            html_escape(self.build_url(rql=rql, vid=vid, year=next1.year, week=next1.iso_week[1])),
            html_escape(self.build_url(rql=rql, vid=vid, year=next2.year, week=next2.iso_week[1])))
+
+
+        
class AMPMYearCalendarView(YearCalendarView):
    """year calendar where each day cell is split into am/pm half-day cells"""
    id = 'ampmcalendaryear'
    title = _('am/pm calendar (year)')
    
    def build_calendar(self, schedule, first_day):
        """method responsible for building *one* HTML calendar

        NOTE(review): this method is almost a byte-for-byte copy of
        AMPMMonthCalendarView.build_calendar; consider sharing it
        """
        umonth = self.format_date(first_day, '%B %Y') # localized month name
        rows = [] # each row is: (day,am,pm), (day,am,pm) ... week_title
        # pad the first week with empty cells up to the month's first weekday
        current_row = [(NO_CELL, NO_CELL, NO_CELL)] * first_day.day_of_week
        rql = self.rset.printable_rql()
        for daynum in xrange(0, first_day.days_in_month):
            # build cells day
            day = first_day + daynum
            events = schedule.get(day)
            if events:
                current_row.append((AMPM_DAY % (daynum+1),) + self._build_ampm_cells(daynum, events))
            else:
                current_row.append((AMPM_DAY % (daynum+1),
                                    AMPM_EMPTY % ("amCell", "am"),
                                    AMPM_EMPTY % ("pmCell", "pm")))
            # store & reset current row on Sundays
            if day.day_of_week == Sunday:
                url = self.build_url(rql=rql, vid='ampmcalendarweek',
                                     year=day.year, week=day.iso_week[1])
                weeklink = '<a href="%s">%s</a>' % (html_escape(url),
                                                    day.iso_week[1])
                current_row.append(WEEKNUM_CELL % weeklink)
                rows.append(current_row)
                current_row = []
        # pad and flush the (possibly partial) last week
        # NOTE(review): when the month ends on a Sunday this appends an extra
        # row holding only the week-number cell -- verify this is intended
        current_row.extend([(NO_CELL, NO_CELL, NO_CELL)] * (Sunday-day.day_of_week))
        url = self.build_url(rql=rql, vid='ampmcalendarweek',
                             year=day.year, week=day.iso_week[1])
        weeklink = '<a href="%s">%s</a>' % (html_escape(url), day.iso_week[1])
        current_row.append(WEEKNUM_CELL % weeklink)
        rows.append(current_row)
        # build two rows for each week: am & pm
        formatted_rows = []
        for row in rows:
            week_title = row.pop()
            day_row = [day for day, am, pm in row]
            am_row = [am for day, am, pm in row]
            pm_row = [pm for day, am, pm in row]
            formatted_rows.append('<tr>%s%s</tr>'% (week_title, '\n'.join(day_row)))
            formatted_rows.append('<tr class="amRow"><td>&nbsp;</td>%s</tr>'% '\n'.join(am_row))
            formatted_rows.append('<tr class="pmRow"><td>&nbsp;</td>%s</tr>'% '\n'.join(pm_row))
        # tie everything together
        url = self.build_url(rql=rql, vid='ampmcalendarmonth',
                             year=first_day.year, month=first_day.month)
        monthlink = '<a href="%s">%s</a>' % (html_escape(url), umonth)
        return CALENDAR(self.req) % (monthlink, '\n'.join(formatted_rows))
+        
+
+
class AMPMSemesterCalendarView(SemesterCalendarView):
    """semester calendar (one column per month) where each day is split
    into am/pm cells"""
    id = 'ampmcalendarsemester'
    title = _('am/pm calendar (semester)')

    def build_calendars(self, schedule, begin, end):
        """write a header row of month links, then one row per day number
        with three cells (day label, am, pm) per month column"""
        self.w(u'<tr>')
        rql = self.rset.printable_rql()
        for cur_month in date_range(begin, end, incr=ONE_MONTH):
            umonth = u'%s&nbsp;%s' % (self.format_date(cur_month, '%B'), cur_month.year)
            url = self.build_url(rql=rql, vid=self.id,
                                 year=cur_month.year, month=cur_month.month)
            self.w(u'<th colspan="3"><a href="%s">%s</a></th>' % (html_escape(url),
                                                                  umonth))
        self.w(u'</tr>')
        _ = self.req._
        for day_num in xrange(31):
            self.w(u'<tr>')
            for cur_month in date_range(begin, end, incr=ONE_MONTH):
                if day_num >= cur_month.days_in_month:
                    # no such day number in this month: three empty cells
                    self.w(u'%s%s%s' % (NO_CELL, NO_CELL, NO_CELL))
                else:
                    day = DateTime(cur_month.year, cur_month.month, day_num+1)
                    events = schedule.get(day)
                    # first (upper-cased) letter of the weekday + day number
                    self.w(u'<td>%s&nbsp;%s</td>\n' % (_(WEEKDAYS[day.day_of_week])[0].upper(),
                                                       day_num+1))
                    self.format_day_events(day, events)
            self.w(u'</tr>')
    
    def format_day_events(self, day, events):
        """write the am/pm cells for `day` (empty cells when no events)"""
        if events:
            # NOTE(review): sibling views call _build_ampm_cells(daynum, events)
            # with a 0-based day index while the full date object is passed
            # here -- verify _build_ampm_cells accepts both
            self.w(u'\n'.join(self._build_ampm_cells(day, events)))
        else:
            self.w(u'%s %s'% (AMPM_EMPTY % ("amCell", "am"), 
                              AMPM_EMPTY % ("pmCell", "pm")))
+
+
class AMPMMonthCalendarView(MonthCalendarView):
    """month calendar where each day cell is split into am/pm half-day cells"""
    id = 'ampmcalendarmonth'
    title = _('am/pm calendar (month)')

    def build_calendar(self, schedule, first_day):
        """method responsible for building *one* HTML calendar

        NOTE(review): this method is almost a byte-for-byte copy of
        AMPMYearCalendarView.build_calendar; consider sharing it
        """
        umonth = self.format_date(first_day, '%B %Y') # localized month name
        rows = [] # each row is: (day,am,pm), (day,am,pm) ... week_title
        # pad the first week with empty cells up to the month's first weekday
        current_row = [(NO_CELL, NO_CELL, NO_CELL)] * first_day.day_of_week
        rql = self.rset.printable_rql()
        for daynum in xrange(0, first_day.days_in_month):
            # build cells day
            day = first_day + daynum
            events = schedule.get(day)
            if events:
                current_row.append((AMPM_DAY % (daynum+1),) + self._build_ampm_cells(daynum, events))
            else:
                current_row.append((AMPM_DAY % (daynum+1),
                                    AMPM_EMPTY % ("amCell", "am"),
                                    AMPM_EMPTY % ("pmCell", "pm")))
            # store & reset current row on Sundays
            if day.day_of_week == Sunday:
                url = self.build_url(rql=rql, vid='ampmcalendarweek',
                                     year=day.year, week=day.iso_week[1])
                weeklink = '<a href="%s">%s</a>' % (html_escape(url),
                                                    day.iso_week[1])
                current_row.append(WEEKNUM_CELL % weeklink)
                rows.append(current_row)
                current_row = []
        # pad and flush the (possibly partial) last week
        # NOTE(review): when the month ends on a Sunday this appends an extra
        # row holding only the week-number cell -- verify this is intended
        current_row.extend([(NO_CELL, NO_CELL, NO_CELL)] * (Sunday-day.day_of_week))
        url = self.build_url(rql=rql, vid='ampmcalendarweek',
                             year=day.year, week=day.iso_week[1])
        weeklink = '<a href="%s">%s</a>' % (html_escape(url),
                                            day.iso_week[1])
        current_row.append(WEEKNUM_CELL % weeklink)
        rows.append(current_row)
        # build two rows for each week: am & pm
        formatted_rows = []
        for row in rows:
            week_title = row.pop()
            day_row = [day for day, am, pm in row]
            am_row = [am for day, am, pm in row]
            pm_row = [pm for day, am, pm in row]
            formatted_rows.append('<tr>%s%s</tr>'% (week_title, '\n'.join(day_row)))
            formatted_rows.append('<tr class="amRow"><td>&nbsp;</td>%s</tr>'% '\n'.join(am_row))
            formatted_rows.append('<tr class="pmRow"><td>&nbsp;</td>%s</tr>'% '\n'.join(pm_row))
        # tie everything together
        url = self.build_url(rql=rql, vid='ampmcalendarmonth',
                             year=first_day.year, month=first_day.month)
        monthlink = '<a href="%s">%s</a>' % (html_escape(url),
                                             umonth)
        return CALENDAR(self.req) % (monthlink, '\n'.join(formatted_rows))
+    
+
+    
class AMPMWeekCalendarView(WeekCalendarView):
    """week calendar where days with events get one row per event hour"""
    id = 'ampmcalendarweek'
    title = _('am/pm calendar (week)')

    def build_calendar(self, schedule, weeks):
        """for each (monday, sunday) couple in `weeks`, write a title row
        with a link to the month view, then one or more rows per day"""
        rql = self.rset.printable_rql()
        w = self.w
        _ = self.req._
        for monday, sunday in weeks:
            umonth = self.format_date(monday, '%B %Y')
            url = self.build_url(rql=rql, vid='ampmcalendarmonth',
                                 year=monday.year, month=monday.month)
            monthlink = '<a href="%s">%s</a>' % (html_escape(url), umonth)
            w(u'<tr>%s</tr>' % (
                WEEK_TITLE % (_('week'), monday.iso_week[1], monthlink)))
            w(u'<tr><th>%s</th><th>&nbsp;</th></tr>'% _(u'Date'))
            for day in date_range(monday, sunday):
                events = schedule.get(day)
                # alternate row background on even/odd weekdays
                style = day.day_of_week % 2 and "even" or "odd"
                w(u'<tr class="%s">' % style)
                if events:
                    hours = events.keys()
                    hours.sort()
                    # the day cell spans one row per distinct event hour
                    w(AMPM_DAYWEEK % (
                        len(hours), _(WEEKDAYS[day.day_of_week]),
                        self.format_date(day)))
                    w(AMPM_WEEK_CELL % (
                        hours[0].hour, hours[0].minute,
                        '\n'.join(events[hours[0]])))
                    w(u'</tr>')
                    for hour in hours[1:]:
                        w(u'<tr class="%s">%s</tr>'% (
                            style, AMPM_WEEK_CELL % (hour.hour, hour.minute,
                                                     '\n'.join(events[hour]))))
                else:
                    w(AMPM_DAYWEEK_EMPTY % (
                        _(WEEKDAYS[day.day_of_week]),
                        self.format_date(day)))
                    w(WEEK_EMPTY_CELL)
                    w(u'</tr>')
+
+
# 3x3 grid of small month calendars (filled by the year views)
SMALL_CALENDARS_PAGE = u"""<table class="smallCalendars">
<tr><td class="calendar">%s</td><td class="calendar">%s</td><td class="calendar">%s</td></tr>
<tr><td class="calendar">%s</td><td class="calendar">%s</td><td class="calendar">%s</td></tr>
<tr><td class="calendar">%s</td><td class="calendar">%s</td><td class="calendar">%s</td></tr>
</table>
"""

# 3x1 column of big month calendars (filled by the month views)
BIG_CALENDARS_PAGE = u"""<table class="bigCalendars">
<tr><td class="calendar">%s</td></tr>
<tr><td class="calendar">%s</td></tr>
<tr><td class="calendar">%s</td></tr>
</table>
"""

# cell holding the (linked) ISO week number at the end of a week row
WEEKNUM_CELL = u'<td class="weeknum">%s</td>'
+
def CALENDAR(req):
    """return the month calendar table skeleton with localized weekday
    headers; the result still contains two '%s' slots to be filled with
    the month title and the formatted week rows
    """
    translate = req._
    day_headers = [u'<th class="weekday">%s</th>' % translate(day)[0].upper()
                   for day in WEEKDAYS]
    header = (u'<th class="weeknum">%s</th>' % translate('week')) \
             + u' \n'.join(day_headers)
    return (u'<table>\n'
            u'<tr><th class="month" colspan="8">%%s</th></tr>\n'
            u'<tr>\n'
            u'  %s\n'
            u'</tr>\n'
            u'%%s\n'
            u'</table>\n') % (header,)
+
+
# template for one day row; made a unicode literal for consistency with all
# other templates in this module (avoids py2 str/unicode mixing surprises).
# NOTE(review): the <tr> is left unclosed here -- verify callers close it
DAY_TEMPLATE = u"""<tr><td class="weekday">%(daylabel)s</td><td>%(dmydate)s</td><td>%(dayschedule)s</td>
"""

# plain calendar cells: out-of-month filler, day without / with events
NO_CELL = u'<td class="noday"></td>'
EMPTY_CELL = u'<td class="cellEmpty"><span class="cellTitle">%s</span></td>'
CELL = u'<td class="cell"><span class="cellTitle">%s</span><div class="cellContent">%s</div></td>'

# am/pm calendar cells: day number, empty half-day, half-day with content
AMPM_DAY = u'<td class="cellDay">%d</td>'
AMPM_EMPTY = u'<td class="%sEmpty"><span class="cellTitle">%s</span></td>'
AMPM_CONTENT = u'<td class="%s"><span class="cellTitle">%s</span><div class="cellContent">%s</div></td>'

# week view cells
WEEK_TITLE = u'<th class="weekTitle" colspan="2">%s %s (%s)</th>'
WEEK_EMPTY_CELL = u'<td class="weekEmptyCell">&nbsp;</td>'
WEEK_CELL = u'<td class="weekCell"><div class="cellContent">%s</div></td>'

# am/pm week view: the day cell spans one row per event hour
AMPM_DAYWEEK_EMPTY = u'<td>%s&nbsp;%s</td>'
AMPM_DAYWEEK = u'<td rowspan="%d">%s&nbsp;%s</td>'
AMPM_WEEK_CELL = u'<td class="ampmWeekCell"><div class="cellContent">%02d:%02d - %s</div></td>'
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/plots.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,103 @@
+import os
+
+from logilab.common import flatten
+
+from cubicweb.web.views import baseviews
+
def plot_selector(cls, req, rset, *args, **kwargs):
    """selector for the plot view: accept a non-empty result set whose rows
    have at least two columns, all of numerical type ('Int' or 'Float')
    """
    if rset is None:
        return 0
    if not len(rset) or len(rset.rows[0]) < 2:
        return 0
    numerical_types = ('Int', 'Float')
    for coltype in rset.description[0]:
        if coltype not in numerical_types:
            return 0
    return 1
+
try:
    import matplotlib
    import sys
    if 'matplotlib.backends' not in sys.modules:
        matplotlib.use('Agg')
    from matplotlib.ticker import FormatStrFormatter
    from pylab import figure, show
except ImportError:
    # matplotlib not installed: simply don't register the plot view
    pass
else:
    class PlotView(baseviews.AnyRsetView):
        """render the result set as a PNG plot: the first column gives the
        abscissa (numbers or labels), each following column is one curve
        """
        id = 'plot'
        title = _('generic plot')
        binary = True
        content_type = 'image/png'
        _plot_count = 0 # class-level counter, makes generated file names unique
        __selectors__ = (plot_selector,)

        def call(self, width=None, height=None):
            """plot the result set and write the PNG bytes to the output

            dimensions (pixels) are taken from the arguments, then from the
            request form, and default to 500x400
            """
            # compute dimensions
            if width is None:
                width = int(self.req.form.get('width', 500))
            if height is None:
                height = int(self.req.form.get('height', 400))
            dpi = 100.
            # compute data: one curve per column after the first one
            abscisses = [row[0] for row in self.rset]
            courbes = []
            nbcols = len(self.rset.rows[0])
            for col in range(1, nbcols):
                courbes.append([row[col] for row in self.rset])
            if not courbes:
                raise Exception('no data')
            # plot data
            fig = figure(figsize=(width/dpi, height/dpi), dpi=dpi)
            ax = fig.add_subplot(111)
            colors = 'brgybrgy'
            try:
                float(abscisses[0])
                xlabels = None
            except ValueError:
                # non-numerical abscissa: plot against indices and keep the
                # original values as tick labels
                xlabels = abscisses
                abscisses = range(len(xlabels))
            for idx, courbe in enumerate(courbes):
                # cycle over the palette instead of raising IndexError when
                # more than len(colors) curves are plotted
                ax.plot(abscisses, courbe, '%sv-' % colors[idx % len(colors)],
                        label=self.rset.description[0][idx+1])
            ax.autoscale_view()
            alldata = flatten(courbes)
            m, M = min(alldata or [0]), max(alldata or [1])
            if m is None: m = 0
            if M is None: M = 0
            margin = float(M-m)/10
            ax.set_ylim(m-margin, M+margin)
            ax.grid(True)
            ax.legend(loc='best')
            if xlabels is not None:
                ax.set_xticks(abscisses)
                ax.set_xticklabels(xlabels)
            try:
                fig.autofmt_xdate()
            except AttributeError:
                # XXX too old version of matplotlib. Ignore safely.
                pass
            # save the plot to a temporary file, send it, and always clean up
            # (the previous code leaked both the file handle and the temp file
            #  when rendering failed)
            filename = self.build_figname()
            try:
                fig.savefig(filename, dpi=100)
                img = open(filename, 'rb')
                try:
                    self.w(img.read())
                finally:
                    img.close()
            finally:
                os.remove(filename)

        def build_figname(self):
            """return a unique, safely created temporary file name for the
            plot; mkstemp replaces the previous hardcoded, predictable
            /tmp path which was open to symlink/race attacks
            """
            import tempfile
            self.__class__._plot_count += 1
            fd, figname = tempfile.mkstemp(
                prefix='burndown_chart_%s_%d_' % (self.config.appid,
                                                  self.__class__._plot_count),
                suffix='.png')
            os.close(fd)
            return figname
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/schemaentities.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,123 @@
+"""Specific views for schema related entities
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from logilab.mtconverter import html_escape
+
+from cubicweb.schemaviewer import SchemaViewer
+from cubicweb.common.uilib import ureport_as_html
+from cubicweb.common.view import EntityView
+from cubicweb.web.views import baseviews
+
+
class ImageView(EntityView):
    """display an entity type as an inlined image tag pointing to its
    graphical schema view"""
    accepts = ('EEType',)
    id = 'image'
    title = _('image')

    def cell_call(self, row, col):
        eetype = self.entity(row, col)
        graph_url = html_escape(eetype.absolute_url(vid='eschemagraph'))
        alt = html_escape(self.req._('graphical schema for %s') % eetype.name)
        self.w(u'<img src="%s" alt="%s"/>' % (graph_url, alt))
+
+
class _SchemaEntityPrimaryView(baseviews.PrimaryView):
    """base class factoring behaviour common to schema entities' primary views"""
    show_attr_label = False
    cache_max_age = 60*60*2 # stay in http cache for 2 hours by default 
    
    def content_title(self, entity):
        """page title: the entity's (html-escaped) long dublin-core title"""
        return html_escape(entity.dc_long_title())
+    
class EETypePrimaryView(_SchemaEntityPrimaryView):
    """primary view for entity type entities"""
    accepts = ('EEType',)
    skip_attrs = _SchemaEntityPrimaryView.skip_attrs + ('name', 'meta', 'final')
+
class ERTypePrimaryView(_SchemaEntityPrimaryView):
    """primary view for relation type entities"""
    accepts = ('ERType',)
    skip_attrs = _SchemaEntityPrimaryView.skip_attrs + ('name', 'meta', 'final',
                                                        'symetric', 'inlined')
+
class ErdefPrimaryView(_SchemaEntityPrimaryView):
    """primary view for attribute / relation definition entities"""
    accepts = ('EFRDef', 'ENFRDef')
    show_attr_label = True
+
class EETypeSchemaView(EETypePrimaryView):
    """primary view for entity types, completed with the in-memory schema
    description (attributes layout and, for non final types, the graph)"""
    id = 'eschema'
    title = _('in memory entity schema')
    main_related_section = False
    skip_rels = ('is', 'is_instance_of', 'identity', 'created_by', 'owned_by',
                 'has_text',)

    def render_entity_attributes(self, entity, siderelations):
        super(EETypeSchemaView, self).render_entity_attributes(entity, siderelations)
        eschema = self.vreg.schema.eschema(entity.name)
        layout = SchemaViewer(self.req).visit_entityschema(eschema,
                                                           skiprels=self.skip_rels)
        self.w(ureport_as_html(layout))
        if not eschema.is_final():
            alt = self.req._('graphical schema for %s') % entity.name
            self.w(u'<img src="%s" alt="%s"/>' % (
                html_escape(entity.absolute_url(vid='eschemagraph')),
                html_escape(alt)))
+
class ERTypeSchemaView(ERTypePrimaryView):
    """primary view for relation types, completed with the in-memory schema
    description (layout and, for non final relations, the graph)"""
    id = 'eschema'
    title = _('in memory relation schema')
    main_related_section = False

    def render_entity_attributes(self, entity, siderelations):
        super(ERTypeSchemaView, self).render_entity_attributes(entity, siderelations)
        rschema = self.vreg.schema.rschema(entity.name)
        layout = SchemaViewer(self.req).visit_relationschema(rschema)
        self.w(ureport_as_html(layout))
        if not rschema.is_final():
            alt = self.req._('graphical schema for %s') % entity.name
            self.w(u'<img src="%s" alt="%s"/>' % (
                html_escape(entity.absolute_url(vid='eschemagraph')),
                html_escape(alt)))
+
+        
class EETypeWorkflowView(EntityView):
    """display the workflow defined for an entity type: title + graph image"""
    id = 'workflow'
    accepts = ('EEType',)
    cache_max_age = 60*60*2 # stay in http cache for 2 hours by default 
    
    def cell_call(self, row, col, **kwargs):
        entity = self.entity(row, col)
        # NOTE(review): display_name is not imported in this module --
        # presumably an injected builtin; verify
        self.w(u'<h1>%s</h1>' % (self.req._('workflow for %s')
                                 % display_name(self.req, entity.name)))
        self.w(u'<img src="%s" alt="%s"/>' % (
            html_escape(entity.absolute_url(vid='ewfgraph')),
            html_escape(self.req._('graphical workflow for %s') % entity.name)))
+
+
class EETypeOneLineView(baseviews.OneLineView):
    """one-line representation of an entity type, wrapping final types in
    an emphasis tag"""
    accepts = ('EEType',)

    def cell_call(self, row, col, **kwargs):
        is_final = self.entity(row, col).final
        if is_final:
            self.w(u'<em class="finalentity">')
        super(EETypeOneLineView, self).cell_call(row, col, **kwargs)
        if is_final:
            self.w(u'</em>')
+        
+
+from cubicweb.web.action import EntityAction
+
class ViewWorkflowAction(EntityAction):
    """main action linking to the workflow view of an entity type"""
    id = 'workflow'
    category = 'mainactions'
    title = _('view workflow')
    accepts = ('EEType',)
    condition = 'S state_of X' # must have at least one state associated

    def url(self):
        """url of the 'workflow' view for the selected entity"""
        row, col = self.row or 0, self.col or 0
        return self.rset.get_entity(row, col).absolute_url(vid='workflow')
+        
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/searchrestriction.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,38 @@
+"""contains utility functions and some visual component to restrict results of
+a search
+
+:organization: Logilab
+:copyright: 2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from simplejson import dumps
+
+from logilab.common.graph import has_path
+from logilab.common.decorators import cached
+from logilab.common.compat import all
+
+from logilab.mtconverter import html_escape
+
+from rql import nodes
+
+
+
+from cubicweb.web.facet import (VocabularyFacet, prepare_facets_rqlst)
+
+
# NOTE(review): the triple-quoted string below is a bare string expression,
# not this module's docstring (the module already has one above), and
# __docformat__ is re-assigned -- looks like a leftover from merging another
# module's header; harmless but worth cleaning up
"""Set of base controllers, which are directly plugged into the application
object to handle publication.


:organization: Logilab
:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
"""
__docformat__ = "restructuredtext en"

from logilab.common.deprecation import moved

# backward compatibility alias: the implementation moved to cubicweb.web.facet
insert_attr_select_relation = moved('cubicweb.web.facet',
                                    'insert_attr_select_relation')
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/sessions.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,69 @@
+"""web session component: by default the session is actually the db connection
+object :/
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from cubicweb.web import ExplicitLogin, InvalidSession
+from cubicweb.web.application import AbstractSessionManager
+
+
class InMemoryRepositorySessionManager(AbstractSessionManager):
    """manage session data associated to a session identifier"""

    def __init__(self):
        AbstractSessionManager.__init__(self)
        # XXX require a RepositoryAuthenticationManager which violates
        #     authenticate interface by returning a session instead of a user
        #assert isinstance(self.authmanager, RepositoryAuthenticationManager)
        # map sessionid -> session object
        self._sessions = {}

    def current_sessions(self):
        """return the list of currently opened sessions"""
        return self._sessions.values()

    def get_session(self, req, sessionid):
        """return existing session for the given session identifier

        :raise InvalidSession: if the identifier is unknown or the session
          has expired
        """
        if sessionid not in self._sessions:
            raise InvalidSession()
        session = self._sessions[sessionid]
        if self.has_expired(session):
            self.close_session(session)
            raise InvalidSession()
        # give an opportunity to auth manager to hijack the session
        # (necessary with the RepositoryAuthenticationManager in case
        #  the connection to the repository has expired)
        try:
            session = self.authmanager.validate_session(req, session)
            # necessary in case session has been hijacked
            self._sessions[session.sessionid] = session
        except InvalidSession:
            # invalid session
            del self._sessions[sessionid]
            raise
        return session

    def open_session(self, req):
        """open and return a new session for the given request
        
        :raise ExplicitLogin: if authentication is required
        """
        session = self.authmanager.authenticate(req)
        self._sessions[session.sessionid] = session
        return session

    def close_session(self, session):
        """close session on logout or on invalid session detected (expired out,
        corrupted...)
        """
        # lazy %-style arguments: formatting only happens if the record is kept
        self.info('closing http session %s', session)
        del self._sessions[session.sessionid]
        try:
            session.close()
        except Exception:
            # already closed, may occurs if the repository session expired but
            # not the web session; don't use a bare except which would also
            # swallow SystemExit / KeyboardInterrupt
            pass
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/startup.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,192 @@
+"""Set of HTML startup views. A startup view is global, e.g. doesn't
+apply to a result set.
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from logilab.mtconverter import html_escape
+
+from cubicweb.common.uilib import ureport_as_html, unormalize, ajax_replace_url
+from cubicweb.common.view import StartupView
+from cubicweb.web.httpcache import EtagHTTPCacheManager
+
+_ = unicode
+
+
+class ManageView(StartupView):
+    id = 'manage'
+    title = _('manage')    
+    http_cache_manager = EtagHTTPCacheManager
+
+    def display_folders(self):
+        return False
+    
+    def call(self, **kwargs):
+        """The default view representing the application's management"""
+        self.w(u'<div>\n')
+        if not self.display_folders():
+            self._main_index()
+        else:
+            self.w(u'<table><tr>\n')
+            self.w(u'<td style="width:40%">')
+            self._main_index()
+            self.w(u'</td><td style="width:60%">')            
+            self.folders()
+            self.w(u'</td>')
+            self.w(u'</tr></table>\n')
+        self.w(u'</div>\n')
+
+    def _main_index(self):
+        req = self.req
+        manager = req.user.matching_groups('managers')
+        if not manager and 'Card' in self.schema:
+            rset = self.req.execute('Card X WHERE X wikiid "index"')
+        else:
+            rset = None
+        if rset:
+            self.wview('inlined', rset, row=0)
+        else:
+            self.entities()
+            self.w(u'<div class="hr">&nbsp;</div>')
+            self.startup_views()
+        if manager and 'Card' in self.schema:
+            self.w(u'<div class="hr">&nbsp;</div>')
+            if rset:
+                href = rset.get_entity(0, 0).absolute_url(vid='edition')
+                label = self.req._('edit the index page')
+            else:
+                href = req.build_url('view', vid='creation', etype='Card', wikiid='index')
+                label = self.req._('create an index page')
+            self.w(u'<br/><a href="%s">%s</a>\n' % (html_escape(href), label))
+        
+    def folders(self):
+        self.w(u'<h4>%s</h4>\n' % self.req._('Browse by category'))
+        self.vreg.select_view('tree', self.req, None).dispatch(w=self.w)
+        
+    def startup_views(self):
+        self.w(u'<h4>%s</h4>\n' % self.req._('Startup views'))
+        self.startupviews_table()
+        
+    def startupviews_table(self):
+        for v in self.vreg.possible_views(self.req, None):
+            if v.category != 'startupview' or v.id in ('index', 'tree', 'manage'):
+                continue
+            self.w('<p><a href="%s">%s</a></p>' % (
+                html_escape(v.url()), html_escape(self.req._(v.title).capitalize())))
+        
+    def entities(self):
+        schema = self.schema
+        self.w(u'<h4>%s</h4>\n' % self.req._('The repository holds the following entities'))
+        manager = self.req.user.matching_groups('managers')
+        self.w(u'<table class="startup">')
+        if manager:
+            self.w(u'<tr><th colspan="4">%s</th></tr>\n' % self.req._('application entities'))
+        self.entity_types_table(eschema for eschema in schema.entities()
+                                if not eschema.meta and not eschema.is_subobject())
+        if manager: 
+            self.w(u'<tr><th colspan="4">%s</th></tr>\n' % self.req._('system entities'))
+            self.entity_types_table(eschema for eschema in schema.entities()
+                                    if eschema.meta and not eschema.schema_entity())
+            if 'EFRDef' in schema: # check schema support
+                self.w(u'<tr><th colspan="4">%s</th></tr>\n' % self.req._('schema entities'))
+                self.entity_types_table(schema.eschema(etype)
+                                        for etype in schema.schema_entity_types())
+        self.w(u'</table>')
+        
+    def entity_types_table(self, eschemas):
+        newline = 0
+        infos = sorted(self.entity_types(eschemas),
+                       key=lambda (l,a,e):unormalize(l))
+        q, r = divmod(len(infos), 2)
+        if r:
+            infos.append( (None, '&nbsp;', '&nbsp;') )
+        infos = zip(infos[:q+r], infos[q+r:])
+        for (_, etypelink, addlink), (_, etypelink2, addlink2) in infos:
+            self.w(u'<tr>\n')
+            self.w(u'<td class="addcol">%s</td><td>%s</td>\n' % (addlink,  etypelink))
+            self.w(u'<td class="addcol">%s</td><td>%s</td>\n' % (addlink2, etypelink2))
+            self.w(u'</tr>\n')
+        
+        
+    def entity_types(self, eschemas):
+        """return a list of formatted links to get a list of entities of
+        a each entity's types
+        """
+        req = self.req
+        for eschema in eschemas:
+            if eschema.is_final() or (not eschema.has_perm(req, 'read') and
+                                      not eschema.has_local_role('read')):
+                continue
+            etype = eschema.type
+            label = display_name(req, etype, 'plural')
+            nb = req.execute('Any COUNT(X) WHERE X is %s' % etype)[0][0]
+            if nb > 1:
+                view = self.vreg.select_view('list', req, req.etype_rset(etype))
+                url = view.url()
+            else:
+                url = self.build_url('view', rql='%s X' % etype)
+            etypelink = u'&nbsp;<a href="%s">%s</a> (%d)' % (
+                html_escape(url), label, nb)
+            yield (label, etypelink, self.add_entity_link(eschema, req))
+    
+    def add_entity_link(self, eschema, req):
+        """creates a [+] link for adding an entity if user has permission to do so"""
+        if not eschema.has_perm(req, 'add'):
+            return u''
+        return u'[<a href="%s" title="%s">+</a>]' % (
+            html_escape(self.create_url(eschema.type)),
+            self.req.__('add a %s' % eschema))
+
+    
+class IndexView(ManageView):
+    id = 'index'
+    title = _('index')
+    
+    def display_folders(self):
+        return 'Folder' in self.schema
+    
+
+
+class SchemaView(StartupView):
+    id = 'schema'
+    title = _('application schema')
+
+    def call(self):
+        """display schema information"""
+        self.req.add_js('cubicweb.ajax.js')
+        self.req.add_css('cubicweb.schema.css')
+        withmeta = int(self.req.form.get('withmeta', 0))
+        self.w(u'<img src="%s" alt="%s"/>\n' % (
+            html_escape(self.req.build_url('view', vid='schemagraph', withmeta=withmeta)),
+            self.req._("graphical representation of the application'schema")))
+        if withmeta:
+            self.w(u'<div><a href="%s">%s</a></div>' % (
+                self.build_url('schema', withmeta=0),
+                self.req._('hide meta-data')))
+        else:
+            self.w(u'<div><a href="%s">%s</a></div>' % (
+                self.build_url('schema', withmeta=1),
+                self.req._('show meta-data')))
+        self.w(u'<div id="detailed_schema"><a href="%s">%s</a></div>' %
+               (html_escape(ajax_replace_url('detailed_schema', '', 'schematext',
+                                             skipmeta=int(not withmeta))),
+                self.req._('detailed schema view')))
+
+
+class SchemaUreportsView(StartupView):
+    id = 'schematext'
+
+    def call(self):
+        from cubicweb.schemaviewer import SchemaViewer
+        skipmeta = int(self.req.form.get('skipmeta', True))
+        schema = self.schema
+        viewer = SchemaViewer(self.req)
+        layout = viewer.visit_schema(schema, display_relations=True,
+                                     skiprels=('is', 'is_instance_of', 'identity',
+                                               'owned_by', 'created_by'),
+                                     skipmeta=skipmeta)
+        self.w(ureport_as_html(layout))
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/tableview.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,340 @@
+"""generic table view, including filtering abilities
+
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from simplejson import dumps
+
+from logilab.mtconverter import html_escape
+
+from cubicweb.common.utils import make_uid
+from cubicweb.common.uilib import toggle_action, limitsize, jsonize, htmlescape
+from cubicweb.common.view import EntityView, AnyRsetView
+from cubicweb.common.selectors import (anyrset_selector,  req_form_params_selector,
+                                    accept_rset_selector)
+from cubicweb.web.htmlwidgets import (TableWidget, TableColumn, MenuWidget,
+                                   PopupBoxMenu, BoxLink)
+from cubicweb.web.facet import prepare_facets_rqlst, filter_hiddens
+
+class TableView(AnyRsetView):
+    id = 'table'
+    title = _('table')
+    finalview = 'final'
+    
+    def generate_form(self, divid, baserql, facets, hidden=True, vidargs={}):
+        """display a form to filter table's content. This should only
+        occurs when a context eid is given
+        """
+        self.req.add_js( ('cubicweb.ajax.js', 'cubicweb.formfilter.js'))
+        # drop False / None values from vidargs
+        vidargs = dict((k, v) for k, v in vidargs.iteritems() if v)
+        self.w(u'<form method="post" cubicweb:facetargs="%s" action="">' %
+               html_escape(dumps([divid, 'table', False, vidargs])))
+        self.w(u'<fieldset id="%sForm" class="%s">' % (divid, hidden and 'hidden' or ''))
+        self.w(u'<input type="hidden" name="divid" value="%s" />' % divid)
+        filter_hiddens(self.w, facets=','.join(facet.id for facet in facets), baserql=baserql)
+        self.w(u'<table class="filter">\n')
+        self.w(u'<tr>\n')
+        for facet in facets:
+            wdg = facet.get_widget()
+            #print 'FACT WIDGET', wdg # XXX stray debug output, disabled
+            if wdg is not None:
+                self.w(u'<td>')
+                wdg.render(w=self.w)
+                self.w(u'</td>\n')
+        self.w(u'</tr>\n')
+        self.w(u'</table>\n')
+        self.w(u'</fieldset>\n')
+        self.w(u'</form>\n')
+
+    def main_var_index(self):
+        """returns the index of the first non-attribute variable among the RQL
+        selected variables
+        """
+        eschema = self.vreg.schema.eschema
+        for i, etype in enumerate(self.rset.description[0]):
+            try:
+                if not eschema(etype).is_final():
+                    return i
+            except KeyError: # XXX possible?
+                continue
+        return None
+
+    def displaycols(self, displaycols):
+        if displaycols is None:
+            if 'displaycols' in self.req.form:
+                displaycols = [int(idx) for idx in self.req.form['displaycols']]
+            else:
+                displaycols = range(len(self.rset.syntax_tree().children[0].selection))
+        return displaycols
+    
+    def call(self, title=None, subvid=None, displayfilter=None, headers=None,
+             displaycols=None, displayactions=None, actions=(),
+             cellvids=None, cellattrs=None):
+        """Dumps a table displaying a composite query
+
+        :param title: title added before table
+        :param subvid: cell view
+        :param displayfilter: filter that selects rows to display
+        :param headers: columns' titles
+        """
+        rset = self.rset
+        req = self.req
+        req.add_js('jquery.tablesorter.js')
+        req.add_css('cubicweb.tablesorter.css')
+        rqlst = rset.syntax_tree()
+        # get rql description first since the filter form may remove some
+        # necessary information
+        rqlstdescr = rqlst.get_description()[0] # XXX missing Union support
+        mainindex = self.main_var_index()
+        hidden = True
+        if not subvid and 'subvid' in req.form:
+            subvid = req.form.pop('subvid')
+        divid = req.form.get('divid') or 'rs%s' % make_uid(id(rset))
+        actions = list(actions)
+        if mainindex is None:
+            displayfilter, displayactions = False, False
+        else:
+            if displayfilter is None and 'displayfilter' in req.form:
+                displayfilter = True
+                if req.form['displayfilter'] == 'shown':
+                    hidden = False
+            if displayactions is None and 'displayactions' in req.form:
+                displayactions = True
+        displaycols = self.displaycols(displaycols)
+        fromformfilter = 'fromformfilter' in req.form
+        # if fromformfilter is true, this is an ajax call and we only want to
+        # replace the inner div, so don't regenerate everything under the if
+        # below
+        if not fromformfilter:
+            div_class = 'section'
+            self.w(u'<div class="%s">' % div_class)
+            if not title and 'title' in req.form:
+                title = req.form['title']
+            if title:
+                self.w(u'<h2 class="tableTitle">%s</h2>\n' % title)
+            if displayfilter:
+                rqlst.save_state()
+                try:
+                    mainvar, baserql = prepare_facets_rqlst(rqlst, rset.args)
+                except NotImplementedError:
+                    # UNION query
+                    facets = None
+                else:
+                    facets = list(self.vreg.possible_vobjects('facets', req, rset,
+                                                              context='tablefilter',
+                                                              filtered_variable=mainvar))
+                    self.generate_form(divid, baserql, facets, hidden,
+                                       vidargs={'displaycols': displaycols,
+                                                'displayfilter': displayfilter,
+                                                'displayactions': displayactions})
+                    actions += self.show_hide_actions(divid, not hidden)
+                rqlst.recover()
+        elif displayfilter:
+            actions += self.show_hide_actions(divid, True)
+        self.w(u'<div id="%s"' % divid)
+        if displayactions:
+            for action in self.vreg.possible_actions(req, self.rset).get('mainactions', ()):
+                actions.append( (action.url(), req._(action.title), action.html_class(), None) )
+            self.w(u' cubicweb:displayactions="1">') # close <div tag
+        else:
+            self.w(u'>') # close <div tag
+        # render actions menu
+        if actions:
+            self.render_actions(divid, actions)
+        # render table
+        table = TableWidget(self)
+        for column in self.get_columns(rqlstdescr, displaycols, headers, subvid,
+                                       cellvids, cellattrs, mainindex):
+            table.append_column(column)
+        table.render(self.w)
+        self.w(u'</div>\n')
+        if not fromformfilter:
+            self.w(u'</div>\n')
+
+
+    def show_hide_actions(self, divid, currentlydisplayed=False):
+        showhide = u';'.join(toggle_action('%s%s' % (divid, what))[11:]
+                             for what in ('Form', 'Show', 'Hide', 'Actions'))
+        showhide = 'javascript:' + showhide
+        showlabel = self.req._('show filter form')
+        hidelabel = self.req._('hide filter form')
+        if currentlydisplayed:
+            return [(showhide, showlabel, 'hidden', '%sShow' % divid),
+                    (showhide, hidelabel, None, '%sHide' % divid)]
+        return [(showhide, showlabel, None, '%sShow' % divid), 
+                (showhide, hidelabel, 'hidden', '%sHide' % divid)]
+
+    def render_actions(self, divid, actions):
+        box = MenuWidget('', 'tableActionsBox', _class='', islist=False)
+        label = '<img src="%s" alt="%s"/>' % (
+            self.req.datadir_url + 'liveclipboard-icon.png',
+            html_escape(self.req._('action(s) on this selection')))
+        menu = PopupBoxMenu(label, isitem=False, link_class='actionsBox',
+                            ident='%sActions' % divid)
+        box.append(menu)
+        for url, label, klass, ident in actions:
+            menu.append(BoxLink(url, label, klass, ident=ident, escape=True))
+        box.render(w=self.w)
+        self.w(u'<div class="clear"/>')
+        
+    def get_columns(self, rqlstdescr, displaycols, headers, subvid, cellvids,
+                    cellattrs, mainindex):
+        columns = []
+        for colindex, attr in enumerate(rqlstdescr):
+            if colindex not in displaycols:
+                continue
+            # compute column header
+            if headers is not None:
+                label = headers[displaycols.index(colindex)]
+            elif colindex == 0 or attr == 'Any': # find a better label
+                label = ','.join(display_name(self.req, et)
+                                 for et in self.rset.column_types(colindex))
+            else:
+                label = display_name(self.req, attr)
+            if colindex == mainindex:
+                label += ' (%s)' % self.rset.rowcount
+            column = TableColumn(label, colindex)
+            coltype = self.rset.description[0][colindex]
+            # compute column cell view (if coltype is None, it's a left outer
+            # join, use the default non final subvid)
+            if cellvids and colindex in cellvids:
+                column.append_renderer(cellvids[colindex], colindex)
+            elif coltype is not None and self.schema.eschema(coltype).is_final():
+                column.append_renderer(self.finalview, colindex)
+            else:
+                column.append_renderer(subvid or 'incontext', colindex)
+
+
+            if cellattrs and colindex in cellattrs:
+                for name, value in cellattrs[colindex].iteritems():
+                    column.add_attr(name,value)
+            # add column
+            columns.append(column)
+        return columns
+        
+
+    def render(self, cellvid, row, col, w):
+        self.view('cell', self.rset, row=row, col=col, cellvid=cellvid, w=w)
+        
+    def get_rows(self):
+        return self.rset
+
+    @htmlescape
+    @jsonize
+    @limitsize(10)
+    def sortvalue(self, row, col):
+        # XXX it might be interesting to try to limit value's
+        #     length as much as possible (e.g. by returning the 10
+        #     first characters of a string)
+        val = self.rset[row][col]
+        if val is None:
+            return u''
+        etype = self.rset.description[row][col]
+        if self.schema.eschema(etype).is_final():
+            entity, rtype = self.rset.related_entity(row, col)
+            if entity is None:
+                return val # remove_html_tags() ?
+            return entity.sortvalue(rtype)
+        entity = self.rset.get_entity(row, col)
+        return entity.sortvalue()
+
+class EditableTableView(TableView):
+    id = 'editable-table'
+    finalview = 'editable-final'
+    title = _('editable-table')
+
+    
+class CellView(EntityView):
+    __selectors__ = (anyrset_selector, accept_rset_selector)
+    
+    id = 'cell'
+    accepts = ('Any',)
+    
+    def cell_call(self, row, col, cellvid=None):
+        """
+        :param row, col: indexes locating the cell value in view's result set
+        :param cellvid: cell view (defaults to 'outofcontext')
+        """
+        etype, val = self.rset.description[row][col], self.rset[row][col]
+        if val is not None and not self.schema.eschema(etype).is_final():
+            e = self.rset.get_entity(row, col)
+            e.view(cellvid or 'outofcontext', w=self.w)
+        elif val is None:
+            # This is usually caused by a left outer join and in that case,
+            # regular views will most certainly fail if they don't have
+            # a real eid
+            self.wview('final', self.rset, row=row, col=col)
+        else:
+            self.wview(cellvid or 'final', self.rset, 'null', row=row, col=col)
+
+
+class InitialTableView(TableView):
+    """same display as  table view but consider two rql queries :
+    
+    * the default query (ie `rql` form parameter), which is only used to select
+      this view and to build the filter form. This query should have the same
+      structure as the actual without actual restriction (but link to
+      restriction variables) and usually with a limit for efficiency (limit set
+      to 2 is advised)
+      
+    * the actual query (`actualrql` form parameter) whose results will be
+      displayed with default restrictions set
+    """
+    id = 'initialtable'
+    __selectors__ = anyrset_selector, req_form_params_selector
+    form_params = ('actualrql',)
+    # should not be displayed in possible view since it expects some specific
+    # parameters
+    title = None
+    
+    def call(self, title=None, subvid=None, headers=None,
+             displaycols=None, displayactions=None):
+        """Dumps a table displaying a composite query"""
+        actions = (); actrql = self.req.form['actualrql']
+        self.ensure_ro_rql(actrql)
+        displaycols = self.displaycols(displaycols)
+        if displayactions is None and 'displayactions' in self.req.form:
+            displayactions = True
+        self.w(u'<div class="section">')
+        if not title and 'title' in self.req.form:
+            # pop title so it's not displayed by the table view as well
+            title = self.req.form.pop('title')
+        if title:
+            self.w(u'<h2>%s</h2>\n' % title)
+        mainindex = self.main_var_index()
+        if mainindex is not None:
+            rqlst = self.rset.syntax_tree()
+            # union not yet supported
+            if len(rqlst.children) == 1:
+                rqlst.save_state()
+                mainvar, baserql = prepare_facets_rqlst(rqlst, self.rset.args)
+                facets = list(self.vreg.possible_vobjects('facets', self.req, self.rset,
+                                                          context='tablefilter',
+                                                          filtered_variable=mainvar))
+                
+                if facets:
+                    divid = self.req.form.get('divid', 'filteredTable')
+                    self.generate_form(divid, baserql, facets, 
+                                       vidargs={'displaycols': displaycols,
+                                                'displayactions': displayactions,
+                                                'displayfilter': True})
+                    actions = self.show_hide_actions(divid, False)
+                rqlst.recover()
+        if not subvid and 'subvid' in self.req.form:
+            subvid = self.req.form.pop('subvid')
+        self.view('table', self.req.execute(actrql),
+                  'noresult', w=self.w, displayfilter=False, subvid=subvid,
+                  displayactions=displayactions, displaycols=displaycols,
+                  actions=actions, headers=headers)
+        self.w(u'</div>\n')
+
+
+class EditableInitiableTableView(InitialTableView):
+    id = 'editable-initialtable'
+    finalview = 'editable-final'
+    
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/timeline.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,124 @@
+"""basic support for SIMILE's timeline widgets
+
+cf. http://code.google.com/p/simile-widgets/
+
+:organization: Logilab
+:copyright: 2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+import simplejson
+
+from logilab.mtconverter import html_escape
+
+from cubicweb.interfaces import ICalendarable
+from cubicweb.common.view import EntityView, StartupView
+from cubicweb.common.selectors import interface_selector
+
+
+# 
+class TimelineJsonView(EntityView):
+    """generates a json file to feed Timeline.loadJSON()
+    NOTE: work in progress (image_url, bubbleUrl and so on
+    should be properties of entity classes or subviews)
+    """
+    id = 'timeline-json'
+    binary = True
+    templatable = False
+    content_type = 'application/json'
+
+    __selectors__ = (interface_selector,)
+    accepts_interfaces = (ICalendarable,)
+    date_fmt = '%Y/%m/%d'
+    
+    def call(self):
+        events = []
+        for entity in self.rset.entities():
+            event = self.build_event(entity)
+            if event is not None:
+                events.append(event)
+        timeline_data = {'dateTimeFormat': self.date_fmt,
+                         'events': events}
+        self.w(simplejson.dumps(timeline_data))
+
+    # FIXME: those properties should be defined by the entity class
+    def onclick_url(self, entity):
+        return entity.absolute_url()
+    
+    def onclick(self, entity):
+        url = self.onclick_url(entity)
+        if url:
+            return u"javascript: document.location.href='%s'" % url
+        return None
+    
+    def build_event(self, entity):
+        """converts `entity` into a JSON object
+        {'start': '1891',
+        'end': '1915',
+        'title': 'Portrait of Horace Brodsky',
+        'description': 'by Henri Gaudier-Brzeska, French Sculptor, 1891-1915',
+        'image': 'http://imagecache2.allposters.com/images/BRGPOD/102770_b.jpg',
+        'link': 'http://www.allposters.com/-sp/Portrait-of-Horace-Brodsky-Posters_i1584413_.htm'
+        }
+        """
+        start = entity.start
+        stop = entity.stop
+        start = start or stop
+        if start is None and stop is None:
+            return None
+        event_data = {'start': start.strftime(self.date_fmt),
+                      'title': html_escape(entity.dc_title()),
+                      'description': entity.dc_description(),
+                      'link': entity.absolute_url(),
+                      }
+        onclick = self.onclick(entity)
+        if onclick:
+            event_data['onclick'] = onclick
+        if stop:
+            event_data['end'] = stop.strftime(self.date_fmt)
+        return event_data
+
+    
+class TimelineViewMixIn(object):
+    widget_class = 'TimelineWidget'
+    jsfiles = ('cubicweb.timeline-bundle.js', 'cubicweb.widgets.js',
+               'cubicweb.timeline-ext.js', 'cubicweb.ajax.js')
+    
+    def render(self, loadurl, tlunit=None):
+        tlunit = tlunit or self.req.form.get('tlunit')
+        self.req.add_js(self.jsfiles)
+        self.req.add_css('timeline-bundle.css')
+        if tlunit:
+            additional = u' cubicweb:tlunit="%s"' % tlunit
+        else:
+            additional = u''
+        self.w(u'<div class="widget" cubicweb:wdgtype="%s" '
+               u'cubicweb:loadtype="auto" cubicweb:loadurl="%s" %s >' %
+               (self.widget_class, html_escape(loadurl),
+                additional))
+        self.w(u'</div>')
+
+
+class TimelineView(TimelineViewMixIn, EntityView):
+    """builds a cubicweb timeline widget node"""
+    id = 'timeline'
+    __selectors__ = (interface_selector,)
+    accepts_interfaces = (ICalendarable,)
+    need_navigation = False
+    def call(self, tlunit=None):
+        self.req.html_headers.define_var('Timeline_urlPrefix', self.req.datadir_url)
+        rql = self.rset.printable_rql()
+        loadurl = self.build_url(rql=rql, vid='timeline-json')
+        self.render(loadurl, tlunit)
+        
+    
+class StaticTimelineView(TimelineViewMixIn, StartupView):
+    """similar to `TimelineView` but loads data from a static
+    JSON file instead of one after a RQL query.
+    """
+    id = 'static-timeline'
+    
+    def call(self, loadurl, tlunit=None, wdgclass=None):
+        self.widget_class = wdgclass or self.widget_class
+        self.render(loadurl, tlunit)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/timetable.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,203 @@
+"""html calendar views
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+
+from logilab.mtconverter import html_escape
+
+from cubicweb.interfaces import ITimetableViews
+from cubicweb.common.utils import date_range
+from cubicweb.common.selectors import interface_selector, anyrset_selector
+from cubicweb.common.view import AnyRsetView
+
+
+class _TaskEntry(object):
+    """cell descriptor for a task displayed in the timetable: records
+    the task, its css color class, the sub-column it was assigned to
+    and how many consecutive rows (days) it spans
+    """
+    def __init__(self, task, color, column):
+        self.task = task
+        self.color = color
+        self.column = column
+        # row span, incremented each time the task shows up on the
+        # following day for the same user
+        self.lines = 1
+
+MIN_COLS = 3  # minimum number of task columns for a single user
+
+class TimeTableView(AnyRsetView):
+    """displays a result set of tasks (anything with start/stop),
+    optionally associated to users, as an html timetable: one row per
+    day, one group of sub-columns per user (overlapping tasks for the
+    same user are laid out side by side)
+    """
+    id = 'timetable'
+    title = _('timetable')
+    __selectors__ = (interface_selector, anyrset_selector)
+    accepts_interfaces = (ITimetableViews,)
+    need_navigation = False
+
+    def call(self, title=None):
+        """Dumps a timetable from a resultset composed of a note (anything
+        with start/stop) and a user (anything)"""
+        self.req.add_css('cubicweb.timetable.css')
+        dates = {}      # date -> {user -> set(tasks)}
+        users = []      # users in original result set order
+        users_max = {}  # user -> max number of simultaneous tasks
+
+        # XXX: try refactoring with calendar.py:OneMonthCal
+        for row in xrange(self.rset.rowcount):
+            task = self.rset.get_entity(row,0)
+            if len(self.rset[row])>1:
+                user = self.rset.get_entity(row,1)
+            else:
+                # no user column in the result set: single anonymous column
+                user = u"*"
+            # expand each task into the list of days it covers
+            the_dates = []
+            if task.start and task.stop:
+                if task.start.absdate == task.stop.absdate:
+                    the_dates.append(task.start)
+                else:
+                    the_dates += date_range( task.start, task.stop )
+            elif task.start:
+                the_dates.append(task.start)
+            elif task.stop:
+                the_dates.append(task.stop)
+            for d in the_dates:
+                d_users = dates.setdefault(d, {})
+                u_tasks = d_users.setdefault(user,set())
+                u_tasks.add( task )
+                task_max = users_max.setdefault(user,0)
+                if len(u_tasks)>task_max:
+                    users_max[user] = len(u_tasks)
+            if user not in users:
+                # keep original ordering
+                users.append(user)
+        if not dates:
+            return
+        date_min = min(dates)
+        date_max = max(dates)
+        #users = list(sorted(users, key=lambda u:u.login))
+
+        rows = []
+        # colors here are class names defined in cubicweb.css
+        colors = [ "col%x"%i for i in range(12) ]
+        next_color_index = 0
+
+        visited_tasks = {} # holds a description of a task for a user
+        task_colors = {}   # remember a color assigned to a task
+        for date in date_range(date_min, date_max):
+            columns = [date]
+            d_users = dates.get(date, {})
+            for user in users:
+                # every user has its column "splitted" in at least MIN_COLS
+                # sub-columns (for overlapping tasks)
+                user_columns = [None] * max(MIN_COLS, users_max[user])
+                # every task that is "visited" for the first time
+                # require a special treatment, so we put them in
+                # 'postpone'
+                postpone = []
+                for task in d_users.get(user, []):
+                    key = (task, user)
+                    if key in visited_tasks:
+                        # task continues from a previous day: reuse its
+                        # sub-column and extend its row span
+                        task_descr = visited_tasks[ key ]
+                        user_columns[task_descr.column] = task_descr, False
+                        task_descr.lines+=1
+                    else:
+                        postpone.append(key)
+                for key in postpone:
+                    # to every 'new' task we must affect a color
+                    # (which must be the same for every user concerned
+                    # by the task)
+                    task, user = key
+                    # place the task in the first free sub-column
+                    for i,t in enumerate(user_columns):
+                        if t is None:
+                            if task in task_colors:
+                                color = task_colors[task]
+                            else:
+                                color = colors[next_color_index]
+                                next_color_index = (next_color_index+1)%len(colors)
+                                task_colors[task] = color
+                            task_descr = _TaskEntry(task, color, i)
+                            user_columns[i] = task_descr, True
+                            visited_tasks[key] = task_descr
+                            break
+                    else:
+                        # should not happen: user_columns is sized from
+                        # users_max, so a free slot must exist
+                        raise RuntimeError("is it possible we got it wrong?")
+
+                columns.append( user_columns )
+            rows.append( columns )
+
+        widths = [ len(col) for col in rows[0][1:] ]
+        self.w(u'<div class="section">')
+        if title:
+            self.w(u'<h4>%s</h4>\n' % title)
+        self.w(u'<table class="listing timetable">')
+        self.render_col_headers(users, widths)
+        self.render_rows(rows)
+        self.w(u'</table>')
+        self.w(u'</div>\n')
+
+    def render_col_headers(self,users,widths):
+        """ render column headers """
+        self.w(u'<tr class="header">\n')
+
+        self.w(u'<th class="ttdate">&nbsp;</th>\n')
+        # NOTE(review): `columns` is never filled, the return value is
+        # always an empty list and callers ignore it
+        columns = []
+        for user,width in zip(users,widths):
+            self.w(u'<th colspan="%s">' % max(MIN_COLS,width))
+            if user!=u"*":
+                user.view('secondary',w=self.w)
+            else:
+                self.w(user)
+            self.w(u'</th>')
+        self.w(u'</tr>\n')
+        return columns
+
+    def render_rows(self, rows):
+        """ render table content (row headers and central content) """
+        # NOTE(review): `odd` is toggled below but never read
+        odd = False
+        previous_is_empty = False
+        for row in rows:
+            date = row[0]
+            # a line is empty when no user has any task cell on that day
+            empty_line = True
+            for group in row[1:]:
+                for value in group:
+                    if value:
+                        empty_line = False
+                        break
+                else:
+                    continue
+                break
+            # collapse consecutive empty days into a single '...' row
+            if empty_line and previous_is_empty:
+                continue
+            previous_is_empty = False
+
+            klass = "even"
+            if date.day_of_week in (5,6) and not empty_line:
+                # week-end days get the alternate style
+                klass = "odd"
+            self.w(u'<tr class="%s">' % klass)
+            odd = not odd
+
+            if not empty_line:
+                self.w(u'<th class="ttdate">%s</th>' % self.format_date(date) )
+            else:
+                self.w(u'<th>...</th>'  )
+                previous_is_empty = True
+
+            empty_klasses = [ "ttle", "ttme", "ttre" ]
+            filled_klasses = [ "ttlf", "ttmf", "ttrf" ]
+            kj = 0 # 0: left, 1: mid, 2: right
+            for uid, group in enumerate(row[1:]):
+                for i, value in enumerate(group):
+                    if i == 0:
+                        kj = 0
+                    elif i == len(group):
+                        # NOTE(review): i is always < len(group) so this
+                        # branch never runs and the "right" classes
+                        # (ttre/ttrf) are never applied; probably meant
+                        # `len(group) - 1` — confirm intent before fixing
+                        kj = 2
+                    else:
+                        kj = 1
+                    if value:
+                        task_descr, first_row = value
+                        if first_row:
+                            # only the first day of a task emits a <td>,
+                            # following days are covered by rowspan
+                            url = html_escape(task_descr.task.absolute_url(vid="edition"))
+                            self.w(u'<td rowspan="%d" class="%s %s" onclick="document.location=\'%s\'">&nbsp;<div>' % (
+                                task_descr.lines, task_descr.color, filled_klasses[kj], url))
+                            task_descr.task.view('tooltip', w=self.w)
+                            self.w(u'</div></td>')
+                    else:
+                        if empty_line:
+                            self.w(u'<td class="ttempty">&nbsp;</td>')
+                        else:
+                            self.w(u'<td class="%s">&nbsp;</td>' % empty_klasses[kj] )
+            self.w(u'</tr>\n')
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/treeview.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,77 @@
+from logilab.mtconverter import html_escape
+
+from cubicweb.interfaces import ITree
+from cubicweb.common.selectors import interface_selector, yes_selector
+from cubicweb.common.view import EntityView
+
+class TreeView(EntityView):
+    """render the result set as an unordered list handled by jquery's
+    treeview plugin; each item is rendered with the 'treeitemview' view
+    """
+    id = 'treeview'
+    accepts = ('Any',)
+    
+    def call(self, subvid=None):
+        # the sub-view used to render each node's content may be passed
+        # through the form (eg on ajax expansion of a node)
+        if subvid is None and 'subvid' in self.req.form:
+            subvid = self.req.form.pop('subvid') # consume it
+        if subvid is None:
+            subvid = 'oneline'
+        self.req.add_css('jquery.treeview.css')
+        self.req.add_js(('cubicweb.ajax.js', 'jquery.treeview.js', 'cubicweb.widgets.js'))
+        self.w(u'<ul class="treeview widget" cubicweb:loadtype="auto" cubicweb:wdgtype="TreeView">')
+        for rowidx in xrange(len(self.rset)):
+            self.wview('treeitemview', self.rset, row=rowidx, col=0, vid=subvid)
+        self.w(u'</ul>')
+
+
+class DefaultTreeViewItemView(EntityView):
+    """default treeitem view for entities which don't implement ITree
+    """
+    id = 'treeitemview'
+    accepts = ('Any',)
+    
+    def cell_call(self, row, col, vid='oneline'):
+        """render the entity as a plain <li> item, tagging the last row
+        with the "last" css class expected by jquery's treeview plugin
+        """
+        # (removed an unused `entity = self.entity(row, col)` local)
+        itemview = self.view(vid, self.rset, row=row, col=col)
+        if row == len(self.rset) - 1:
+            self.w(u'<li class="last">%s</li>' % itemview)
+        else:
+            self.w(u'<li>%s</li>' % itemview)
+
+
+class TreeViewItemView(EntityView):
+    """specific treeitem view for entities which implement ITree
+    
+    (each item should be expandable if it's not a tree leaf)
+    """
+    id = 'treeitemview'
+    # XXX append yes_selector to make sure we get an higher score than
+    #     the default treeitem view
+    __selectors__ = (interface_selector, yes_selector)
+    accepts_interfaces = (ITree,)
+    
+    def cell_call(self, row, col, vid='oneline'):
+        entity = self.entity(row, col)
+        cssclasses = []
+        # NOTE(review): despite its name, is_leaf actually means "last
+        # row of the result set" (used for the *Last* css variants)
+        is_leaf = False
+        if row == len(self.rset) - 1:
+            is_leaf = True
+        if not hasattr(entity, 'is_leaf') or entity.is_leaf():
+            # no children: plain item, no expansion handle
+            if is_leaf : cssclasses.append('last')
+            self.w(u'<li class="%s">' % u' '.join(cssclasses))
+        else:
+            # expandable node: children are fetched lazily through the
+            # json controller using the entity's children rql
+            rql = entity.children_rql() % {'x': entity.eid}
+            url = html_escape(self.build_url('json', rql=rql, vid='treeview',
+                                             pageid=self.req.pageid,
+                                             subvid=vid))
+            cssclasses.append('expandable')
+            divclasses = ['hitarea expandable-hitarea']
+            if is_leaf :
+                cssclasses.append('lastExpandable')
+                divclasses.append('lastExpandable-hitarea')
+            self.w(u'<li cubicweb:loadurl="%s" class="%s">' % (url, u' '.join(cssclasses)))
+            self.w(u'<div class="%s"> </div>' % u' '.join(divclasses))
+                
+            # add empty <ul> because jquery's treeview plugin checks for
+            # sublists presence
+            self.w(u'<ul class="placeholder"><li>place holder</li></ul>')
+        self.wview(vid, self.rset, row=row, col=col)
+        self.w(u'</li>')
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/urlpublishing.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,247 @@
+"""associate url's path to view identifier / rql queries
+
+It currently handle url's path with the forms
+
+* <publishing_method>
+
+* minimal REST publishing:
+  * <eid>
+  * <etype>[/<attribute name>/<attribute value>]*
+
+* folder navigation
+
+
+You can actually control URL (more exactly path) resolution using URL path
+evaluator.
+
+XXX actionpath and folderpath execute a query whose results is lost
+because of redirecting instead of direct traversal
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+
+__docformat__ = "restructuredtext en"
+
+from rql import TypeResolverException
+
+from cubicweb import RegistryException, typed_eid
+from cubicweb.web import NotFound, Redirect
+from cubicweb.web.component import SingletonComponent, Component
+
+
+class PathDontMatch(Exception):
+    """raised by url path evaluators to notify the publisher they
+    can't evaluate the given path (the next evaluator is then tried)
+    """
+    
+class URLPublisherComponent(SingletonComponent):
+    """associate url's path to view identifier / rql queries,
+    by applying a chain of urlpathevaluator components.
+
+    An evaluator is a URLPathEvaluator subclass with a .evaluate_path
+    method taking the request object and the path to publish as
+    argument.  It will either return a publishing method identifier
+    and a rql query on success or raise a `PathDontMatch` exception
+    on failure. URL evaluators are called according to their `priority`
+    attribute, with 0 as the greatest priority and greater values as
+    lower priority.  The first evaluator returning a result or raising
+    something else than `PathDontMatch` will stop the handlers chain.
+    """
+    id = 'urlpublisher'
+    
+    def __init__(self, default_method='view'):
+        super(URLPublisherComponent, self).__init__()
+        self.default_method = default_method
+        evaluators = []        
+        for evaluatorcls in self.vreg.registry_objects('components',
+                                                       'urlpathevaluator'):
+            # instantiation needed
+            evaluator = evaluatorcls(self)
+            evaluators.append(evaluator)
+        # sorted by ascending priority: lowest value tried first
+        self.evaluators = sorted(evaluators, key=lambda x: x.priority)
+        
+    def process(self, req, path):
+        """given an url (essentially characterized by a path on the server,
+        but additional information may be found in the request object), return
+        a publishing method identifier (eg controller) and an optional result
+        set
+        
+        :type req: `cubicweb.web.Request`
+        :param req: the request object
+        
+        :type path: str
+        :param path: the path of the resource to publish
+
+        :rtype: tuple(str, `cubicweb.common.utils.ResultSet` or None)
+        :return: the publishing method identifier and an optional result set
+        
+        :raise NotFound: if no handler is able to decode the given path
+        """
+        # empty path falls back to the default publishing method
+        parts = [part for part in path.split('/')
+                 if part != ''] or (self.default_method,)
+        if req.form.get('rql'):
+            # an explicit rql query short-circuits path evaluation
+            if parts[0] in self.vreg.registry('controllers'):
+                return parts[0], None
+            return 'view', None
+        for evaluator in self.evaluators:
+            try:
+                # hand a copy of parts since evaluators may consume it
+                pmid, rset = evaluator.evaluate_path(req, parts[:])
+                break
+            except PathDontMatch:
+                continue
+        else:
+            raise NotFound(path)
+        if pmid is None:
+            pmid = self.default_method
+        return pmid, rset
+
+        
+class URLPathEvaluator(Component):
+    """abstract base class for url path evaluators; concrete subclasses
+    must define a `priority` attribute and an `evaluate_path(req, parts)`
+    method returning (publishing method id, rset) or raising PathDontMatch
+    """
+    __abstract__ = True
+    id = 'urlpathevaluator'
+
+    def __init__(self, urlpublisher):
+        # back-reference to the URLPublisherComponent driving this evaluator
+        self.urlpublisher = urlpublisher
+
+
+class RawPathEvaluator(URLPathEvaluator):
+    """handle path of the form::
+
+        <publishing_method>?parameters...
+    """
+    priority = 0
+    def evaluate_path(self, req, parts):
+        # a single segment naming a registered controller is used
+        # directly as the publishing method, with no result set
+        if len(parts) == 1 and parts[0] in self.vreg.registry('controllers'):
+            return parts[0], None
+        raise PathDontMatch()
+
+
+class EidPathEvaluator(URLPathEvaluator):
+    """handle path with the form::
+
+        <eid>
+    """
+    priority = 1
+    def evaluate_path(self, req, parts):
+        if len(parts) != 1:
+            raise PathDontMatch()
+        try:
+            rset = req.execute('Any X WHERE X eid %(x)s',
+                               {'x': typed_eid(parts[0])}, 'x')
+        except ValueError:
+            # not an integer, let another evaluator handle it
+            raise PathDontMatch()
+        if rset.rowcount == 0:
+            # the path *was* an eid, but no such entity: hard failure
+            raise NotFound()
+        return None, rset
+
+        
+class RestPathEvaluator(URLPathEvaluator):
+    """handle path with the form::
+
+        <etype>[[/<attribute name>]/<attribute value>]*
+    """
+    priority = 2
+    def __init__(self, urlpublisher):
+        super(RestPathEvaluator, self).__init__(urlpublisher)
+        # map lower-cased entity type names to their actual name, so
+        # urls are case-insensitive on the etype part
+        self.etype_map = {}
+        for etype in self.schema.entities():
+            etype = str(etype)
+            self.etype_map[etype.lower()] = etype
+            
+    def evaluate_path(self, req, parts):
+        # expects 1 to 3 segments: etype[/attrname]/value
+        if not (0 < len(parts) < 4):
+            raise PathDontMatch()
+        try:
+            etype = self.etype_map[parts.pop(0).lower()]
+        except KeyError:
+            raise PathDontMatch()
+        cls = self.vreg.etype_class(etype)
+        if parts:
+            if len(parts) == 2:
+                # explicit attribute name: check it's a valid relation
+                attrname = parts.pop(0).lower()
+                try:
+                    cls.e_schema.subject_relation(attrname)
+                except KeyError:
+                    raise PathDontMatch()
+            else:
+                # single value segment: use the etype's default rest attribute
+                attrname = cls._rest_attr_info()[0]
+            value = req.url_unquote(parts.pop(0))
+            rset = self.attr_rset(req, etype, attrname, value)
+        else:
+            rset = self.cls_rset(req, cls)
+        if rset.rowcount == 0:
+            raise NotFound()
+        return None, rset
+
+    def cls_rset(self, req, cls):
+        # all entities of the type, using the class' standard fetch query
+        return req.execute(cls.fetch_rql(req.user))
+        
+    def attr_rset(self, req, etype, attrname, value):
+        # entities of the type whose attribute matches the given value
+        rql = u'Any X WHERE X is %s, X %s %%(x)s' % (etype, attrname)
+        if attrname == 'eid':
+            try:
+                rset = req.execute(rql, {'x': typed_eid(value)}, 'x')
+            except (ValueError, TypeResolverException):
+                # conflicting eid/type
+                raise PathDontMatch()
+        else:
+            rset = req.execute(rql, {'x': value})
+        return rset
+
+
+class URLRewriteEvaluator(URLPathEvaluator):
+    """tries to find a rewrite rule to apply
+
+    URL rewrite rule definitions are stored in URLRewriter objects
+    """
+    priority = 3
+    def evaluate_path(self, req, parts):
+        # uri <=> req._twreq.path or req._twreq.uri
+        uri = req.url_unquote('/' + '/'.join(parts))
+        # try rewriters by descending priority (highest first)
+        vobjects = sorted(self.vreg.registry_objects('urlrewriting'),
+                          key=lambda x: x.priority,
+                          reverse=True)
+        for rewritercls in vobjects:
+            rewriter = rewritercls()
+            try:
+                # XXX we might want to chain url rewrites
+                return rewriter.rewrite(req, uri)
+            except KeyError:
+                # rewriter has no rule for this uri, try the next one
+                continue
+        raise PathDontMatch()
+        
+
+class ActionPathEvaluator(URLPathEvaluator):
+    """handle path with the form::
+
+    <any evaluator path>/<action>
+    """
+    priority = 4
+    def evaluate_path(self, req, parts):
+        if len(parts) < 2:
+            raise PathDontMatch()
+        # remove last part and see if this is something like an actions
+        # if so, call
+        try:
+            requested = parts.pop(-1)
+            actions = self.vreg.registry_objects('actions', requested)
+        except RegistryException:
+            raise PathDontMatch()
+        # re-run the remaining path through the other evaluators to get
+        # the rset the action should apply to
+        for evaluator in self.urlpublisher.evaluators:
+            # skip ourselves and the raw (controller) evaluator
+            if evaluator is self or evaluator.priority == 0:
+                continue
+            try:
+                pmid, rset = evaluator.evaluate_path(req, parts[:])
+            except PathDontMatch:
+                continue
+            else:
+                try:
+                    action = self.vreg.select(actions, req, rset)
+                except RegistryException:
+                    raise PathDontMatch()
+                else:
+                    # XXX avoid redirect
+                    raise Redirect(action.url())
+        raise PathDontMatch()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/urlrewrite.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,208 @@
+"""Rules based url rewriter component, to get configurable RESTful urls
+
+:organization: Logilab
+:copyright: 2007-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+import re
+
+from cubicweb.vregistry import autoselectors
+
+from cubicweb.common.registerers import accepts_registerer
+from cubicweb.common.appobject import AppObject
+
+
+def rgx(pattern, flags=0):
+    """convenient shortcut compiling `pattern` with a `$` sign appended,
+    so the whole uri has to match"""
+    return re.compile(pattern+'$', flags)
+
+class metarewriter(autoselectors):
+    """metaclass auto-extending the rules dictionary: base class rules
+    are inherited unless `ignore_baseclass_rules` is set, and a rule
+    redefined in a subclass shadows the base class rule sharing the
+    same input
+    """
+    def __new__(mcs, name, bases, classdict):
+        # collect baseclass' rules
+        rules = []
+        ignore_baseclass_rules = classdict.get('ignore_baseclass_rules', False)
+        if not ignore_baseclass_rules:
+            for base in bases:
+                rules[0:0] = getattr(base, 'rules', [])
+        # the class' own rules come first so they take precedence
+        rules[0:0] = classdict.get('rules', [])
+        inputs = set()
+        for data in rules[:]:
+            # rules are either (input, output) or (input, output, groups)
+            try:
+                input, output, groups = data
+            except ValueError:
+                input, output = data
+            if input in inputs:
+                # duplicated input: drop the shadowed rule. Fix: remove
+                # the actual entry -- removing the 2-uple (input, output)
+                # raised ValueError when the shadowed rule was a 3-uple
+                rules.remove(data)
+            else:
+                inputs.add(input)
+        classdict['rules'] = rules
+        return super(metarewriter, mcs).__new__(mcs, name, bases, classdict)
+
+
+class URLRewriter(AppObject):
+    """base class for URL rewriters
+
+    url rewriters should have a `rules` dict that maps an input URI
+    to something that should be used for rewriting.
+
+    The actual logic that defines how the rules dict is used is implemented
+    in the `rewrite` method
+
+    A `priority` attribute might be used to indicate which rewriter
+    should be tried first. The higher the priority is, the earlier the
+    rewriter will be tried
+    """
+    # rules inheritance / shadowing is handled by the metaclass
+    __metaclass__ = metarewriter
+    __registry__ = 'urlrewriting'
+    __registerer__ = accepts_registerer
+    __abstract__ = True
+
+    id = 'urlrewriting'
+    accepts = ('Any',)
+    priority = 1
+
+    def rewrite(self, req, uri):
+        """rewrite `uri`; concrete classes must implement this"""
+        raise NotImplementedError
+
+
+class SimpleReqRewriter(URLRewriter):
+    """The SimpleReqRewriters uses a `rules` dict that maps
+    input URI (regexp or plain string) to a dictionary to update the
+    request's form
+
+    If the input uri is a regexp, group substitution is allowed
+    """
+    id = 'simple'
+
+    rules = [
+        ('/schema',  dict(vid='schema')),
+        ('/index', dict(vid='index')),
+        ('/myprefs', dict(vid='epropertiesform')),
+        ('/siteconfig', dict(vid='systemepropertiesform')),
+        ('/manage', dict(vid='manage')),
+        ('/notfound', dict(vid='404')),
+        ('/error', dict(vid='error')),
+        (rgx('/schema/([^/]+?)/?'),  dict(vid='eschema', rql=r'Any X WHERE X is EEType, X name "\1"')),
+        (rgx('/add/([^/]+?)/?'), dict(vid='creation', etype=r'\1')),
+        (rgx('/doc/images/(.+?)/?'), dict(vid='wdocimages', fid=r'\1')),
+        (rgx('/doc/?'), dict(vid='wdoc', fid=r'main')),
+        (rgx('/doc/(.+?)/?'), dict(vid='wdoc', fid=r'\1')),
+        (rgx('/changelog/?'), dict(vid='changelog')),
+        ]
+    
+    def rewrite(self, req, uri):
+        """for each `input`, `output `in rules, if `uri` matches `input`,
+        req's form is updated with `output`
+
+        raises KeyError when no rule matches
+        """
+        rset = None
+        for data in self.rules:
+            # rules may carry an optional third element restricting the
+            # rule to some user groups
+            try:
+                inputurl, infos, required_groups = data
+            except ValueError:
+                inputurl, infos = data
+                required_groups = None
+            if required_groups and not req.user.matching_groups(required_groups):
+                continue
+            if isinstance(inputurl, basestring):
+                if inputurl == uri:
+                    req.form.update(infos)
+                    break
+            elif inputurl.match(uri): # it's a regexp
+                # XXX what about i18n ? (vtitle for instance)
+                for param, value in infos.items():
+                    if isinstance(value, basestring):
+                        # expand backreferences (\1 ...) from the match
+                        req.form[param]= inputurl.sub(value, uri)
+                    else:
+                        req.form[param] = value
+                break
+        else:
+            self.debug("no simple rewrite rule found for %s", uri)
+            raise KeyError(uri)
+        return None, None
+
+
+def build_rset(rql, rgxgroups=None, cachekey=None, setuser=False,
+               vid=None, vtitle=None, form=None, **kwargs):
+    """return a callback usable in `SchemaBasedRewriter` rules which
+    executes `rql` (with `kwargs` as query arguments, possibly extended
+    with groups captured through `rgxgroups`) and returns the rset
+
+    `form` (a dict, formerly a mutable default argument) and `vid` /
+    `vtitle` allow to update the request's form as a side effect
+    """
+    def do_build_rset(inputurl, uri, req, schema):
+        # work on a copy: mutating the closure's kwargs would leak
+        # values captured for one request into subsequent ones
+        args = dict(kwargs)
+        if rgxgroups:
+            match = inputurl.match(uri)
+            for arg, group in rgxgroups:
+                args[arg] = match.group(group)
+        if form:
+            req.form.update(form)
+        if setuser:
+            args['u'] = req.user.eid
+        if vid:
+            req.form['vid'] = vid
+        if vtitle:
+            req.form['vtitle'] = req._(vtitle) % args
+        return None, req.execute(rql, args, cachekey)
+    return do_build_rset
+
+def update_form(**kwargs):
+    """return a callback usable in `SchemaBasedRewriter` rules which
+    updates req.form with `kwargs` plus the named groups captured by
+    the rule's input regexp, and returns no rset
+    """
+    def do_build_rset(inputurl, uri, req, schema):
+        match = inputurl.match(uri)
+        # fix: don't mutate the closure's kwargs -- groups captured for
+        # a previous uri would otherwise leak into later requests
+        values = dict(kwargs)
+        values.update(match.groupdict())
+        req.form.update(values)
+        return None, None
+    return do_build_rset
+
+def rgx_action(rql=None, args=None, cachekey=None, argsgroups=(), setuser=False,
+               form=None, formgroups=(), transforms={}, controller=None):
+    """return a callback usable in `SchemaBasedRewriter` rules which
+    optionally executes `rql` (arguments taken from `args` and from the
+    regexp groups listed in `argsgroups`, values converted through
+    `transforms` when a converter is registered for the group name),
+    optionally updates req.form (`form` + groups listed in `formgroups`)
+    and returns (`controller`, rset)
+    """
+    def do_build_rset(inputurl, uri, req, schema):
+        if rql:
+            # copy so repeated calls don't share mutated state
+            kwargs = args and args.copy() or {}
+            if argsgroups:
+                match = inputurl.match(uri)
+                for key in argsgroups:
+                    value = match.group(key)
+                    try:
+                        kwargs[key] = transforms[key](value)
+                    except KeyError:
+                        # no transform registered for this group
+                        kwargs[key] = value
+            if setuser:
+                kwargs['u'] = req.user.eid
+            rset = req.execute(rql, kwargs, cachekey)
+        else:
+            rset = None
+        form2 = form and form.copy() or {}
+        if formgroups:
+            match = inputurl.match(uri)
+            for key in formgroups:
+                form2[key] = match.group(key)
+        if form2:
+            req.form.update(form2)
+        return controller, rset
+    return do_build_rset
+
+
+class SchemaBasedRewriter(URLRewriter):
+    """Here, the rules dict maps regexps or plain strings to
+    callbacks that will be called with (input, uri, req, schema)
+    """
+    id = 'schemabased'
+    rules = [
+        # rgxp : callback
+        (rgx('/search/(.+)'), build_rset(rql=r'Any X WHERE X has_text %(text)s',
+                                         rgxgroups=[('text', 1)])), 
+        ]
+
+    def rewrite(self, req, uri):
+        # XXX this could be refactored with SimpleReqRewriter
+        for data in self.rules:
+            # rules may carry an optional third element restricting the
+            # rule to some user groups
+            try:
+                inputurl, callback, required_groups = data
+            except ValueError:
+                inputurl, callback = data
+                required_groups = None
+            if required_groups and not req.user.matching_groups(required_groups):
+                continue
+            if isinstance(inputurl, basestring):
+                if inputurl == uri:
+                    return callback(inputurl, uri, req, self.schema)
+            elif inputurl.match(uri): # it's a regexp
+                return callback(inputurl, uri, req, self.schema)
+        else:
+            # no rule matched (returns above leave the loop directly)
+            self.debug("no schemabased rewrite rule found for %s", uri)
+            raise KeyError(uri)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/vcard.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,50 @@
+"""vcard import / export
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from cubicweb.common.view import EntityView
+
+VCARD_PHONE_TYPES = {'home': 'HOME', 'office': 'WORK', 'mobile': 'CELL', 'fax': 'FAX'}
+
+class VCardEUserView(EntityView):
+    """export a person information as a vcard"""
+    id = 'vcard'
+    title = _('vcard')
+    templatable = False
+    content_type = 'text/x-vcard'
+    accepts = ('EUser',)
+        
+
+    def set_request_content_type(self):
+        """overridden to set a .vcf filename"""
+        self.req.set_content_type(self.content_type, filename='vcard.vcf')
+        
+    def cell_call(self, row, col):
+        self.vcard_header()
+        self.vcard_content(self.complete_entity(row, col))
+        self.vcard_footer()
+
+    def vcard_header(self):
+        # vCard version 3.0 (see RFC 2426)
+        self.w(u'BEGIN:vcard\n')
+        self.w(u'VERSION:3.0\n')
+        
+    def vcard_footer(self):
+        self.w(u'NOTE:this card has been generated by CubicWeb\n')
+        self.w(u'END:vcard\n')
+        
+    def vcard_content(self, entity):
+        # both names are optional, fall back to empty strings
+        who = u'%s %s' % (entity.surname or '',
+                          entity.firstname or '')
+        w = self.w
+        w(u'FN:%s\n' % who)
+        w(u'N:%s;;;;\n' % entity.login)
+        w(u'TITLE:%s\n' % who)
+        for email in entity.use_email:
+            w(u'EMAIL;TYPE=INTERNET:%s\n' % email.address)
+
+from logilab.common.deprecation import class_renamed
+VCardEuserView = class_renamed('VCardEuserView', VCardEUserView)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/wdoc.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,238 @@
+"""inline help system, using ReST file in products `wdoc` directory
+
+:organization: Logilab
+:copyright: 2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from itertools import chain
+from os.path import join
+from bisect import bisect_right
+
+from mx.DateTime import strptime, today
+
+from logilab.common.changelog import ChangeLog
+from logilab.mtconverter import CHARSET_DECL_RGX
+
+from cubicweb.common.selectors import req_form_params_selector
+from cubicweb.common.view import StartupView
+from cubicweb.common.uilib import rest_publish
+from cubicweb.web import NotFound
+
+_ = unicode
+
+# table of content management #################################################
+
+try:
+    from xml.etree.ElementTree import parse
+except ImportError:
+    from elementtree.ElementTree import parse
+
+def build_toc_index(node, index):
+    try:
+        nodeidx = node.attrib['resource']
+        assert not nodeidx in index, nodeidx
+        index[nodeidx] = node
+    except KeyError:
+        pass
+    for child in node:
+        build_toc_index(child, index)
+        child.parent = node
+
+def get_insertion_point(section, index):
+    if section.attrib.get('insertafter'):
+        snode = index[section.attrib['insertafter']]
+        node = snode.parent
+        idx = node.getchildren().index(snode) + 1
+    elif section.attrib.get('insertbefore'):
+        snode = index[section.attrib['insertbefore']]
+        node = snode.parent
+        idx = node.getchildren().index(snode)
+    else:
+        node = index[section.attrib['appendto']]
+        idx = None
+    return node, idx
+                     
+def build_toc(config):
+    alltocfiles = reversed(tuple(config.locate_all_files('toc.xml')))
+    maintoc = parse(alltocfiles.next()).getroot()
+    maintoc.parent = None
+    index = {}
+    build_toc_index(maintoc, index)
+    # insert component documentation into the tree according to their toc.xml
+    # file 
+    for fpath in alltocfiles:
+        toc = parse(fpath).getroot()
+        for section in toc:
+            node, idx = get_insertion_point(section, index)
+            if idx is None:
+                node.append(section)
+            else:
+                node.insert(idx, section)
+            section.parent = node
+            build_toc_index(section, index)
+    return index
+    
+def title(node, lang):
+    for title in node.findall('title'):
+        if title.attrib['{http://www.w3.org/XML/1998/namespace}lang'] == lang:
+            return unicode(title.text)
+
+def subsections(node):
+    return [child for child in node if child.tag == 'section']
+
+# help views ##################################################################
+
+class InlineHelpView(StartupView):
+    __selectors__ = (req_form_params_selector,)
+    form_params = ('fid',)
+    id = 'wdoc'
+    title = _('site documentation')
+    
+    def call(self):
+        fid = self.req.form['fid']
+        for lang in chain((self.req.lang, self.vreg.property_value('ui.language')),
+                          self.config.available_languages()):
+            rid = '%s_%s.rst' % (fid, lang)
+            resourcedir = self.config.locate_doc_file(rid)
+            if resourcedir:
+                break
+        else:
+            raise NotFound
+        self.tocindex = build_toc(self.config)
+        try:
+            node = self.tocindex[fid]
+        except KeyError:
+            node = None
+        else:
+            self.navigation_links(node)
+            self.w(u'<div class="hr"></div>')
+            self.w(u'<h1>%s</h1>' % (title(node, self.req.lang)))            
+        data = open(join(resourcedir, rid)).read()
+        self.w(rest_publish(self, data))
+        if node is not None:
+            self.subsections_links(node)
+            self.w(u'<div class="hr"></div>')
+            self.navigation_links(node)
+
+    def navigation_links(self, node):
+        req = self.req
+        parent = node.parent
+        if parent is None:
+            return
+        brothers = subsections(parent)
+        self.w(u'<div class="docnav">\n')
+        previousidx = brothers.index(node) - 1
+        if previousidx >= 0:
+            self.navsection(brothers[previousidx], 'prev')            
+        self.navsection(parent, 'up')            
+        nextidx = brothers.index(node) + 1
+        if nextidx < len(brothers):
+            self.navsection(brothers[nextidx], 'next')            
+        self.w(u'</div>\n')
+
+    navinfo = {'prev': ('', 'data/previous.png', _('i18nprevnext_previous')),
+               'next': ('', 'data/next.png', _('i18nprevnext_next')),
+               'up': ('', 'data/up.png', _('i18nprevnext_up'))}
+               
+    def navsection(self, node, navtype):
+        htmlclass, imgpath, msgid = self.navinfo[navtype]
+        self.w(u'<span class="%s">' % htmlclass)
+        self.w(u'%s : ' % self.req._(msgid))
+        self.w(u'<a href="%s">%s</a>' % (
+            self.req.build_url('doc/'+node.attrib['resource']),
+            title(node, self.req.lang)))
+        self.w(u'</span>\n')
+        
+    def subsections_links(self, node, first=True):
+        sub = subsections(node)
+        if not sub:
+            return
+        if first:
+            self.w(u'<div class="hr"></div>')
+        self.w(u'<ul class="docsum">')
+        for child in sub:
+            self.w(u'<li><a href="%s">%s</a>' % (
+                self.req.build_url('doc/'+child.attrib['resource']),
+                title(child, self.req.lang)))
+            self.subsections_links(child, False)
+            self.w(u'</li>')
+        self.w(u'</ul>\n')
+        
+
+
+class InlineHelpImageView(StartupView):
+    __selectors__ = (req_form_params_selector,)
+    form_params = ('fid',)
+    id = 'wdocimages'
+    binary = True
+    templatable = False
+    content_type = 'image/png'
+    
+    def call(self):
+        fid = self.req.form['fid']
+        for lang in chain((self.req.lang, self.vreg.property_value('ui.language')),
+                          self.config.available_languages()):
+            rid = join('images', '%s_%s.png' % (fid, lang))
+            resourcedir = self.config.locate_doc_file(rid)
+            if resourcedir:
+                break
+        else:
+            raise NotFound
+        self.w(open(join(resourcedir, rid)).read())
+
+
+class ChangeLogView(StartupView):
+    id = 'changelog'
+    title = _('What\'s new?')
+    maxentries = 25
+    
+    def call(self):
+        rid = 'ChangeLog_%s' % (self.req.lang)
+        allentries = []
+        title = self.req._(self.title)
+        restdata = ['.. -*- coding: utf-8 -*-', '', title, '='*len(title), '']
+        w = restdata.append
+        for fpath in self.config.locate_all_files(rid):
+            cl = ChangeLog(fpath)
+            encoding = 'utf-8'
+            # additional content may be found in title
+            for line in (cl.title + cl.additional_content).splitlines():
+                m = CHARSET_DECL_RGX.search(line)
+                if m is not None:
+                    encoding = m.group(1)
+                    continue
+                elif line.startswith('.. '):
+                    w(unicode(line, encoding))
+            for entry in cl.entries:
+                if entry.date:
+                    date = strptime(entry.date, '%Y-%m-%d')
+                else:
+                    date = today()
+                messages = []
+                for msglines, submsgs in entry.messages:
+                    msgstr = unicode(' '.join(l.strip() for l in msglines), encoding)
+                    msgstr += u'\n\n'
+                    for submsglines in submsgs:
+                        msgstr += '     - ' + unicode(' '.join(l.strip() for l in submsglines), encoding)
+                        msgstr += u'\n'
+                    messages.append(msgstr)
+                entry = (date, messages)
+                allentries.insert(bisect_right(allentries, entry), entry)
+        latestdate = None
+        i = 0
+        for date, messages in reversed(allentries):
+            if latestdate != date:
+                fdate = self.format_date(date)
+                w(u'\n%s' % fdate)
+                w('~'*len(fdate))
+                latestdate = date
+            for msg in messages:
+                w(u'* %s' % msg)
+                i += 1
+                if i > self.maxentries:
+                    break
+        w('') # blank line
+        self.w(rest_publish(self, '\n'.join(restdata)))
+        
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/wfentities.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,16 @@
+"""html view for workflow related entities
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from cubicweb.common.view import EntityView
+
+class CellView(EntityView):
+    id = 'cell'
+    accepts = ('TrInfo',)
+    def cell_call(self, row, col, cellvid=None):
+        entity = self.entity(row, col)
+        self.w(entity.printable_value('comment'))
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/xbel.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,54 @@
+"""xbel views
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+_ = unicode
+
+from logilab.mtconverter import html_escape
+
+from cubicweb.web.views.baseviews import XmlView, EntityView
+
+
+class XbelView(XmlView):
+    id = 'xbel'
+    title = _('xbel')
+    templatable = False
+    content_type = 'text/xml' #application/xbel+xml 
+    
+    def cell_call(self, row, col):
+        self.wview('xbelitem', self.rset, row=row, col=col)
+        
+    def call(self):
+        """display a list of entities by calling their <item_vid> view"""
+        title = self.page_title()
+        url = self.build_url(rql=self.req.form.get('rql', ''))
+        self.w(u'<?xml version="1.0" encoding="%s"?>\n' % self.req.encoding)
+        self.w(u'<!DOCTYPE xbel PUBLIC "+//IDN python.org//DTD XML Bookmark Exchange Language 1.0//EN//XML" "http://www.python.org/topics/xml/dtds/xbel-1.0.dtd">')
+        self.w(u'<xbel version="1.0">')
+        self.w(u'<title>%s</title>' % self.req._('bookmarks'))
+        for i in xrange(self.rset.rowcount):
+            self.cell_call(i, 0)
+        self.w(u"</xbel>")
+    
+
+class XbelItemView(EntityView):
+    id = 'xbelitem'
+
+    def cell_call(self, row, col):
+        entity = self.complete_entity(row, col)
+        self.w(u'<bookmark href="%s">' % html_escape(self.url(entity)))
+        self.w(u'  <title>%s</title>' % html_escape(entity.dc_title()))
+        self.w(u'</bookmark>')
+
+    def url(self, entity):
+        return entity.absolute_url()
+        
+class XbelItemBookmarkView(XbelItemView):
+    accepts = ('Bookmark',)
+
+    def url(self, entity):
+        return entity.actual_url()
+        
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/wdoc/ChangeLog_en	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,27 @@
+.. -*- coding: utf-8 -*-
+.. _`user preferences`: myprefs#fieldset_ui
+
+2008-09-25  --  2.50.0
+    * jQuery replaces MochiKit
+    * schema inheritance support
+
+2008-05-13  --  2.48.0
+    * web pages are now served with the ``xhtml+xml`` content type
+
+2008-03-27  --  2.47.0
+    * fckeditor is now integrated to edit rich text fields. If you don't see it,
+      check your `user preferences`_.
+
+2008-03-13  --  2.46.0
+    * new calendar and timetable views.
+    
+    * click-and-edit functionalities : if you see the text edit cursor when
+      you're over a field, try to double-click!
+      
+    * automatic facets oriented search : a filter box should appear when you're
+      looking for something and more than one entity are displayed.
+
+2008-02-15  --  2.44.0
+    * new internationalized online help system. Click the question mark on the
+      right top corner! Hopefully some new documentation will appear as time
+      goes by.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/wdoc/ChangeLog_fr	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,31 @@
+.. -*- coding: utf-8 -*-
+.. _`préférences utilisateurs`: myprefs#fieldset_ui
+
+2008-09-25  --  2.50.0
+    * jQuery remplace MochiKit
+    * support de l'héritage de schéma
+
+2008-05-13  --  2.48.0
+    * les pages sont servies en tant que ``xhtml+xml`` pour certains navigateurs
+
+2008-03-27  --  2.47.0
+    * fckeditor est enfin intégré pour éditer les champs de type texte riche. Si
+      vous ne le voyez pas apparaître, vérifiez vos `préférences utilisateurs`_.
+
+2008-03-13  --  2.46.0
+    * nouvelle vues calendrier et emploi du temps
+    
+    * fonctionalité "click-et-édite" : si vous voyez apparaitre le curseur
+      d'édition de texte en survolant un champ, essayez de double-cliquer !
+      
+    * recherche par facettes : une boîte de filtrage devrait apparaître
+      automatiquement lorsque vous effectuez une recherche qui ramène plus d'une
+      entité
+
+2008-02-15  --  2.44.0
+    * nouveau système d'aide internationalisé. Cliquez sur le point
+      d'interrogation en haut à droite. Reste à enrichir le contenu de cette
+      documentation, mais cela devrait arriver avec le temps.
+
+
+      
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/wdoc/about_en.rst	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,25 @@
+About this site
+===============
+
+This web application is based on the CubicWeb knowledge management system
+from Logilab_. CubicWeb is made of an object database and a web
+framework. It allows to develop an application by defining a set of
+entities and their relationships, plus a set of views that present the
+data selected from the object database using a specific query language.
+
+This application has a precise schema_ and can be considered an
+example of a semantic web database, as it can export the data to XML
+and other formats and is not limited to publishing HTML.
+
+Supported formats: |microformats|_ - JSON_ - |rss|_ - |dublincore|_
+
+.. |microformats| image:: /data/microformats-button.png
+.. _microformats: http://microformats.org
+.. _JSON: http://www.json.org/
+.. |rss| image:: /data/rss-button.png
+.. _rss: http://www.rssboard.org
+.. |dublincore| image:: /data/dublincore-button.png
+.. _dublincore: http://dublincore.org
+
+.. _Logilab: http://www.logilab.fr/
+.. _schema: schema
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/wdoc/about_fr.rst	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,28 @@
+.. -*- coding: utf-8 -*-
+
+A propos de ce site
+===================
+
+Cette application web est basée sur le système de gestion de connaissance CubicWeb de
+Logilab_. CubicWeb est composé d'une base de données objet et d'un framework web. Il
+permet de développer une application en définissant un ensemble d'entités et de
+relations entre ces entités, ainsi que des vues présentant les données
+sélectionnées depuis la base de données en utilisant un langage de requête
+spécifique.
+
+Cette application possède un schéma_ précis et peut être considérée comme un
+exemple de site web sémantique, car elle n'est pas limitée à publier du HTML
+mais peut également exporter les données en XML et d'autres formats.
+
+Formats supportés: |microformats|_ - JSON_ - |rss|_ - |dublincore|_
+
+.. |microformats| image:: /data/microformats-button.png
+.. _microformats: http://microformats.org
+.. _JSON: http://www.json.org/
+.. |rss| image:: /data/rss-button.png
+.. _rss: http://www.rssboard.org
+.. |dublincore| image:: /data/dublincore-button.png
+.. _dublincore: http://dublincore.org
+
+.. _Logilab: http://www.logilab.fr/
+.. _schéma: schema
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/wdoc/add_content_en.rst	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,9 @@
+Add some entities
+-----------------
+As manager, you can access entity creation forms by using the `site management`_ page.
+
+.. _`site management`: manage
+
+As regular user, the index page or the action box may propose some links to create entities according to the context.
+
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/wdoc/add_content_fr.rst	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,10 @@
+Ajouter des entités
+-------------------
+Pour un administrateur, la création des objets est toujours possible directement dans la `page de gestion de site`_.
+
+.. _`page de gestion de site`: manage
+
+Pour les utilisateurs, la page principale ou la boîte d'action des entités vous permettra la création de nouveaux contenus.
+L'intérêt de la dernière méthode est de faciliter l'édition de la relation entre les objets.
+
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/wdoc/advanced_usage_en.rst	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,2 @@
+.. winclude:: advanced_usage_schema
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/wdoc/advanced_usage_schema_en.rst	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,9 @@
+Schema of the data
+------------------
+
+First take a look at the data schema_ then try to remember that you are browsing
+through a heap of data by applying stylesheets to the results of queries. This
+site is not a content management system with items placed in folders. It is an
+interface to a database which applies a view to retrieved data.
+
+.. _schema: schema
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/wdoc/advanced_usage_schema_fr.rst	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,14 @@
+.. -*- coding: utf-8 -*-
+
+Schéma des données
+------------------
+
+Commencez d'abord par jeter un œil au schéma_ de l'application, puis essayez de vous rappeler que vous naviguez dans un ensemble de données en appliquant des vues aux résultats de requête. Ce site n'est pas un système de gestion de contenu avec des objets dans des répertoires. C'est une interface vers une base de données qui vous permet d'appliquer une vue aux données récupérées.
+
+.. _schéma: schema
+
+
+Relation entre les objets
+-------------------------
+
+Une relation est définie pour donner un sens précis entre les éléments. Les relations peuvent être facilement ajoutées ou détruites selon le schéma_ de votre application.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/wdoc/bookmarks_en.rst	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,21 @@
+Any authenticated user can define their own bookmarks in the application. They
+are similar to bookmarks in a browser but are dedicated to link into the
+application and they are stored in the database (and so you'll get them from any
+browser you use to connect to the application). They are available through the
+bookmarks box, on the left of the screen.
+
+To add a bookmark on the page you're seeing, simply click on the "bookmark this
+page" link in the "manage bookmarks" dropdown-menu. You'll then be asked to give
+it a title. Once created, it will appear in the bookmarks box.
+
+In this dropdown-menu, the "pick existing bookmarks" link will show you a list
+of bookmarks already defined by the other users of the application. If you are
+interested by one of them, simply click the `[+]` sign to the left of it to add it
+to your bookmarks.
+
+The "edit bookmarks" link will show you a form to edit your current bookmarks,
+for instance to change their title. If you are using share bookmarks (picked
+from another user), those bookmarks won't appear in the form unless you're
+allowed to modify them.
+
+To remove a bookmark, simply click the `[-]` sign to the left of it.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/wdoc/bookmarks_fr.rst	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,34 @@
+.. -*- coding: utf-8 -*-
+
+Chaque utilisateur authentifié peut définir ses propres signets dans
+l'application. Ils fonctionnent comme des signets dans un navigateur, mais sont
+restreints aux liens internes au site et sont conservés dans la base de données,
+ce qui vous permet de les retrouver quelque soit le navigateur que vous utilisez
+pour vous connecter au site. Ils sont disponibles dans la boîte des signets, à
+gauche de l'écran.
+
+Pour ajouter un signet sur la page que vous êtes en train de visualiser, vous
+devez cliquer sur le menu déroulant `gérer des signets`; puis sur `poser un
+signet ici` de la boîte pour faire apparaître le champ d'ajout. Il est alors
+demandé de lui donner un titre. Une fois créé, le signet apparaît dans la boîte
+des signets.
+
+Le signe `[-]` en face du signet permet sa suppression immédiate.
+
+Le lien `récupérer un signet` du menu déroulant permet de voir une liste des
+signets déjà définis par les autres utilisateurs de l'application. Si l'un de
+ces signets vous intéresse, vous pouvez l'ajouter à vos signets en cliquant sur
+le signe `[+]` à gauche de celui-ci.
+
+Le lien `éditer les signets` du menu déroulant permet de modifier vos signets
+(par exemple changer leur titre) et le chemin relatif du signet. Si vous
+utilisez des signets partagés (que vous avez récupérés d'un autre utilisateur),
+ceux-ci n'apparaîtront pas dans le formulaire de modification à moins que vous
+ayez le droit de les modifier.
+
+
+Pour plus de détails sur les relations possibles, veuillez vous référer au
+schéma_ du composant signet.
+
+.. _`schéma`: eetype/Bookmark?vid=eschema
+.. _`préférences utilisateurs`: myprefs
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/wdoc/custom_view_en.rst	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,18 @@
+Once you have read the `RQL tutorial`_ and you know the application's `schema`_,
+you're ready to define your own views. Just type the RQL query retrieving data
+you're interested in in the search field. If the view used to display them
+doesn't fit your need, choose another one in the possible views box (this box
+isn't displayed by default, but you can activate it in your `user
+preferences`_). Once you're satisfied, add a `bookmark`_ if you want to reuse
+this view later.
+
+.. _`RQL tutorial`: doc/tut_rql
+.. _`schema`: schema
+.. _`user preferences`: myprefs
+.. _`bookmark`: doc/bookmarks
+
+
+Below are some examples of what can be achieved...
+
+.. winclude:: custom_view_last_update
+.. winclude:: custom_view_rss
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/wdoc/custom_view_fr.rst	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,21 @@
+.. -*- coding: utf-8 -*-
+
+Une fois que vous avez lu le `tutoriel RQL`_ et que vous connaissez le `schéma`_
+de l'application, vous êtes prêt à définir vos propres vues. Tapez simplement
+dans le champ de recherche la requête qui vous permet de récupérer les données
+qui vous intéressent. Si la vue appliquée par défaut ne correspond pas à vos
+besoins, sélectionnez en une autre dans la boîte des vues possibles (celle-ci
+n'est pas affichée par défaut, vous pouvez l'activer dans vos `préférences
+utilisateur`_). Une fois que vous êtes satisfait, ajoutez un `signet`_ si vous
+voulez réutiliser votre vue plus tard.
+
+.. _`tutoriel RQL`: doc/tut_rql
+.. _`schéma`: schema
+.. _`préférences utilisateur`: myprefs
+.. _`signet`: doc/bookmarks
+
+
+Ci-dessous quelques exemples de ce que l'on peut faire...
+
+.. winclude:: custom_view_last_update
+.. winclude:: custom_view_rss
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/wdoc/custom_view_last_update_en.rst	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,8 @@
+Latest changes 
+--------------
+
+* table of `all latest changes`_
+
+The link below provides a useful RQL query example.
+
+.. _all latest changes: view?rql=Any+M%2CX+WHERE+X+modification_date+M+ORDERBY+M+DESC+LIMIT+30
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/wdoc/custom_view_last_update_fr.rst	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,10 @@
+.. -*- coding: utf-8 -*-
+ 
+Dernières modifications
+-----------------------
+
+* la table des `derniers changements`_ fournit un exemple d'utilisation de RQL
+  pour récupérer les derniers changements ayant eu lieu sur ce site.
+
+.. _`derniers changements`: view?rql=Any+M%2CX+WHERE+X+modification_date+M+ORDERBY+M+DESC+LIMIT+30
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/wdoc/custom_view_rss_en.rst	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,14 @@
+
+.. role:: raw-html(raw)
+   :format: html
+
+RSS driven
+----------
+
+RSS is a pretty useful technology that can be widely used on this
+site. Any set of data can be presented as RSS. You can then plug in
+an RSS reader into that and follow the site activity. For example :
+
+:raw-html:`<p><a class="reference"
+href="view?vid=rss&amp;rql=Any+X%2CM+WHERE+X+modification_date+M+ORDERBY+M+DESC+LIMIT+30"><img
+alt="rss" src="data/rss.png"> latest changes</a></p>`
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/wdoc/custom_view_rss_fr.rst	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,18 @@
+.. -*- coding: utf-8 -*-
+
+.. role:: raw-html(raw)
+   :format: html
+ 
+Flux RSS
+--------
+
+RSS est une technologie très utile qui peut être utilisée de manière très
+générique sur ce site. N'importe quel résultat de requête peut-être présenté
+comme un flux RSS. Vous pouvez donc ensuite connecter ce flux à n'importe quel
+lecteur RSS pour suivre l'activité de ce site. Par exemple pour avoir les
+derniers changements sous forme de flux RSS:
+
+:raw-html:`<p><a class="reference"
+href="view?vid=rss&amp;rql=Any+X%2CM+WHERE+X+modification_date+M+ORDERBY+M+DESC+LIMIT+30"><img
+alt="rss" src="data/rss.png"> latest changes</a></p>`
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/wdoc/glossary_en.rst	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,14 @@
+action box
+    Area visible in the upper left column. You have a list of available actions on the entity. The most frequently used entry is `modify`.
+
+object
+    Any element created in the application
+    Example: project, ticket, user, ...
+
+relation editing module
+    HTML widget that let you define new relations amongst objects.
+
+relation
+    It's a kind of 'smart' link between 2 objects of the application. It has a specific meaning that determines dynamic behaviour and adds new logic to the content.
+
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/wdoc/glossary_fr.rst	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,14 @@
+boîte des actions
+    boîte visible dans la colonne de gauche en haut à gauche de l'écran. Cette boîte vous permet d'accéder aux actions disponibles pour cette entité. L'entrée la plus utilisée est `modifier`.
+
+module d'édition des relations entre objets
+    module HTML qui permet l'édition des relations entre objects.
+
+objet
+    Tout élément qui peut être créé au sein de l'application
+    Exemple: projet, ticket, étiquette, utilisateur, ...
+
+relation
+    Une relation est un lien 'intelligent' et bi-directionnel entre 2 objets de l'application. Il est intelligent au sens où il porte un sens et permet de définir des comportements dynamiques à l'application et ajouter une logique métier au contenu.
+
+
Binary file web/wdoc/images/userprefs_en.png has changed
Binary file web/wdoc/images/userprefs_fr.png has changed
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/wdoc/main_en.rst	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+.. winclude:: about
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/wdoc/search_en.rst	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,13 @@
+
+.. winclude:: search_sample_queries
+
+You can as well type complex queries using the RQL_ query language, 
+used everywhere to build dynamic pages of this site.
+
+You can use one of the following prefixes to specify which kind of search you
+want: 
+
+* `rql` : RQL query
+* `text` : full text search
+
+.. _RQL: doc/tut_rql
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/wdoc/search_fr.rst	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,15 @@
+.. -*- coding: utf-8 -*-
+
+.. winclude:: search_sample_queries
+
+Vous pouvez également taper des requêtes complexes en utilisant le langage de
+requête RQL_, base sur laquelle ce site est construit.
+
+Vous pouvez préfixer votre recherche des mots clés suivants pour indiquer le
+type de recherche que vous désirez :
+
+* `rql` : requête RQL
+* `text` : recherche plein texte
+
+.. _RQL: doc/tut_rql
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/wdoc/search_sample_queries_en.rst	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,2 @@
+Experiment with the search bar. Try queries like "card sometextualcontent" or
+"card wikiid index" or "345".
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/wdoc/search_sample_queries_fr.rst	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,2 @@
+Essayez le champ de recherche. Essayez des recherches comme "fiche
+unmotachercher", ou encore "fiche wikiid index" ou "345".
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/wdoc/standard_usage_en.rst	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,10 @@
+First of all, you can use this site as any web site by clicking on the
+different links. The Logo on the top left of this page will lead you
+to a start page from which you will be able to navigate to all the
+data hosted on this site.
+
+The bookmarks box on the left hand side provides some useful
+shortcuts.
+
+Most text is indexed and you can search all the content by typing
+words in the search box.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/wdoc/standard_usage_fr.rst	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,11 @@
+.. -*- coding: utf-8 -*-
+
+Plusieurs éléments par défaut sont proposés pour faciliter la navigation:
+
+- le logo en haut de la page vous ramène à la page d'accueil du site qui fournit un point de départ pour la navigation vers les données de ce site.
+
+- la boîte de signet à gauche fournit des raccourcis utiles.
+
+- la notion d'étiquette vous permet de marquer de manière subjective les pages dont vous souhaitez vous souvenir
+
+- le contenu textuel des entités est indexé et vous pouvez donc rechercher des entités en tapant simplement les mots à rechercher dans la boîte de recherche.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/wdoc/toc.xml	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,54 @@
+<toc resource="main">
+  <title xml:lang="en">Site documentation</title>
+  <title xml:lang="fr">Documentation du site</title>
+
+  <section resource="standard_usage">
+    <title xml:lang="en">Standard use</title>
+    <title xml:lang="fr">Utilisation standard</title>
+
+    <section resource="bookmarks">
+      <title xml:lang="en">Using bookmarks</title>
+      <title xml:lang="fr">Utilisation des signets</title>
+    </section>
+
+    <section resource="search">
+      <title xml:lang="en">Search</title>
+      <title xml:lang="fr">Recherche</title>
+    </section>
+
+    <section resource="userprefs">
+      <title xml:lang="en">User preferences</title>
+      <title xml:lang="fr">Préférences utilisateur</title>
+    </section>
+
+    <section resource="add_content">
+      <title xml:lang="en">Add content to the site</title>
+      <title xml:lang="fr">Ajouter du contenu au site</title>
+    </section>
+  </section>
+
+  <section resource="advanced_usage">
+    <title xml:lang="en">Advanced use</title>
+    <title xml:lang="fr">Utilisation avancée</title>
+
+    <section resource="tut_rql">
+      <title xml:lang="en">"Relation Query Language" tutorial</title>
+      <title xml:lang="fr">Tutoriel "Relation Query Language"</title>
+    </section>
+
+    <section resource="custom_view">
+      <title xml:lang="en">Defining your own views</title>
+      <title xml:lang="fr">Définir ses propres vues</title>
+    </section>
+  </section>
+
+  <section resource="glossary">
+    <title xml:lang="en">Glossary</title>
+    <title xml:lang="fr">Glossaire</title>
+  </section>
+
+  <section resource="about">
+    <title xml:lang="fr">A propos de ce site</title>
+    <title xml:lang="en">About this site</title>
+  </section>
+</toc>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/wdoc/tut_rql_en.rst	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,258 @@
+.. -*- coding: utf-8 -*-
+
+Let's learn RQL by practice...
+
+.. contents::
+
+Introduction
+------------
+
+RQL has a syntax and principles inspired from the SQL query language, though
+it's at a higher level in order to be more intuitive and suitable to easily make
+advanced queries on a schema based database.
+
+* the main idea in RQL is that we're following relations between entities
+* attributes are a special case of relations
+* RQL has been inspired from SQL but is at a higher level
+* a knowledge of the application's schema is necessary to build rql queries
+
+To use RQL, you'll have to know the basics of the language as well as a good
+knowledge of the application's schema. You can always view it using the "schema"
+link in the user's dropdown menu (on the top-right of the screen) or by clicking here_.
+
+.. _here: schema
+
+
+Some bits of theory
+-------------------
+
+Variables et types
+~~~~~~~~~~~~~~~~~~
+Entities and attribute values to follow and / or select are represented in the
+query by *variables* which must be written upper-case.
+
+Possible types for each variable are deduced from the schema according to
+constraints in the query.
+
+You can explicitly constrain a variable's type using the special relation "is".
+
+Base types
+~~~~~~~~~~
+* `String` (literal: between double or simple quotes)
+* `Int`, `Float` (using '.' as separator)
+* `Date`, `Datetime`, `Time` (literal: string YYYY/MM/DD[ hh:mm] or
+  `TODAY` and `NOW` keywords)
+* `Boolean` (keywords `TRUE` and `FALSE`)
+* keyword `NULL`
+
+Operators
+~~~~~~~~~~
+* Logical operators : `AND`, `OR`, `,`
+* Mathematical operators: `+`, `-`, `*`, `/`
+* Comparison operators: `=`, `<`, `<=`, `>=`, `>`, `~=`, `LIKE`, `IN`
+
+  * `=` is the default comparison operator
+
+  * `LIKE` / `~=` permits use of the special character `%` in a string to tell
+    the string must begin or end with a prefix or suffix (as SQL LIKE operator) ::
+    
+      Any X WHERE X name ~= 'Th%'
+      Any X WHERE X name LIKE '%lt'
+
+  * `IN` permits to give a list of possible values ::
+
+      Any X WHERE X name IN ('chauvat', 'fayolle', 'di mascio', 'thenault')
+
+Search query grammar
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+::
+
+  [DISTINCT] <entity type> V1(, V2)*
+  [GROUPBY V1(, V2)*]  [ORDERBY <orderterms>]
+  [WHERE <restriction>] 
+  [LIMIT <value>] [OFFSET <value>]
+
+:entity type:
+  Type of the selected variable(s). You'll usually use `Any` type to not specify
+  any type.
+:restriction:
+  List of relations to follow, in the form
+    `V1 relation V2|<constant value>`
+:orderterms:
+  Define a selection order : variable or column number, followed by the sort method
+  (`ASC`, `DESC`), with ASC as default when not specified
+
+note about grouped queries (i.e. using a `GROUPBY` clause): every selected
+variable must be either grouped or used in an aggregate function.
+
+
+Example schema
+--------------
+
+In this document, we will suppose the application's schema is the one described
+here. Available entity types are :
+
+:Person:
+  ::
+
+	name      (String, required) 
+	birthday (Date)
+
+
+:Company:
+  ::
+
+	name   (String)
+
+
+:Note:
+  ::
+
+	diem (Date)
+	type (String)
+
+
+And relations between those entities: ::
+
+	Person  works_for    Company
+	Person  evaluated_by Note
+	Company evaluated_by Note
+
+
+Meta-data
+~~~~~~~~~
+Every entity type has the following meta-data:
+
+* `eid (Int)`, a unique identifier
+* `creation_date (Datetime)`, date on which the entity has been created
+* `modification_date (Datetime)`, lastest date on which the entity has been modified
+
+* `created_by (EUser)`, relation to the user which has created this entity
+
+* `owned_by (EUser)`, relation to the user(s) considered as owners of this
+  entity, the entity's creator by default
+
+* `is (Eetype)`, special relation to specify a variable type.
+
+A user's entity has the following schema:
+
+:EUser:
+  ::
+
+	login  	  (String) not null
+	password  (Password)
+	firstname (String)
+	surname   (String)
+
+
+Basic queries
+-------------
+0. *All persons* ::
+   
+      Person X
+
+   or ::
+
+      Any X WHERE X is Person
+
+
+1. *The company named Logilab* ::
+
+      Company S WHERE S name 'Logilab'
+
+
+2. *All entities with a "name" attribute whose value starts with 'Log'* ::
+
+      Any S WHERE S name LIKE 'Log%'
+
+   or ::
+
+      Any S WHERE S name ~= 'Log%'
+
+   This query may return Person or Company entities.
+
+
+3. *All persons working for the Logilab company* ::
+
+      Person P WHERE P works_for S, S name "Logilab"
+
+   or ::
+
+      Person P WHERE P works_for S AND S name "Logilab"
+
+
+4. *Companies named Caesium or Logilab* ::
+
+      Company S WHERE S name IN ('Logilab','Caesium')
+
+   or ::
+
+      Company S WHERE S name 'Logilab' OR S name 'Caesium'
+
+
+5. *Every company but the ones named Caesium or Logilab* ::
+
+      Company S WHERE NOT S name IN ('Logilab','Caesium')
+
+   or ::
+
+      Company S WHERE NOT S name 'Logilab' AND NOT S name 'Caesium'
+
+
+6. *Entities evaluated by the note with eid 43* ::
+
+      Any X WHERE X evaluated_by N, N eid 43
+
+
+7. *All persons ordered by birthday from the youngest to the oldest* ::
+   
+      Person X ORDERBY D DESC WHERE X birthday D
+
+   Notice you have to define a variable using the birthday relation to use it in the
+   sort term.
+
+
+8. *Number of persons working for each known company* ::
+   
+      Any S, COUNT(X) GROUPBY S WHERE X works_for S
+
+   Notice that since you're writing a grouped query on S, X has to be
+   either grouped as well or used in an aggregate function (as in this example).
+
+
+   
+Advanced
+--------
+0. *Person with no name specified (i.e NULL)* ::
+
+      Person P WHERE P name NULL
+
+
+1. *Persons who are not working for any company* ::
+
+      Person P WHERE NOT P works_for S
+
+
+2. *Every company where person named toto isn't working* ::
+
+      Company S WHERE NOT P works_for S , P name 'toto'
+
+
+3. *Every entity which have been modified between today and yesterday* ::
+
+      Any X WHERE X modification_date <= TODAY, X modification_date >= TODAY - 1
+
+
+4. *Every note without type, to be done in the next 7 days, ordered by date* ::
+
+      Any N, D WHERE N is Note, N type NULL, N diem D, N diem >= TODAY,
+      N diem < TODAY + 7 ORDERBY D
+
+
+5. *Persons with a homonym (without duplicates)* ::
+
+      DISTINCT Person X,Y where X name NX, Y name NX
+
+   or even better (e.g. without both (Xeid, Yeid) and (Yeid, Xeid) in the results) ::
+
+      Person X,Y where X name NX, Y name NX, X eid XE, Y eid > XE
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/wdoc/tut_rql_fr.rst	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,268 @@
+.. -*- coding: utf-8 -*-
+
+Apprenons RQL par la pratique...
+
+.. contents::
+
+
+Introduction
+------------
+
+RQL est assez proche par sa syntaxe et ses principes du langage de requête des
+bases de données relationnelles SQL. Il est cependant plus intuitif et mieux
+adapté pour faire des recherches avancées sur des bases de données structurées
+par un schéma de données. On retiendra les points suivants :
+
+* RQL est un langage mettant l'accent sur le parcours de relations.
+* Les attributs sont considérés comme des cas particuliers de relations.
+* RQL s'inspire de SQL mais se veut plus haut niveau.
+* Une connaissance du schéma définissant l'application est nécessaire.
+
+Pour s'en servir, il convient de connaître les règles de base du langage RQL,
+mais surtout d'avoir une bonne vision du schéma de données de l'application. Ce
+schéma est toujours disponible dans l'application par le lien `schéma`, dans la
+boîte affichée en cliquant sur le lien de l'utilisateur connecté (en haut à droite).
+Vous pouvez également le voir en cliquant ici_.
+
+.. _ici: schema
+
+
+Un peu de théorie
+-----------------
+
+Variables et typage
+~~~~~~~~~~~~~~~~~~~
+Les entités et valeurs à parcourir et / ou sélectionner sont représentées dans
+la requête par des *variables* qui doivent être écrites en majuscules.
+
+Les types possibles pour chaque variable sont déduits à partir du schéma en
+fonction des contraintes présentes dans la requête.
+
+On peut contraindre les types possibles pour une variable à l'aide de la
+relation spéciale `is`.
+
+Types de bases
+~~~~~~~~~~~~~~
+* `String` (littéral: entre doubles ou simples quotes)
+* `Int`, `Float` (le séparateur étant le '.')
+* `Date`, `Datetime`, `Time` (littéral: chaîne YYYY/MM/DD[ hh:mm] ou mots-clés
+  `TODAY` et `NOW`)
+* `Boolean` (mots-clés `TRUE` et `FALSE`)
+* mot-clé `NULL`
+
+Opérateurs
+~~~~~~~~~~
+* Opérateurs logiques : `AND`, `OR`, `,`
+* Opérateurs mathématiques: `+`, `-`, `*`, `/`
+* Opérateurs de comparaison: `=`, `<`, `<=`, `>=`, `>`, `~=`, `LIKE`, `IN`
+
+  * L'opérateur `=` est l'opérateur par défaut
+
+  * L'opérateur `LIKE` / `~=` permet d'utiliser le caractère `%` dans une chaîne
+    de caractères pour indiquer que la chaîne doit commencer ou terminer par un
+    préfixe/suffixe ::
+    
+      Any X WHERE X nom ~= 'Th%'
+      Any X WHERE X nom LIKE '%lt'
+
+  * L'opérateur `IN` permet de donner une liste de valeurs possibles ::
+
+      Any X WHERE X nom IN ('chauvat', 'fayolle', 'di mascio', 'thenault')
+
+Grammaire des requêtes de recherche
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+::
+
+  [DISTINCT] <type d'entité> V1(, V2)*
+  [GROUPBY V1(, V2)*]  [ORDERBY <orderterms>]
+  [WHERE <restriction>] 
+  [LIMIT <value>] [OFFSET <value>]
+
+:type d'entité:
+  Type de la ou des variables séléctionnées. 
+  Le type spécial `Any`, revient à ne pas spécifier de type.
+:restriction:
+  liste des relations à parcourir sous la forme 
+    `V1 relation V2|<valeur constante>`
+:orderterms:
+  Définition de l'ordre de sélection : variable ou n° de colonne suivie de la
+  méthode de tri (`ASC`, `DESC`), ASC étant la valeur par défaut
+
+note pour les requêtes groupées (i.e. avec une clause `GROUPBY`) :
+toutes les variables sélectionnées doivent être soit groupée soit
+aggrégée
+
+
+Schéma
+------
+
+Nous supposerons dans la suite de ce document que le schéma de l'application est
+le suivant. Les différentes entités disponibles sont :
+
+:Personne:
+  ::
+
+	nom    (String, obligatoire) 
+	datenaiss (Date)
+
+
+:Societe:
+  ::
+
+	nom   (String)
+
+
+:Note:
+  ::
+
+	diem (Date)
+	type (String)
+
+
+Et les relations entre elles : ::
+
+	Personne travaille_pour Societe
+	Personne evaluee_par    Note
+	Societe  evaluee_par    Note
+
+
+Méta-données
+~~~~~~~~~~~~
+Tous les types d'entités ont les métadonnées suivantes :
+
+* `eid (Int)`, permettant d'identifier chaque instance de manière unique
+* `creation_date (Datetime)`, date de création de l'entité
+* `modification_date (Datetime)`, date de dernière modification de l'entité
+
+* `created_by (EUser)`, relation vers l'utilisateur ayant créé l'entité
+
+* `owned_by (EUser)`, relation vers le où les utilisateurs considérés comme 
+  propriétaire de l'entité, par défaut le créateur de l'entité
+
+* `is (Eetype)`, relation spéciale permettant de spécifier le
+  type d'une variable. 
+
+Enfin, le schéma standard d'un utilisateur est le suivant :
+
+:EUser:
+  ::
+
+	login  	  (String, obligatoire)
+	password  (Password)
+	firstname (String)
+	surname   (String)
+
+
+L'essentiel
+-----------
+0. *Toutes les personnes* ::
+   
+      Personne X
+
+   ou ::
+
+      Any X WHERE X is Personne
+
+
+1. *La société nommée Logilab* ::
+
+     Societe S WHERE S nom 'Logilab'
+
+
+2. *Toutes les entités ayant un attribut nom commençant par 'Log'* ::
+
+     Any S WHERE S nom LIKE 'Log%'
+
+   ou ::
+
+      Any S WHERE S nom ~= 'Log%'
+
+   Cette requête peut renvoyer des entités de type personne et de type
+   société.
+
+
+3. *Toutes les personnes travaillant pour la société nommée Logilab* ::
+
+      Personne P WHERE P travaille_pour S, S nom "Logilab"
+
+   ou ::
+
+      Personne P WHERE P travaille_pour S AND S nom "Logilab"
+
+
+4. *Les sociétés nommées Caesium ou Logilab* ::
+
+      Societe S WHERE S nom IN ('Logilab','Caesium')
+
+   ou ::
+
+      Societe S WHERE S nom 'Logilab' OR S nom 'Caesium'
+
+
+5. *Toutes les sociétés sauf celles nommées Caesium ou Logilab* ::
+
+      Societe S WHERE NOT S nom IN ('Logilab','Caesium')
+
+   ou ::
+
+      Societe S WHERE NOT S nom 'Logilab' AND NOT S nom 'Caesium'
+
+
+6. *Les entités évaluées par la note d'identifiant 43* ::
+
+      Any X WHERE X evaluee_par N, N eid 43
+
+
+7. *Toutes les personnes triées par date de naissance dans l'ordre antechronologique* ::
+   
+      Personne X ORDERBY D DESC WHERE X datenaiss D
+
+   On note qu'il faut définir une variable et la sélectionner pour s'en
+   servir pour le tri.
+
+
+8. *Nombre de personne travaillant pour chaque société* ::
+   
+      Any S, COUNT(X) GROUPBY S WHERE X travaille_pour S
+
+   On note qu'il faut définir une variable pour s'en servir pour le
+   groupage. De plus, les variables sélectionnées doivent être groupées
+   (mais les variables groupées ne doivent pas forcément être sélectionnées).
+
+
+   
+Exemples avancés
+----------------
+0. *Toutes les personnes dont le champ nom n'est pas spécifié (i.e NULL)* ::
+
+      Personne P WHERE P nom NULL
+
+
+1. *Toutes les personnes ne travaillant pour aucune société* ::
+
+      Personne P WHERE NOT P travaille_pour S
+
+
+2. *Toutes les sociétés où la personne nommée toto ne travaille pas* ::
+
+      Societe S WHERE NOT P travaille_pour S , P nom 'toto'
+
+
+3. *Toutes les entités ayant été modifiées entre aujourd'hui et hier* ::
+
+      Any X WHERE X modification_date <= TODAY, X modification_date >= TODAY - 1
+
+
+4. *Toutes les notes n'ayant pas de type et à effectuer dans les 7 jours, triées par date* ::
+
+      Any N, D WHERE N is Note, N type NULL, N diem D, N diem >= TODAY,
+      N diem < TODAY + 7 ORDERBY D
+
+
+5. *Les personnes ayant un homonyme (sans doublons)* ::
+
+      DISTINCT Personne X,Y where X nom NX, Y nom NX
+
+   ou mieux (sans avoir (Xeid, Yeid) et (Yeid, Xeid) dans les résultats) ::
+
+      Personne X,Y where X nom NX, Y nom NX, X eid XE, Y eid > XE
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/wdoc/userprefs_en.rst	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,10 @@
+A user's personal information is modifiable using the user's edit form. You can
+access it through the dropdown menu under the link on the top-right of the
+window, labeled by your login. In this menu, click the "personal information"
+link to go to this form.
+
+Each user can also customize the site appearance using the "user's
+preferences" link in this menu. This will show you a form to configure which
+boxes are displayed, in which order, etc.
+
+.. image:: doc/images/userprefs
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/wdoc/userprefs_fr.rst	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,47 @@
+Les données concernant l'utilisateur sont paramétrables par la page
+d'édition de l'utilisateur. Vous pouvez accéder à celle ci par le menu
+déroulant apparaissant en cliquant sur le lien en haut à droite de la
+fenêtre de l'application, dont l'intitulé est votre login. Dans ce
+menu, cliquez sur "information personnelles" pour modifier vos données
+personnelles (y compris le mot de passe d'accès à l'application).
+
+Chaque utilisateur peut également personnaliser l'apparence du site via le lien
+"préférences utilisateur"_. Ce formulaire permet notamment de configurer les
+boîtes qui seront affichées, leur ordre, etc...
+
+L'administrateur possède quant à lui un menu "configuration du site" qui reprend l'ensemble des préférences utilisateurs mais les applique par défaut au site.
+
+
+Les types de préférences
+========================
+
+- navigation: détermine des caractéristiques plus personnelles pour l'ergonomie liée à la taille de votre écran (taille des champs d'entrées, nombre d'éléments à afficher dans des listes, ...)
+- propriétés génériques de l'interface: détermine essentiellement la localisation de l'application avec des formats d'affichages particulier (champ date et heure).
+- boîtes: éléments dynamiques et optionnels installés par les composants disponibles au sein de l'application.
+- composants: éléments permettant l'usage d'une navigation plus évoluée
+- composants contextuels: possibilité d'agir sur les comportements par défaut de l'application.
+
+Changement de la langue
+-----------------------
+Dans l'onglet **ui -> ui.language**, choisissez la langue voulue
+
+Changement de l'outil d'édition en ligne
+----------------------------------------
+Il est possible de choisir le format de balisage par défaut pour les notes. Par défaut, le format html est proposé pour les débutants avec la possibilité d'utiliser un éditeur en ligne.
+
+Si vous êtes dans ce cas, vérifiez les deux entrées suivantes:
+
+- **ui -> ui.default-text-format** à HTML
+- **ui -> ui.fckeditor** à 'yes'
+
+Usage avancé de RQL
+-------------------
+Il est possible d'afficher les requêtes RQL_ en jeu pour l'affichage d'une page en activant une barre d'entrée spécifique:
+
+- **components -> rql input box** à 'yes'
+
+Il est alors possible d'éditer et de relancer toute requête
+
+.. _"préférences utilisateur: myprefs
+.. _RQL: doc/tut_rql
+.. image:: doc/images/userprefs
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/webconfig.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,321 @@
+"""common web configuration for twisted/modpython applications
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+import os
+from os.path import join, dirname, exists
+from urlparse import urljoin
+
+from logilab.common.configuration import Method
+from logilab.common.decorators import cached
+
+from cubicweb.toolsutils import read_config
+from cubicweb.cwconfig import CubicWebConfiguration, register_persistent_options, merge_options
+
+_ = unicode
+
+register_persistent_options( (
+    # site-wide only web ui configuration
+    ('site-title',
+     {'type' : 'string', 'default': 'unset title',
+      'help': _('site title'),
+      'sitewide': True, 'group': 'ui', 
+      }),
+    ('main-template',
+     {'type' : 'string', 'default': 'main',
+      'help': _('id of main template used to render pages'),
+      'sitewide': True, 'group': 'ui',
+      }),
+    # user web ui configuration
+    ('fckeditor',
+     {'type' : 'yn', 'default': True,
+      'help': _('should html fields being edited using fckeditor (a HTML '
+                'WYSIWYG editor).  You should also select text/html as default '
+                'text format to actually get fckeditor.'),
+      'group': 'ui',
+      }),
+    # navigation configuration
+    ('page-size',
+     {'type' : 'int', 'default': 40,
+      'help': _('maximum number of objects displayed by page of results'),
+      'group': 'navigation',
+      }),
+    ('related-limit',
+     {'type' : 'int', 'default': 8,
+      'help': _('maximum number of related entities to display in the primary '
+                'view'),
+      'group': 'navigation',
+      }),
+    ('combobox-limit',
+     {'type' : 'int', 'default': 20,
+      'help': _('maximum number of entities to display in related combo box'),
+      'group': 'navigation',
+      }),
+    
+    ))
+
+
+class WebConfiguration(CubicWebConfiguration):
+    """the WebConfiguration is a singleton object handling application's
+    configuration and preferences
+    """
+    cubicweb_vobject_path = CubicWebConfiguration.cubicweb_vobject_path | set(['web/views'])
+    cube_vobject_path = CubicWebConfiguration.cube_vobject_path | set(['views'])
+    
+    options = merge_options(CubicWebConfiguration.options + (
+        ('anonymous-user',
+         {'type' : 'string',
+          'default': None,
+          'help': 'login of the CubicWeb user account to use for anonymous user (if you want to allow anonymous)',
+          'group': 'main', 'inputlevel': 1,
+          }),
+        ('anonymous-password',
+         {'type' : 'string',
+          'default': None,
+          'help': 'password of the CubicWeb user account to use for anonymous user, '
+          'if anonymous-user is set',
+          'group': 'main', 'inputlevel': 1,
+          }),
+        ('query-log-file',
+         {'type' : 'string',
+          'default': None,
+          'help': 'web application query log file',
+          'group': 'main', 'inputlevel': 2,
+          }),
+        ('pyro-application-id',
+         {'type' : 'string',
+          'default': Method('default_application_id'),
+          'help': 'CubicWeb application identifier in the Pyro name server',
+          'group': 'pyro-client', 'inputlevel': 1,
+          }),
+        # web configuration
+        ('https-url',
+         {'type' : 'string',
+          'default': None,
+          'help': 'web server root url on https. By specifying this option your '\
+          'site can be available as an http and https site. Authenticated users '\
+          'will in this case be authenticated and once done navigate through the '\
+          'https site. IMPORTANTE NOTE: to do this work, you should have your '\
+          'apache redirection include "https" as base url path so cubicweb can '\
+          'differentiate between http vs https access. For instance: \n'\
+          'RewriteRule ^/demo/(.*) http://127.0.0.1:8080/https/$1 [L,P]\n'\
+          'where the cubicweb web server is listening on port 8080.',
+          'group': 'main', 'inputlevel': 2,
+          }),
+        ('auth-mode',
+         {'type' : 'choice',
+          'choices' : ('cookie', 'http'),
+          'default': 'cookie',
+          'help': 'authentication mode (cookie / http)',
+          'group': 'web', 'inputlevel': 1,
+          }),
+        ('realm',
+         {'type' : 'string',
+          'default': 'cubicweb',
+          'help': 'realm to use on HTTP authentication mode',
+          'group': 'web', 'inputlevel': 2,
+          }),
+        ('http-session-time',
+         {'type' : 'int',
+          'default': 0,
+          'help': 'duration in seconds for HTTP sessions. 0 mean no expiration. '\
+          'Should be greater than RQL server\'s session-time.',
+          'group': 'web', 'inputlevel': 2,
+          }),
+        ('cleanup-session-time',
+         {'type' : 'int',
+          'default': 43200,
+          'help': 'duration in seconds for which unused connections should be '\
+          'closed, to limit memory consumption. This is different from '\
+          'http-session-time since in some cases you may have an unexpired http '\
+          'session (e.g. valid session cookie) which will trigger transparent '\
+          'creation of a new session. In other cases, sessions may never expire \
+          and cause memory leak. Should be smaller than http-session-time, '\
+          'unless it\'s 0. Default to 12 h.',
+          'group': 'web', 'inputlevel': 2,
+          }),
+        ('cleanup-anonymous-session-time',
+         {'type' : 'int',
+          'default': 120,
+          'help': 'Same as cleanup-session-time but specific to anonymous '\
+          'sessions. Default to 2 min.',
+          'group': 'web', 'inputlevel': 2,
+          }),
+        ('embed-allowed',
+         {'type' : 'regexp',
+          'default': None,
+          'help': 'regular expression matching URLs that may be embeded. \
+leave it blank if you don\'t want the embedding feature, or set it to ".*" \
+if you want to allow everything',
+          'group': 'web', 'inputlevel': 1,
+          }),
+        ('submit-url',
+         {'type' : 'string',
+          'default': Method('default_submit_url'),
+          'help': ('URL that may be used to report bug in this application '
+                   'by direct access to the project\'s (jpl) tracker, '
+                   'if you want this feature on. The url should looks like '
+                   'http://mytracker.com/view?__linkto=concerns:1234:subject&etype=Ticket&type=bug&vid=creation '
+                   'where 1234 should be replaced by the eid of your project in '
+                   'the tracker. If you have no idea about what I\'am talking '
+                   'about, you should probably let no value for this option.'),
+          'group': 'web', 'inputlevel': 2,
+          }),
+        ('submit-mail',
+         {'type' : 'string',
+          'default': None,
+          'help': ('Mail used as recipient to report bug in this application, '
+                   'if you want this feature on'),
+          'group': 'web', 'inputlevel': 2,
+          }),
+
+        ('language-negociation',
+         {'type' : 'yn',
+          'default': True,
+          'help': 'use Accept-Language http header to try to set user '\
+          'interface\'s language according to browser defined preferences',
+          'group': 'web', 'inputlevel': 2,
+          }),
+        
+        ('print-traceback',
+         {'type' : 'yn',
+          'default': not CubicWebConfiguration.mode == 'installed',
+          'help': 'print the traceback on the error page when an error occured',
+          'group': 'web', 'inputlevel': 2,
+          }),
+        ))
+
+    def default_submit_url(self):
+        try:
+            cube = self.cubes()[0]
+            cubeeid = self.cube_pkginfo(cube).cube_eid
+        except Exception, ex:
+            return None
+        if cubeeid:
+            return 'http://intranet.logilab.fr/jpl/view?__linkto=concerns:%s:subject&etype=Ticket&type=bug&vid=creation' % cubeeid
+        return None
+
+    # method used to connect to the repository: 'inmemory' / 'pyro'
+    # Pyro repository by default
+    repo_method = 'pyro'
+    
+    # don't use @cached: we want to be able to disable it while this must still
+    # be cached
+    def repository(self, vreg=None):
+        """return the application's repository object"""
+        try:
+            return self.__repo
+        except AttributeError:
+            from cubicweb.dbapi import get_repository
+            if self.repo_method == 'inmemory':
+                repo = get_repository('inmemory', vreg=vreg, config=self)
+            else:
+                repo = get_repository('pyro', self['pyro-application-id'],
+                                      config=self)
+            self.__repo = repo
+            return repo
+
+    def vc_config(self):
+        return self.repository().get_versions()
+    
+    # mapping to external resources (id -> path) (`external_resources` file) ##
+    ext_resources = {
+        'FAVICON':  'DATADIR/favicon.ico',
+        'LOGO':     'DATADIR/logo.png',
+        'RSS_LOGO': 'DATADIR/rss.png',
+        'HELP':     'DATADIR/help.png',
+        'CALENDAR_ICON': 'DATADIR/calendar.gif',
+        'SEARCH_GO':'DATADIR/go.png',
+        
+        'FCKEDITOR_PATH':  '/usr/share/fckeditor/',
+        
+        'IE_STYLESHEETS':    ['DATADIR/cubicweb.ie.css'],
+        'STYLESHEETS':       ['DATADIR/cubicweb.css'],
+        'STYLESHEETS_PRINT': ['DATADIR/cubicweb.print.css'],
+        
+        'JAVASCRIPTS':       ['DATADIR/jquery.js',
+                              'DATADIR/cubicweb.compat.js',
+                              'DATADIR/jquery.json.js',
+                              'DATADIR/cubicweb.python.js',
+                              'DATADIR/cubicweb.htmlhelpers.js'],
+        }
+        
+        
+    def anonymous_user(self):
+        """return a login and password to use for anonymous users. None
+        may be returned for both if anonymous connections are not allowed
+        """
+        try:
+            user = self['anonymous-user']
+            passwd = self['anonymous-password']
+        except KeyError:
+            user, passwd = None, None
+        if user is not None:
+            user = unicode(user)
+        return user, passwd
+    
+    def has_resource(self, rid):
+        """return true if an external resource is defined"""
+        return bool(self.ext_resources.get(rid))
+
+    @cached
+    def locate_resource(self, rid):
+        """return the directory where the given resource may be found"""
+        return self._fs_locate(rid, 'data')
+            
+    @cached
+    def locate_doc_file(self, fname):
+        """return the directory where the given resource may be found"""
+        return self._fs_locate(fname, 'wdoc')
+            
+    def _fs_locate(self, rid, rdirectory):
+        """return the directory where the given resource may be found"""
+        path = [self.apphome] + self.cubes_path() + [join(self.shared_dir())]
+        for directory in path:
+            if exists(join(directory, rdirectory, rid)):
+                return join(directory, rdirectory)
+            
+    def locate_all_files(self, rid, rdirectory='wdoc'):
+        """return all files corresponding to the given resource"""
+        path = [self.apphome] + self.cubes_path() + [join(self.shared_dir())]
+        for directory in path:
+            fpath = join(directory, rdirectory, rid)
+            if exists(fpath):
+                yield join(fpath)
+
+    def load_configuration(self):
+        """load application's configuration files"""
+        super(WebConfiguration, self).load_configuration()
+        # load external resources definition
+        self._build_ext_resources()
+        self._init_base_url()
+        
+    def _init_base_url(self):
+        # normalize base url(s)
+        baseurl = self['base-url']
+        if baseurl and baseurl[-1] != '/':
+            baseurl += '/'
+            self.global_set_option('base-url', baseurl)
+        httpsurl = self['https-url']
+        if httpsurl and httpsurl[-1] != '/':
+            httpsurl += '/'
+            self.global_set_option('https-url', httpsurl)
+
+    def _build_ext_resources(self):
+        libresourcesfile = join(self.shared_dir(), 'data', 'external_resources')
+        self.ext_resources.update(read_config(libresourcesfile))
+        for path in reversed([self.apphome] + self.cubes_path()):
+            resourcesfile = join(path, 'data', 'external_resources')
+            if exists(resourcesfile):
+                self.debug('loading %s', resourcesfile)
+                self.ext_resources.update(read_config(resourcesfile))
+        for resource in ('STYLESHEETS', 'STYLESHEETS_PRINT',
+                         'IE_STYLESHEETS', 'JAVASCRIPTS'):
+            val = self.ext_resources[resource]
+            if isinstance(val, str):
+                files = [w.strip() for w in val.split(',') if w.strip()]
+                self.ext_resources[resource] = files
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/webctl.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,28 @@
+"""cubicweb-ctl commands and command handlers common to twisted/modpython
+web configuration
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from cubicweb.toolsutils import CommandHandler
+
+
+class WebCreateHandler(CommandHandler):
+    cmdname = 'create'
+    
+    def bootstrap(self, cubes, inputlevel=0):
+        """bootstrap this configuration"""
+        print '** generic web configuration'
+        config = self.config
+        if config.repo_method == 'pyro':
+            print
+            print '** repository server configuration'
+            print '-' * 72
+            config.input_config('pyro-client', inputlevel)
+    
+    def postcreate(self):
+        """hooks called once application's initialization has been completed"""
+        
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/widgets.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,981 @@
+"""widgets for entity edition
+
+those are in cubicweb.common since we need to know available widgets at schema
+serialization time
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from simplejson import dumps
+from mx.DateTime import now, today
+
+from logilab.mtconverter import html_escape
+
+from yams.constraints import SizeConstraint, StaticVocabularyConstraint
+
+from cubicweb.common.uilib import toggle_action
+from cubicweb.web import INTERNAL_FIELD_VALUE, eid_param
+
+def _format_attrs(kwattrs):
+    """kwattrs is the dictionary of the html attributes available for
+    the edited element
+    """
+    # sort for predictability (required for tests)
+    return u' '.join(sorted(u'%s="%s"' % item for item in kwattrs.iteritems()))
+
+def _value_from_values(values):
+    # take care, value may be 0, 0.0...
+    if values:
+        value = values[0]
+        if value is None:
+            value = u''
+    else:
+        value = u''
+    return value
+
+def _eclass_eschema(eschema_or_eclass):
+    try:
+        return eschema_or_eclass, eschema_or_eclass.e_schema
+    except AttributeError:
+        return None, eschema_or_eclass
+
+def checkbox(name, value, attrs='', checked=None):
+    if checked is None:
+        checked = value
+    checked = checked and 'checked="checked"' or ''
+    return u'<input type="checkbox" name="%s" value="%s" %s %s />' % (
+        name, value, checked, attrs)
+
+def widget(vreg, subjschema, rschema, objschema, role='object'):
+    """get a widget to edit the given relation"""
+    if rschema == 'eid':
+        # return HiddenWidget(vreg, subjschema, rschema, objschema)
+        return EidWidget(vreg, _eclass_eschema(subjschema)[1], rschema, objschema)
+    return widget_factory(vreg, subjschema, rschema, objschema, role=role)
+
+
+class Widget(object):
+    """abstract widget class"""
+    need_multipart = False
+    # generate the "id" attribute with the same value as the "name" (html) attribute
+    autoid = True
+    html_attributes = set(('id', 'class', 'tabindex', 'accesskey', 'onchange', 'onkeypress'))
+    cubicwebns_attributes = set()
+    
+    def __init__(self, vreg, subjschema, rschema, objschema,
+                 role='subject', description=None,
+                 **kwattrs):
+        self.vreg = vreg
+        self.rschema = rschema
+        self.subjtype = subjschema
+        self.objtype = objschema
+        self.role = role
+        self.name = rschema.type
+        self.description = description
+        self.attrs = kwattrs
+        # XXX accesskey may not be unique
+        kwattrs['accesskey'] = self.name[0]
+
+    def copy(self):
+        """shallow copy (useful when you need to modify self.attrs
+        because widget instances are cached)
+        """
+        # brute force copy (subclasses don't have the
+        # same __init__ prototype) 
+        widget = self.__new__(self.__class__)
+        widget.__dict__ = dict(self.__dict__)
+        widget.attrs = dict(widget.attrs)
+        return widget
+    
+    @staticmethod
+    def size_constraint_attrs(attrs, maxsize):
+        """set html attributes in the attrs dict to consider maxsize"""
+        pass
+
+    def format_attrs(self):
+        """return a string with html attributes available for the edit input"""
+        # sort for predictability (required for tests)
+        attrs = []
+        for name, value in self.attrs.iteritems():
+            # namespace attributes have priority over standard xhtml ones
+            if name in self.cubicwebns_attributes:
+                attrs.append(u'cubicweb:%s="%s"' % (name, value))
+            elif name in self.html_attributes:
+                attrs.append(u'%s="%s"' % (name, value))
+        return u' '.join(sorted(attrs))
+        
+    def required(self, entity):
+        """indicates if the widget needs a value to be filled in"""
+        card = self.rschema.cardinality(self.subjtype, self.objtype, self.role)
+        return card in '1+'
+
+    def input_id(self, entity):
+        try:
+            return self.rname
+        except AttributeError:
+            return eid_param(self.name, entity.eid)
+    
+    def render_label(self, entity, label=None):
+        """render widget's label"""
+        label = label or self.rschema.display_name(entity.req, self.role)
+        forid = self.input_id(entity)
+        if forid:
+            forattr =  ' for="%s"' % forid
+        else:
+            forattr = ''
+        if self.required(entity):
+            label = u'<label class="required"%s>%s</label>' % (forattr, label)
+        else:
+            label = u'<label%s>%s</label>' % (forattr, label)
+        return label
+    
+    def render_error(self, entity):
+        """return validation error for widget's field of the given entity, if
+        any
+        """
+        errex = entity.req.data.get('formerrors')
+        if errex and errex.eid == entity.eid and self.name in errex.errors:
+            entity.req.data['displayederrors'].add(self.name)
+            return u'<span class="error">%s</span>' % errex.errors[self.name]
+        return u''
+
+    def render_help(self, entity):
+        """render a help message about the (edited) field"""
+        req = entity.req
+        help = [u'<br/>']
+        descr = self.description or self.rschema.rproperty(self.subjtype, self.objtype, 'description')
+        if descr:
+            help.append(u'<span class="helper">%s</span>' % req._(descr))
+        example = self.render_example(req)
+        if example:
+            help.append(u'<span class="helper">(%s: %s)</span>'
+                        % (req._('sample format'), example))
+        return u'&nbsp;'.join(help)
+    
+    def render_example(self, req):
+        return u''
+        
+    def render(self, entity):
+        """render the widget for a simple view"""
+        if not entity.has_eid():
+            return u''
+        return entity.printable_value(self.name)
+    
+    def edit_render(self, entity, tabindex=None,
+                    includehelp=False, useid=None, **kwargs):
+        """render the widget for edition"""
+        # this is necessary to handle multiple edition
+        self.rname = eid_param(self.name, entity.eid)
+        if useid:
+            self.attrs['id'] = useid
+        elif self.autoid:
+            self.attrs['id'] = self.rname
+        if tabindex is not None:
+            self.attrs['tabindex'] = tabindex
+        else:
+            self.attrs['tabindex'] = entity.req.next_tabindex()
+        output = self._edit_render(entity, **kwargs)
+        if includehelp:
+            output += self.render_help(entity)
+        return output
+    
+    def _edit_render(self, entity):
+        """do the actual job to render the widget for edition"""
+        raise NotImplementedError
+
+    def current_values(self, entity):
+        """return the value of the field associated to this widget on the given
+        entity. Always return a list of values, which will have size equal to 1
+        if the field is monovalued (like all attribute fields, but not all
+        non-final relation fields)
+        """
+        if self.rschema.is_final():
+            return entity.attribute_values(self.name)
+        elif entity.has_eid():
+            return [row[0] for row in entity.related(self.name, self.role)]
+        return ()
+            
+    def current_value(self, entity):
+        return _value_from_values(self.current_values(entity))
+
+    def current_display_values(self, entity):
+        """same as .current_values but consider values stored in session in case
+        of validation error
+        """
+        values = entity.req.data.get('formvalues')
+        if values is None:
+            return self.current_values(entity)
+        cdvalues = values.get(self.rname)
+        if cdvalues is None:
+            return self.current_values(entity)
+        if not isinstance(cdvalues, (list, tuple)):
+            cdvalues = (cdvalues,)
+        return cdvalues
+    
+    def current_display_value(self, entity):
+        """same as .current_value but consider values stored in session in case
+        of validation error
+        """
+        return _value_from_values(self.current_display_values(entity))
+    
+    def hidden_input(self, entity, qvalue):
+        """return a hidden field which
+        1. indicates that a field is edited
+        2. holds the old value to easily detect if the field has been modified
+
+        `qvalue` is the html quoted old value
+        """
+        if self.role == 'subject':
+            editmark = 'edits'
+        else:
+            editmark = 'edito'
+        if qvalue is None or not entity.has_eid():
+            qvalue = INTERNAL_FIELD_VALUE
+        return u'<input type="hidden" name="%s-%s" value="%s"/>\n' % (
+            editmark, self.rname, qvalue)
+
+class InputWidget(Widget):
+    """abstract class for input generating a <input> tag"""
+    input_type = None
+    html_attributes = Widget.html_attributes | set(('type', 'name', 'value'))
+
+    def _edit_render(self, entity):
+        value = self.current_value(entity)
+        dvalue = self.current_display_value(entity)
+        if isinstance(value, basestring):
+            value = html_escape(value)
+        if isinstance(dvalue, basestring):
+            dvalue = html_escape(dvalue)
+        return u'%s<input type="%s" name="%s" value="%s" %s/>' % (
+            self.hidden_input(entity, value), self.input_type,
+            self.rname, dvalue, self.format_attrs())
+
+class HiddenWidget(InputWidget):
+    input_type = 'hidden'
+    autoid = False
+    def __init__(self, vreg, subjschema, rschema, objschema,
+                 role='subject', **kwattrs):
+        InputWidget.__init__(self, vreg, subjschema, rschema, objschema,
+                             role='subject', 
+                             **kwattrs)
+        # disable access key
+        del self.attrs['accesskey']
+
+    def current_value(self, entity):
+        value = InputWidget.current_value(self, entity)
+        return value or INTERNAL_FIELD_VALUE
+
+    def current_display_value(self, entity):
+        value = InputWidget.current_display_value(self, entity)
+        return value or INTERNAL_FIELD_VALUE
+    
+    def render_label(self, entity, label=None):
+        """render widget's label"""
+        return u''
+    
+    def render_help(self, entity):
+        return u''
+    
+    def hidden_input(self, entity, value):
+        """no hidden input for hidden input"""
+        return ''
+    
+
+class EidWidget(HiddenWidget):
+
+    def _edit_render(self, entity):
+        return u'<input type="hidden" name="eid" value="%s" />' % entity.eid
+
+
+class StringWidget(InputWidget):
+    input_type = 'text'
+    html_attributes = InputWidget.html_attributes | set(('size', 'maxlength'))
+    @staticmethod
+    def size_constraint_attrs(attrs, maxsize):
+        """set html attributes in the attrs dict to consider maxsize"""
+        attrs['size'] = min(maxsize, 40)
+        attrs['maxlength'] = maxsize
+        
+        
+class AutoCompletionWidget(StringWidget):
+    cubicwebns_attributes = (StringWidget.cubicwebns_attributes |
+                          set(('accesskey', 'size', 'maxlength')))
+    attrs = ()
+    
+    wdgtype = 'SuggestField'
+    
+    def current_value(self, entity):
+        value = StringWidget.current_value(self, entity)
+        return value or INTERNAL_FIELD_VALUE
+
+    def _get_url(self, entity):
+        return entity.req.build_url('json', fname=entity.autocomplete_initfuncs[self.rschema],
+                                pageid=entity.req.pageid, mode='remote')
+
+    def _edit_render(self, entity):
+        req = entity.req
+        req.add_js( ('cubicweb.widgets.js', 'jquery.autocomplete.js') )
+        req.add_css('jquery.autocomplete.css')
+        value = self.current_value(entity)
+        dvalue = self.current_display_value(entity)
+        if isinstance(value, basestring):
+            value = html_escape(value)
+        if isinstance(dvalue, basestring):
+            dvalue = html_escape(dvalue)
+        iid = self.attrs.pop('id')
+        if self.required(entity):
+            cssclass = u' required'
+        else:
+            cssclass = u''
+        dataurl = self._get_url(entity)
+        return (u'%(hidden)s<input type="text" name="%(iid)s" value="%(value)s" cubicweb:dataurl="%(url)s" class="widget%(required)s" id="%(iid)s" '
+                u'tabindex="%(tabindex)s" cubicweb:loadtype="auto" cubicweb:wdgtype="%(wdgtype)s"  %(attrs)s />' % {
+                    'iid': iid,
+                    'hidden': self.hidden_input(entity, value),
+                    'wdgtype': self.wdgtype,
+                    'url': html_escape(dataurl),
+                    'tabindex': self.attrs.pop('tabindex'),
+                    'value': dvalue,
+                    'attrs': self.format_attrs(),
+                    'required' : cssclass,
+                    })
+
+class StaticFileAutoCompletionWidget(AutoCompletionWidget):
+    wdgtype = 'StaticFileSuggestField'
+    
+    def _get_url(self, entity):
+        return entity.req.datadir_url + entity.autocomplete_initfuncs[self.rschema]
+
+class RestrictedAutoCompletionWidget(AutoCompletionWidget):
+    wdgtype = 'RestrictedSuggestField'    
+
+    
+class PasswordWidget(InputWidget):
+    input_type = 'password'
+    
+    def required(self, entity):
+        if InputWidget.required(self, entity) and not entity.has_eid():
+            return True
+        return False
+    
+    def current_values(self, entity):
+        # on an existing entity, show the password field as non empty (we
+        # don't have the actual value)
+        if entity.has_eid():
+            return (INTERNAL_FIELD_VALUE,)
+        return super(PasswordWidget, self).current_values(entity)
+
+    def _edit_render(self, entity):
+        html = super(PasswordWidget, self)._edit_render(entity)
+        name = eid_param(self.name + '-confirm', entity.eid)
+        return u'%s<br/>\n<input type="%s" name="%s" id="%s" tabindex="%s"/>&nbsp;<span class="emphasis">(%s)</span>' % (
+            html, self.input_type, name, name, entity.req.next_tabindex(),
+            entity.req._('confirm password'))
+
+    
+class TextWidget(Widget):
+    html_attributes = Widget.html_attributes | set(('rows', 'cols'))
+    
+    @staticmethod
+    def size_constraint_attrs(attrs, maxsize):
+        """set html attributes in the attrs dict to consider maxsize"""
+        if 256 < maxsize < 513:
+            attrs['cols'], attrs['rows'] = 60, 5
+        else:
+            attrs['cols'], attrs['rows'] = 80, 10
+    
+    def render(self, entity):
+        if not entity.has_eid():
+            return u''
+        return entity.printable_value(self.name)
+    
+    def add_fckeditor_info(self, req):
+        req.add_js('fckeditor.js')
+        req.fckeditor_config()
+    
+    def _edit_render(self, entity, with_format=True):
+        req = entity.req
+        editor = self._edit_render_textarea(entity, with_format)
+        value = self.current_value(entity)
+        if isinstance(value, basestring):
+            value = html_escape(value)
+        return u'%s%s' % (self.hidden_input(entity, value), editor)
+    
+    def _edit_render_textarea(self, entity, with_format):
+        self.attrs.setdefault('cols', 80)
+        self.attrs.setdefault('rows', 20)
+        dvalue = self.current_display_value(entity)
+        if isinstance(dvalue, basestring):
+            dvalue = html_escape(dvalue)
+        if entity.use_fckeditor(self.name):
+            self.add_fckeditor_info(entity.req)
+            if with_format:
+                if entity.has_eid():
+                    format = entity.format(self.name)
+                else:
+                    format = ''
+                frname = eid_param(self.name + '_format', entity.eid)
+                hidden = u'<input type="hidden" name="edits-%s" value="%s"/>\n'\
+                         '<input type="hidden" name="%s" value="text/html"/>\n' % (
+                    frname, format, frname)
+            return u'%s<textarea cubicweb:type="wysiwyg" onkeypress="autogrow(this)" name="%s" %s>%s</textarea>' % (
+                hidden, self.rname, self.format_attrs(), dvalue)
+        if with_format and entity.has_format(self.name):
+            fmtwdg = entity.get_widget(self.name + '_format')
+            fmtwdgstr = fmtwdg.edit_render(entity, tabindex=self.attrs['tabindex'])
+            self.attrs['tabindex'] = entity.req.next_tabindex()
+        else:
+            fmtwdgstr = ''
+        return u'%s<br/><textarea onkeypress="autogrow(this)" name="%s" %s>%s</textarea>' % (
+            fmtwdgstr, self.rname, self.format_attrs(), dvalue)
+            
+    
+class CheckBoxWidget(Widget):
+    html_attributes = Widget.html_attributes | set(('checked', ))
+    def _edit_render(self, entity):
+        value = self.current_value(entity)
+        dvalue = self.current_display_value(entity)
+        return self.hidden_input(entity, value) + checkbox(self.rname, 'checked', self.format_attrs(), dvalue)
+
+    def render(self, entity):
+        if not entity.has_eid():
+            return u''
+        if getattr(entity, self.name):
+            return entity.req._('yes')
+        return entity.req._('no')
+
+
+class YesNoRadioWidget(CheckBoxWidget):
+    
+    def _edit_render(self, entity):
+        value = self.current_value(entity)
+        dvalue = self.current_display_value(entity)
+        attrs1 = self.format_attrs()
+        del self.attrs['id'] # avoid duplicate id for xhtml compliance
+        attrs2 = self.format_attrs()
+        if dvalue:
+            attrs1 += ' checked="checked"'
+        else:
+            attrs2 += ' checked="checked"'
+        wdgs = [self.hidden_input(entity, value),
+                u'<input type="radio" name="%s" value="1" %s/>%s<br/>' % (self.rname, attrs1, entity.req._('yes')),
+                u'<input type="radio" name="%s" value="" %s/>%s<br/>' % (self.rname, attrs2, entity.req._('no'))]
+        return '\n'.join(wdgs)
+
+    
+class FileWidget(Widget):
+    need_multipart = True
+    def _file_wdg(self, entity):
+        wdgs = [u'<input type="file" name="%s" %s/>' % (self.rname, self.format_attrs())]
+        req = entity.req
+        if entity.has_format(self.name) or entity.has_text_encoding(self.name):
+            divid = '%s-%s-advanced' % (self.name, entity.eid)
+            wdgs.append(u'<a href="%s" title="%s"><img src="%s" alt="%s"/></a>' %
+                        (html_escape(toggle_action(divid)),
+                         req._('show advanced fields'),
+                         html_escape(req.build_url('data/puce_down.png')),
+                         req._('show advanced fields')))
+            wdgs.append(u'<div id="%s" class="hidden">' % divid)
+            for extraattr in ('_format', '_encoding'):
+                if entity.e_schema.has_subject_relation('%s%s' % (self.name, extraattr)):
+                    ewdg = entity.get_widget(self.name + extraattr)
+                    wdgs.append(ewdg.render_label(entity))
+                    wdgs.append(ewdg.edit_render(entity, includehelp=True))
+                    wdgs.append(u'<br/>')
+            wdgs.append(u'</div>')
+        if entity.has_eid() and not self.required(entity):
+            # trick to be able to delete an uploaded file
+            wdgs.append(u'<br/>')
+            wdgs.append(checkbox(eid_param('__%s_detach' % self.rname, entity.eid), False))
+            wdgs.append(req._('detach attached file'))
+        return '\n'.join(wdgs)
+    
+    def _edit_render(self, entity):
+        return self.hidden_input(entity, None) + self._file_wdg(entity)
+
+
+class TextFileWidget(FileWidget):
+    def _edit_msg(self, entity):
+        if entity.has_eid() and not self.required(entity):
+            msg = entity.req._(
+                'You can either submit a new file using the browse button above'
+                ', or choose to remove already uploaded file by checking the '
+                '"detach attached file" check-box, or edit file content online '
+                'with the widget below.')
+        else:
+            msg = entity.req._(
+                'You can either submit a new file using the browse button above'
+                ', or edit file content online with the widget below.')
+        return msg
+    
+    def _edit_render(self, entity):
+        wdgs = [self._file_wdg(entity)]
+        if entity.format(self.name) in ('text/plain', 'text/html', 'text/rest'):
+            msg = self._edit_msg(entity)
+            wdgs.append(u'<p><b>%s</b></p>' % msg)
+            twdg = TextWidget(self.vreg, self.subjtype, self.rschema, self.objtype)
+            twdg.rname = self.rname
+            data = getattr(entity, self.name)
+            if data:
+                encoding = entity.text_encoding(self.name)
+                try:
+                    entity[self.name] = unicode(data.getvalue(), encoding)
+                except UnicodeError:
+                    pass
+                else:
+                    wdgs.append(twdg.edit_render(entity, with_format=False))
+                    entity[self.name] = data # restore Binary value
+            wdgs.append(u'<br/>')
+        return '\n'.join(wdgs)
+
+
+class ComboBoxWidget(Widget):
+    html_attributes = Widget.html_attributes | set(('multiple', 'size'))
+    
+    def __init__(self, vreg, subjschema, rschema, objschema,
+                 multiple=False, **kwattrs):
+        super(ComboBoxWidget, self).__init__(vreg, subjschema, rschema, objschema,
+                                             **kwattrs)
+        if multiple:
+            self.attrs['multiple'] = 'multiple'
+            if not 'size' in self.attrs:
+                self.attrs['size'] = '5'
+        # disable access key (dunno why but this is not allowed by xhtml 1.0)
+        del self.attrs['accesskey']
+        
+    def vocabulary(self, entity):
+        raise NotImplementedError()
+    
+    def form_value(self, entity, value, values):
+        if value in values:
+            flag = 'selected="selected"'
+        else:
+            flag = ''
+        return value, flag
+
+    def _edit_render(self, entity):
+        values = self.current_values(entity)
+        if values:
+            res = [self.hidden_input(entity, v) for v in values]
+        else:
+            res = [self.hidden_input(entity, INTERNAL_FIELD_VALUE)]
+        dvalues = self.current_display_values(entity)
+        res.append(u'<select name="%s" %s>' % (self.rname, self.format_attrs()))
+        for label, value in self.vocabulary(entity):
+            if value is None:
+                # handle separator
+                res.append(u'<optgroup label="%s"/>' % (label or ''))
+            else:
+                value, flag = self.form_value(entity, value, dvalues)
+                res.append(u'<option value="%s" %s>%s</option>' % (value, flag, html_escape(label)))
+        res.append(u'</select>')
+        return '\n'.join(res)
+
+ 
+class StaticComboBoxWidget(ComboBoxWidget):
+    
+    def __init__(self, vreg, subjschema, rschema, objschema,
+                 vocabfunc, multiple=False, sort=False, **kwattrs):
+        super(StaticComboBoxWidget, self).__init__(vreg, subjschema, rschema, objschema,
+                                                   multiple, **kwattrs)
+        self.sort = sort
+        self.vocabfunc = vocabfunc
+
+    def vocabulary(self, entity):
+        choices = self.vocabfunc(entity)
+        if self.sort:
+            choices = sorted(choices)
+        if self.rschema.rproperty(self.subjtype, self.objtype, 'internationalizable'):
+            return zip((entity.req._(v) for v in choices), choices)
+        return zip(choices, choices)
+    
+
+class EntityLinkComboBoxWidget(ComboBoxWidget):
+    """to be used by specific forms"""
+    
+    def current_values(self, entity):
+        if entity.has_eid():
+            return [r[0] for r in entity.related(self.name, self.role)]
+        defaultmeth = 'default_%s_%s' % (self.role, self.name)
+        if hasattr(entity, defaultmeth):
+            return getattr(entity, defaultmeth)()
+        return ()
+    
+    def vocabulary(self, entity):
+        return [('', INTERNAL_FIELD_VALUE)] + entity.vocabulary(self.rschema, self.role)
+
+
+class RawDynamicComboBoxWidget(EntityLinkComboBoxWidget):
+    
+    def vocabulary(self, entity, limit=None):
+        req = entity.req
+        # first see if its specified by __linkto form parameters
+        linkedto = entity.linked_to(self.name, self.role)
+        if linkedto:
+            entities = (req.eid_rset(eid).get_entity(0, 0) for eid in linkedto)
+            return [(entity.view('combobox'), entity.eid) for entity in entities]
+        # it isn't, check if the entity provides a method to get correct values
+        if not self.required(entity):
+            res = [('', INTERNAL_FIELD_VALUE)]
+        else:
+            res = []
+        # vocabulary doesn't include current values, add them
+        if entity.has_eid():
+            rset = entity.related(self.name, self.role)
+            relatedvocab = [(e.view('combobox'), e.eid) for e in rset.entities()]
+        else:
+            relatedvocab = []
+        return res + entity.vocabulary(self.rschema, self.role) + relatedvocab
+
+
+class DynamicComboBoxWidget(RawDynamicComboBoxWidget):
+    
+    def vocabulary(self, entity, limit=None):
+        return sorted(super(DynamicComboBoxWidget, self).vocabulary(entity, limit))
+
+
+class AddComboBoxWidget(DynamicComboBoxWidget):
+    def _edit_render(self, entity):
+        req = entity.req
+        req.add_js( ('cubicweb.ajax.js', 'jquery.js', 'cubicweb.widgets.js') )
+        values = self.current_values(entity)
+        if values:
+            res = [self.hidden_input(entity, v) for v in values]
+        else:
+            res = [self.hidden_input(entity, INTERNAL_FIELD_VALUE)]
+        dvalues = self.current_display_values(entity)
+        etype_from = entity.e_schema.subject_relation(self.name).objects(entity.e_schema)[0]
+        res.append(u'<select class="widget" cubicweb:etype_to="%s" cubicweb:etype_from="%s" cubicweb:loadtype="auto" cubicweb:wdgtype="AddComboBox" name="%s" %s>'
+                   % (entity.e_schema, etype_from, self.rname, self.format_attrs()))
+        for label, value in self.vocabulary(entity):
+            if value is None:
+                # handle separator
+                res.append(u'<optgroup label="%s"/>' % (label or ''))
+            else:
+                value, flag = self.form_value(entity, value, dvalues)
+                res.append(u'<option value="%s" %s>%s</option>' % (value, flag, html_escape(label)))
+        res.append(u'</select>')
+        res.append(u'<div id="newvalue">')
+        res.append(u'<input type="text" id="newopt" />')
+        res.append(u'<a href="javascript:noop()" id="add_newopt">&nbsp;</a></div>')
+        return '\n'.join(res)
+
+class IntegerWidget(StringWidget):
+    def __init__(self, vreg, subjschema, rschema, objschema, **kwattrs):
+        kwattrs['size'] = 5
+        kwattrs['maxlength'] = 15
+        StringWidget.__init__(self, vreg, subjschema, rschema, objschema, **kwattrs)
+        
+    def render_example(self, req):
+        return '23'
+    
+
+        
+class FloatWidget(StringWidget):
+    def __init__(self, vreg, subjschema, rschema, objschema, **kwattrs):
+        kwattrs['size'] = 5
+        kwattrs['maxlength'] = 15
+        StringWidget.__init__(self, vreg, subjschema, rschema, objschema, **kwattrs)
+
+    def render_example(self, req):
+        formatstr = req.property_value('ui.float-format')
+        return formatstr % 1.23
+    
+    def current_values(self, entity):
+        values = entity.attribute_values(self.name)
+        if values:
+            formatstr = entity.req.property_value('ui.float-format')
+            value = values[0]
+            if value is not None:
+                value = float(value)
+            else:
+                return ()
+            return [formatstr % value]
+        return ()
+
+class DecimalWidget(StringWidget):
+    def __init__(self, vreg, subjschema, rschema, objschema, **kwattrs):
+        kwattrs['size'] = 5
+        kwattrs['maxlength'] = 15
+        StringWidget.__init__(self, vreg, subjschema, rschema, objschema, **kwattrs)
+        
+    def render_example(self, req):
+        return '345.0300'
+    
+
+
+class DateWidget(StringWidget):
+    format_key = 'ui.date-format'
+    monthnames = ("january", "february", "march", "april",
+                  "may", "june", "july", "august",
+                  "september", "october", "november", "december")
+    
+    daynames = ("monday", "tuesday", "wednesday", "thursday",
+                "friday", "saturday", "sunday")
+    
+    def __init__(self, vreg, subjschema, rschema, objschema, **kwattrs):
+        kwattrs.setdefault('size', 10)
+        kwattrs.setdefault('maxlength', 10)
+        StringWidget.__init__(self, vreg, subjschema, rschema, objschema, **kwattrs)
+
+    def current_values(self, entity):
+        values = entity.attribute_values(self.name)
+        if values and hasattr(values[0], 'strftime'):
+            formatstr = entity.req.property_value(self.format_key)
+            return [values[0].strftime(formatstr)]
+        return values
+
+    def render_example(self, req):
+        formatstr = req.property_value(self.format_key)
+        return now().strftime(formatstr)
+
+    def add_localized_infos(self, req):
+        """inserts JS variables defining localized months and days"""
+        # import here to avoid dependency from cubicweb-common to simplejson
+        _ = req._
+        monthnames = [_(mname) for mname in self.monthnames]
+        daynames = [_(dname) for dname in self.daynames]
+        req.html_headers.define_var('MONTHNAMES', monthnames)
+        req.html_headers.define_var('DAYNAMES', daynames)
+
+
+    def _edit_render(self, entity):
+        wdg = super(DateWidget, self)._edit_render(entity)
+        cal_button = self.render_calendar_popup(entity)
+        return wdg+cal_button
+
+    def render_help(self, entity):
+        """render a help message about the edited date field"""
+        req = entity.req
+        help = [ u'<br/>' ]
+        descr = self.rschema.rproperty(self.subjtype, self.objtype, 'description')
+        if descr:
+            help.append('<span class="helper">%s</span>' % req._(descr))
+        example = self.render_example(req)
+        if example:
+            help.append('<span class="helper">(%s: %s)</span>'
+                        % (req._('sample format'), example))
+        return u'&nbsp;'.join(help)
+
+    def render_calendar_popup(self, entity):
+        """calendar popup widget"""
+        req = entity.req
+        self.add_localized_infos(req)
+        req.add_js(('cubicweb.ajax.js', 'cubicweb.calendar.js',))
+        req.add_css(('cubicweb.calendar_popup.css',))
+        inputid = self.attrs.get('id', self.rname)
+        helperid = "%shelper" % inputid
+        _today = today()
+        year = int(req.form.get('year', _today.year))
+        month = int(req.form.get('month', _today.month))
+
+        return (u"""<a onclick="toggleCalendar('%s', '%s', %s, %s);" class="calhelper">
+<img src="%s" title="%s" alt="" /></a><div class="calpopup hidden" id="%s"></div>"""
+                % (helperid, inputid, year, month,
+                   req.external_resource('CALENDAR_ICON'), req._('calendar'), helperid) )
+
class DateTimeWidget(DateWidget):
    """date + time edition widget, inheriting the calendar popup from
    DateWidget
    """
    format_key = 'ui.datetime-format'

    def __init__(self, vreg, subjschema, rschema, objschema, **kwattrs):
        # a datetime such as '2008/11/05 15:52' needs 16 characters
        kwattrs['size'] = 16
        kwattrs['maxlength'] = 16
        DateWidget.__init__(self, vreg, subjschema, rschema, objschema, **kwattrs)

    def render_example(self, req):
        """return a sample value in the expected datetime format, plus the
        date-only fallback format
        """
        # use format_key instead of a duplicated hardcoded property name so
        # subclasses / future changes only have to update the class attribute
        formatstr1 = req.property_value(self.format_key)
        formatstr2 = req.property_value('ui.date-format')
        return req._('%s, or without time: %s') % (now().strftime(formatstr1),
                                                   now().strftime(formatstr2))
+
+
class TimeWidget(StringWidget):
    """edition widget for Time attributes (e.g. '15:52')"""
    format_key = 'ui.time-format'

    def __init__(self, vreg, subjschema, rschema, objschema, **kwattrs):
        # 'HH:MM' needs 5 characters
        kwattrs.update({'size': 5, 'maxlength': 5})
        StringWidget.__init__(self, vreg, subjschema, rschema, objschema, **kwattrs)
+
+        
class EmailWidget(StringWidget):
    """read-only rendering of an email attribute as a mailto: link"""

    def render(self, entity):
        email = getattr(entity, self.name)
        if not email:
            return u''
        # escape the value before injecting it in HTML, consistently with
        # URLWidget below (the attribute value is user-supplied)
        email = html_escape(email)
        return u'<a href="mailto:%s">%s</a>' % (email, email)
+        
class URLWidget(StringWidget):
    """read-only rendering of an URL attribute as a hyperlink"""

    def render(self, entity):
        value = getattr(entity, self.name)
        if not value:
            return u''
        escaped = html_escape(value)
        return u'<a href="%s">%s</a>' % (escaped, escaped)
+    
class EmbededURLWidget(StringWidget):
    """read-only rendering of an URL attribute as a link to the 'embed'
    controller for that URL
    """

    def render(self, entity):
        url = getattr(entity, self.name)
        if not url:
            return u''
        aurl = html_escape(entity.build_url('embed', url=url))
        # escape the displayed text as well: previously only the href was
        # escaped, letting markup in the attribute value through
        return u'<a href="%s">%s</a>' % (aurl, html_escape(url))
+
+
+
class PropertyKeyWidget(ComboBoxWidget):
    """specific widget for EProperty.pkey field to set the value widget according to
    the selected key
    """

    def _edit_render(self, entity):
        """render the key selector, wiring an onchange handler which swaps
        the companion value widget according to the selected key
        """
        entity.req.add_js( ('cubicweb.ajax.js', 'cubicweb.edition.js') )
        vtabindex = self.attrs.get('tabindex', 0) + 1
        self.attrs['onchange'] = "javascript:setPropValueWidget('%s', %s)" % (
            entity.eid, vtabindex)
        # limit size
        self.attrs['size'] = 1 if entity.has_eid() else 10
        return super(PropertyKeyWidget, self)._edit_render(entity)

    def vocabulary(self, entity):
        """return the sorted (label, key) choices for the pkey field"""
        _ = entity.req._
        if entity.has_eid():
            # the key of an existing property is fixed
            return [(_(entity.pkey), entity.pkey)]
        # key beginning with 'system.' should usually not be edited by hand
        choices = entity.vreg.user_property_keys()
        return sorted((_(key), key) for key in choices)
+
+
class PropertyValueWidget(Widget):
    """specific widget for EProperty.value field which will be different according to
    the selected key type and vocabulary information
    """

    def render_help(self, entity):
        # help is rendered by the delegated per-key widget, not by this wrapper
        return u''

    def render(self, entity):
        """view-only rendering: delegate to the widget matching the entity's key"""
        assert entity.has_eid()
        w = self.vreg.property_value_widget(entity.pkey, req=entity.req, **self.attrs)
        return w.render(entity)

    def _edit_render(self, entity):
        """edition rendering: delegate to the per-key widget; for a new entity
        render an empty placeholder filled client-side once a key is selected
        """
        if not entity.has_eid():
            # no key set yet, just include an empty div which will be filled
            # on key selection
            # empty span as well else html validation fail (label is refering to this id)
            return u'<div id="div:%s"><span id="%s"/></div>' % (self.rname, self.attrs.get('id'))
        w = self.vreg.property_value_widget(entity.pkey, req=entity.req, **self.attrs)
        if entity.pkey.startswith('system.'):
            # system properties are displayed read-only with an explanation
            value = '<span class="value" id="%s">%s</span>' % (self.attrs.get('id'), w.render(entity))
            msg = entity.req._('value associated to this key is not editable manually')
            return value + '<div>%s</div>' % msg
        return w.edit_render(entity, self.attrs.get('tabindex'), includehelp=True)
+    
+
def widget_factory(vreg, subjschema, rschema, objschema, role='subject',
                   **kwargs):
    """return the most adapated widget to edit the relation
    'subjschema rschema objschema' according to information found in the schema

    :param role: which end of the relation is edited ('subject' or 'object')
    """
    # resolve the entity class on the edited side; it may declare an explicit
    # widget for this relation through its `widgets` dict
    if role == 'subject':
        eclass, subjschema = _eclass_eschema(subjschema)
    else:
        eclass, objschema = _eclass_eschema(objschema)
    if eclass is not None and rschema in eclass.widgets:
        # explicit widget name declared on the entity class
        wcls = WIDGETS[eclass.widgets[rschema]]
    elif not rschema.is_final():
        # non-final (entity) relation: use a combo box, multi-valued when the
        # cardinality on the edited side allows several targets
        card = rschema.rproperty(subjschema, objschema, 'cardinality')
        if role == 'object':
            multiple = card[1] in '+*'
        else: #if role == 'subject':
            multiple = card[0] in '+*'
        return DynamicComboBoxWidget(vreg, subjschema, rschema, objschema,
                                     role=role, multiple=multiple)
    else:
        wcls = None
    # final relation (attribute): dispatch on the attribute type; the factory
    # may refine or override `wcls` (e.g. using constraints for strings)
    factory = FACTORIES.get(objschema, _default_widget_factory)
    return factory(vreg, subjschema, rschema, objschema, wcls=wcls,
                   role=role, **kwargs)
+
+
+# factories to find the most adapated widget according to a type and other constraints
+                
def _string_widget_factory(vreg, subjschema, rschema, objschema, wcls=None, **kwargs):
    """build a widget for a String attribute according to its constraints:
    a StaticVocabularyConstraint yields a combo box, a SizeConstraint picks
    between a single-line input and a text area
    """
    w = None
    for c in rschema.rproperty(subjschema, objschema, 'constraints'):
        if isinstance(c, StaticVocabularyConstraint):
            # may have been set by a previous SizeConstraint but doesn't make sense
            # here (even doesn't have the same meaning on a combobox actually)
            kwargs.pop('size', None) 
            return (wcls or StaticComboBoxWidget)(vreg, subjschema, rschema, objschema,
                                                  vocabfunc=c.vocabulary, **kwargs)
        if isinstance(c, SizeConstraint) and c.max is not None:
            # don't return here since a StaticVocabularyConstraint may
            # follow
            if wcls is None:
                # short strings fit a single-line input, longer ones get a
                # multi-line text widget
                if c.max < 257:
                    _wcls = StringWidget
                else:
                    _wcls = TextWidget
            else:
                _wcls = wcls
            _wcls.size_constraint_attrs(kwargs, c.max)
            w = _wcls(vreg, subjschema, rschema, objschema, **kwargs)
    if w is None:
        # no size constraint found: default to a (multi-line) text widget
        w = (wcls or TextWidget)(vreg, subjschema, rschema, objschema, **kwargs)
    return w
+
+def _default_widget_factory(vreg, subjschema, rschema, objschema, wcls=None, **kwargs):
+    if wcls is None:
+        wcls = _WFACTORIES[objschema]
+    return wcls(vreg, subjschema, rschema, objschema, **kwargs)
+
# widget factory functions indexed by attribute type: String uses the
# constraint-aware factory, every other type uses the default one
FACTORIES = {
    'Boolean':  _default_widget_factory,
    'Bytes':    _default_widget_factory,
    'Date':     _default_widget_factory,
    'Datetime': _default_widget_factory,
    'Decimal':  _default_widget_factory,
    'Float':    _default_widget_factory,
    'Int':      _default_widget_factory,
    'Password': _default_widget_factory,
    'String':   _string_widget_factory,
    'Time':     _default_widget_factory,
    }
+
+# default widget by entity's type
+_WFACTORIES = {
+    'Boolean':  YesNoRadioWidget,
+    'Bytes':    FileWidget,
+    'Date':     DateWidget,
+    'Datetime': DateTimeWidget,
+    'Int':      IntegerWidget,
+    'Float':    FloatWidget,
+    'Decimal':  DecimalWidget,
+    'Password': PasswordWidget,
+    'String' :  StringWidget,
+    'Time':     TimeWidget,
+    }
+    
+# widgets registry
+WIDGETS = {}
+def register(widget_list):
+    for obj in widget_list:
+        if isinstance(obj, type) and issubclass(obj, Widget):
+            if obj is Widget or obj is ComboBoxWidget:
+                continue
+            WIDGETS[obj.__name__] = obj
+
+register(globals().values())
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/wsgi/__init__.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,87 @@
+"""This package contains all WSGI specific code for cubicweb
+
+NOTE: this package borrows a lot of code from Django
+      (http://www.djangoproject.com) and to the wsgiref module
+      of the python2.5's stdlib.
+
+WSGI corresponding PEP: http://www.python.org/dev/peps/pep-0333/
+
+:organization: Logilab
+:copyright: 2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from email import message, message_from_string
+from Cookie import SimpleCookie
+from StringIO import StringIO
+from cgi import parse_header, parse_qsl
+from pprint import pformat as _pformat
+
+
def pformat(obj):
    """return a pretty-printed representation of `obj`, or a placeholder
    string if the object cannot be formatted (e.g. a broken __repr__)
    """
    try:
        return _pformat(obj)
    except Exception:
        # used from error-handling paths (CubicWebWsgiRequest.__repr__), so
        # formatting must never raise; a bare `except:` was too broad though,
        # as it also swallowed SystemExit/KeyboardInterrupt
        return u'<could not parse>'
+    
def qs2dict(qs):
    """transform a query string into a regular python dict mapping each key
    to the list of its values (blank values are kept)
    """
    params = {}
    for key, value in parse_qsl(qs, True):
        params.setdefault(key, []).append(value)
    return params
+
def normalize_header(header):
    """return a normalized header name: underscores become dashes and only
    the first letter is upper-cased

    >>> normalize_header('User_Agent')
    'User-agent'
    """
    return '-'.join(header.split('_')).capitalize()
+
def safe_copyfileobj(fsrc, fdst, length=16*1024, size=0):
    """copy at most `size` bytes from `fsrc` to `fdst`, in chunks of at most
    `length` bytes (THIS COMES FROM DJANGO)

    This is a bounded version of shutil.copyfileobj: it makes reading the
    request body safe against clients sending more than CONTENT_LENGTH bytes.
    """
    remaining = size
    if not remaining:
        return
    while remaining > 0:
        chunk = fsrc.read(min(length, remaining))
        if not chunk:
            # source exhausted before `size` bytes were read
            break
        fdst.write(chunk)
        remaining -= len(chunk)
+
def parse_file_upload(header_dict, post_data):
    """This is adapted FROM DJANGO

    parse a multipart POST body and return a (post, files) pair: `post` maps
    field names to lists of values, `files` maps field names to
    [filename, mimetype, stream] lists
    """
    # rebuild a raw MIME message so the stdlib email parser can split the
    # multipart body into submessages
    raw_message = '\r\n'.join('%s:%s' % pair for pair in header_dict.iteritems())
    raw_message += '\r\n\r\n' + post_data
    msg = message_from_string(raw_message)
    post, files = {}, {}
    for submessage in msg.get_payload():
        # Content-Disposition parameters carry the form field name and, for
        # uploads, the client-side file name
        name_dict = parse_header(submessage['Content-Disposition'])[1]
        key = name_dict['name']
        # name_dict is something like {'name': 'file', 'filename': 'test.txt'} for file uploads
        # or {'name': 'blah'} for POST fields
        # We assume all uploaded files have a 'filename' set.
        if 'filename' in name_dict:
            assert type([]) != type(submessage.get_payload()), "Nested MIME messages are not supported"
            if not name_dict['filename'].strip():
                # upload field left empty by the client
                continue
            # IE submits the full path, so trim everything but the basename.
            # (We can't use os.path.basename because that uses the server's
            # directory separator, which may not be the same as the
            # client's one.)
            filename = name_dict['filename'][name_dict['filename'].rfind("\\")+1:]
            mimetype = 'Content-Type' in submessage and submessage['Content-Type'] or None
            content = StringIO(submessage.get_payload())
            files[key] = [filename, mimetype, content]
        else:
            post.setdefault(key, []).append(submessage.get_payload())
    return post, files
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/wsgi/handler.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,195 @@
+"""WSGI request handler for cubicweb
+
+:organization: Logilab
+:copyright: 2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+
+__docformat__ = "restructuredtext en"
+
+from cubicweb import ObjectNotFound, AuthenticationError
+from cubicweb.web import (NotFound, Redirect, DirectResponse, StatusResponse,
+                       ExplicitLogin)
+from cubicweb.web.application import CubicWebPublisher
+from cubicweb.wsgi.request import CubicWebWsgiRequest
+
+# See http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html
# reason phrases by HTTP status code, used by WSGIResponse to build the
# status line required by the WSGI spec (e.g. '404 NOT FOUND')
STATUS_CODE_TEXT = {
    100: 'CONTINUE',
    101: 'SWITCHING PROTOCOLS',
    200: 'OK',
    201: 'CREATED',
    202: 'ACCEPTED',
    203: 'NON-AUTHORITATIVE INFORMATION',
    204: 'NO CONTENT',
    205: 'RESET CONTENT',
    206: 'PARTIAL CONTENT',
    300: 'MULTIPLE CHOICES',
    301: 'MOVED PERMANENTLY',
    302: 'FOUND',
    303: 'SEE OTHER',
    304: 'NOT MODIFIED',
    305: 'USE PROXY',
    306: 'RESERVED',
    307: 'TEMPORARY REDIRECT',
    400: 'BAD REQUEST',
    401: 'UNAUTHORIZED',
    402: 'PAYMENT REQUIRED',
    403: 'FORBIDDEN',
    404: 'NOT FOUND',
    405: 'METHOD NOT ALLOWED',
    406: 'NOT ACCEPTABLE',
    407: 'PROXY AUTHENTICATION REQUIRED',
    408: 'REQUEST TIMEOUT',
    409: 'CONFLICT',
    410: 'GONE',
    411: 'LENGTH REQUIRED',
    412: 'PRECONDITION FAILED',
    413: 'REQUEST ENTITY TOO LARGE',
    414: 'REQUEST-URI TOO LONG',
    415: 'UNSUPPORTED MEDIA TYPE',
    416: 'REQUESTED RANGE NOT SATISFIABLE',
    417: 'EXPECTATION FAILED',
    500: 'INTERNAL SERVER ERROR',
    501: 'NOT IMPLEMENTED',
    502: 'BAD GATEWAY',
    503: 'SERVICE UNAVAILABLE',
    504: 'GATEWAY TIMEOUT',
    505: 'HTTP VERSION NOT SUPPORTED',
}
+
+
class WSGIResponse(object):
    """encapsulate the wsgi response parameters
    (status line, headers and body if there is one)
    """
    def __init__(self, code, req, body=None):
        reason = STATUS_CODE_TEXT.get(code, 'UNKNOWN STATUS CODE')
        self.status =  '%s %s' % (code, reason)
        # WSGI requires plain (str, str) header tuples
        self.headers = [(str(name), str(value))
                        for name, value in req.headers_out.items()]
        self.body = [body] if body else []

    def __iter__(self):
        # make the response directly usable as a WSGI body iterable
        return iter(self.body)
+    
+
+
class CubicWebWSGIApplication(object):
    """This is the wsgi application which will be called by the
    wsgi server with the WSGI ``environ`` and ``start_response``
    parameters.

    XXX: missing looping tasks and proper repository shutdown when
    the application is stopped.
    NOTE: no pyro
    """

    def __init__(self, config, debug=None, vreg=None):
        # the actual cubicweb publisher doing the rendering work
        self.appli = CubicWebPublisher(config, debug=debug, vreg=vreg)
        self.debugmode = debug
        self.config = config
        # lazily initialized from the first request (see _render)
        self.base_url = None
#         self.base_url = config['base-url'] or config.default_base_url()
#         assert self.base_url[-1] == '/'
#         self.https_url = config['https-url']
#         assert not self.https_url or self.https_url[-1] == '/'
        try:
            self.url_rewriter = self.appli.vreg.select_component('urlrewriter')
        except ObjectNotFound:
            # no url rewriter component registered
            self.url_rewriter = None
        
    def _render(self, req):
        """this function performs the actual rendering
        XXX missing: https handling, url rewriting, cache management,
                     authentication

        returns a WSGIResponse in every case (normal result, redirect,
        authentication request, error page...)
        """
        if self.base_url is None:
            self.base_url = self.config._base_url = req.base_url()
        # XXX https handling needs to be implemented
        if req.authmode == 'http':
            # activate realm-based auth
            realm = self.config['realm']
            req.set_header('WWW-Authenticate', [('Basic', {'realm' : realm })], raw=False)
        try:
            self.appli.connect(req)
        except AuthenticationError:
            return self.request_auth(req)
        except Redirect, ex:
            return self.redirect(req, ex.location)
        path = req.path
        if not path or path == "/":
            # default to the index view when no path is given
            path = 'view'
        try:
            result = self.appli.publish(path, req)
        except DirectResponse, ex:
            # pre-built response (e.g. cache hit)
            return WSGIResponse(200, req, ex.response)
        except StatusResponse, ex:
            return WSGIResponse(ex.status, req, ex.content)
        except NotFound:
            result = self.appli.notfound_content(req)
            return WSGIResponse(404, req, result)
        except ExplicitLogin:  # must be before AuthenticationError
            return self.request_auth(req)
        except AuthenticationError:
            if self.config['auth-mode'] == 'cookie':
                # in cookie mode redirecting to the index view is enough :
                # either anonymous connection is allowed and the page will
                # be displayed or we'll be redirected to the login form
                msg = req._('you have been logged out')
#                 if req.https:
#                     req._base_url =  self.base_url
#                     req.https = False
                url = req.build_url('view', vid='index', __message=msg)
                return self.redirect(req, url)
            else:
                # in http we have to request auth to flush current http auth
                # information
                return self.request_auth(req, loggedout=True)
        except Redirect, ex:
            return self.redirect(req, ex.location)
        if not result:
            # no result, something went wrong...
            # NOTE: self.error/self.debug are injected by set_log_methods at
            # the end of this module
            self.error('no data (%s)', req)
            # 500 Internal server error
            return self.redirect(req, req.build_url('error'))
        return WSGIResponse(200, req, result)
        
    
    def __call__(self, environ, start_response):
        """WSGI protocol entry point"""
        req = CubicWebWsgiRequest(environ, self.appli.vreg, self.base_url)
        response = self._render(req)
        start_response(response.status, response.headers)
        return response.body

    def redirect(self, req, location):
        """convenience function which builds a redirect WSGIResponse"""
        self.debug('redirecting to %s', location)
        req.set_header('location', str(location))
        # 303 See Other
        return WSGIResponse(303, req)
        
    def request_auth(self, req, loggedout=False):
        """returns the appropriate WSGIResponse to require the user to log in
        """
#         if self.https_url and req.base_url() != self.https_url:
#             return self.redirect(self.https_url + 'login')
        if self.config['auth-mode'] == 'http':
            code = 401 # UNAUTHORIZED
        else:
            code = 403 # FORBIDDEN
        if loggedout:
#             if req.https:
#                 req._base_url =  self.base_url
#                 req.https = False
            content = self.appli.loggedout_content(req)
        else:
            content = self.appli.need_login_content(req)
        return WSGIResponse(code, req, content)
+
+
from logging import getLogger
from cubicweb import set_log_methods
# inject logging helpers on the application class -- the self.debug/self.error
# calls in CubicWebWSGIApplication presumably rely on this (NOTE(review):
# confirm against cubicweb.set_log_methods)
set_log_methods(CubicWebWSGIApplication, getLogger('cubicweb.wsgi'))
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/wsgi/request.py	Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,181 @@
+"""WSGI request adapter for cubicweb
+
+NOTE: each docstring tagged with ``COME FROM DJANGO`` means that
+the code has been taken (or adapted) from Django source code:
+  http://www.djangoproject.com/
+
+:organization: Logilab
+:copyright: 2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+
+__docformat__ = "restructuredtext en"
+
+from StringIO import StringIO
+from urllib import quote
+
+from logilab.common.decorators import cached
+
+from cubicweb.web.request import CubicWebRequestBase
+from cubicweb.wsgi import (pformat, qs2dict, safe_copyfileobj, parse_file_upload,
+                        normalize_header)
+
+
+
class CubicWebWsgiRequest(CubicWebRequestBase):
    """WSGI adapter for the cubicweb request interface

    most of this code COMES FROM DJANGO
    """
    
    def __init__(self, environ, vreg, base_url=None):
        self.environ = environ
        self.path = environ['PATH_INFO']
        self.method = environ['REQUEST_METHOD'].upper()
        # input HTTP headers with normalized names ('HTTP_USER_AGENT' -> 'User-agent')
        self._headers = dict([(normalize_header(k[5:]), v) for k, v in self.environ.items()
                              if k.startswith('HTTP_')])
        https = environ.get("HTTPS") in ('yes', 'on', '1')
        self._base_url = base_url or self.application_uri()
        post, files = self.get_posted_data()
        super(CubicWebWsgiRequest, self).__init__(vreg, https, post)
        if files is not None:
            for fdef in files.itervalues():
                # uploaded file names arrive as raw strings: decode them
                # using the request encoding
                fdef[0] = unicode(fdef[0], self.encoding)
            self.form.update(files)
        # prepare output headers
        self.headers_out = {}
        
    def __repr__(self):
        # Since this is called as part of error handling, we need to be very
        # robust against potentially malformed input.
        form = pformat(self.form)
        meta = pformat(self.environ)
        # fixed: the format string used '\F' (a literal backslash + 'F')
        # where a newline was clearly intended, cf the '\n' before META
        return '<CubicWebWsgiRequest\nFORM:%s,\nMETA:%s>' % \
            (form, meta)

    ## cubicweb request interface ################################################
    
    def base_url(self):
        """return the root url of the application"""
        return self._base_url

    def http_method(self):
        """returns 'POST', 'GET', 'HEAD', etc."""
        return self.method
    
    def relative_path(self, includeparams=True):
        """return the normalized path of the request (ie at least relative
        to the application's root, but some other normalization may be needed
        so that the returned path may be used to compare to generated urls

        :param includeparams:
           boolean indicating if GET form parameters should be kept in the path
        """
        path = self.environ['PATH_INFO']
        path = path[1:] # remove leading '/'
        if includeparams:
            qs = self.environ.get('QUERY_STRING')
            if qs:
                return '%s?%s' % (path, qs)
        return path

    def get_header(self, header, default=None):
        """return the value associated with the given input HTTP header,
        else `default` if the header is not set

        (the previous docstring claimed a KeyError was raised, but the
        implementation has always returned `default`)
        """
        return self._headers.get(normalize_header(header), default)
    
    def set_header(self, header, value, raw=True):
        """set an output HTTP header"""
        assert raw, "don't know anything about non-raw headers for wsgi requests"
        self.headers_out[header] = value

    def add_header(self, header, value):
        """add an output HTTP header

        NOTE(review): headers_out is a plain dict, so this overwrites any
        previous value for `header` rather than adding a second one
        """
        self.headers_out[header] = value
    
    def remove_header(self, header):
        """remove an output HTTP header"""
        self.headers_out.pop(header, None)

    def header_if_modified_since(self):
        """If the HTTP header If-modified-since is set, return the equivalent
        mx date time value (GMT), else return None

        XXX not implemented: always returns None for now
        """
        return None
        
    ## wsgi request helpers ###################################################
    
    def application_uri(self):
        """Return the application's base URI (no PATH_INFO or QUERY_STRING)

        see python2.5's wsgiref.util.application_uri code
        """
        environ = self.environ
        url = environ['wsgi.url_scheme'] + '://'
        if environ.get('HTTP_HOST'):
            url += environ['HTTP_HOST']
        else:
            url += environ['SERVER_NAME']
            # only append the port when it is not the scheme's default one
            if environ['wsgi.url_scheme'] == 'https':
                if environ['SERVER_PORT'] != '443':
                    url += ':' + environ['SERVER_PORT']
            else:
                if environ['SERVER_PORT'] != '80':
                    url += ':' + environ['SERVER_PORT']
        url += quote(environ.get('SCRIPT_NAME') or '/')
        return url
        
    def get_full_path(self):
        """return the request path, including the query string if any"""
        qs = self.environ.get('QUERY_STRING', '')
        return '%s%s' % (self.path, qs and ('?' + qs) or '')

    def is_secure(self):
        """return True if the request was made over https"""
        return 'wsgi.url_scheme' in self.environ \
            and self.environ['wsgi.url_scheme'] == 'https'

    def get_posted_data(self):
        """return a (post, files) pair parsed from the request

        `post` maps parameter names to lists of values; `files` maps field
        names to [filename, mimetype, stream] lists and is None unless this
        is a multipart POST
        """
        files = None
        if self.method == 'POST':
            if self.environ.get('CONTENT_TYPE', '').startswith('multipart'):
                # rebuild a header dict suitable for parse_file_upload's
                # email-based parsing
                header_dict = dict((normalize_header(k[5:]), v)
                                   for k, v in self.environ.items()
                                   if k.startswith('HTTP_'))
                header_dict['Content-Type'] = self.environ.get('CONTENT_TYPE', '')
                post, files = parse_file_upload(header_dict, self.raw_post_data)
            else:
                post = qs2dict(self.raw_post_data)
        else:
            # The WSGI spec says 'QUERY_STRING' may be absent.
            post = qs2dict(self.environ.get('QUERY_STRING', ''))
        return post, files

    @property
    @cached
    def raw_post_data(self):
        """the raw request body, read once and capped at CONTENT_LENGTH bytes"""
        buf = StringIO()
        try:
            # CONTENT_LENGTH might be absent if POST doesn't have content at all (lighttpd)
            content_length = int(self.environ.get('CONTENT_LENGTH', 0))
        except ValueError: # if CONTENT_LENGTH was empty string or not an integer
            content_length = 0
        if content_length > 0:
            safe_copyfileobj(self.environ['wsgi.input'], buf,
                    size=content_length)
        postdata = buf.getvalue()
        buf.close()
        return postdata

    def _validate_cache(self):
        """raise a `DirectResponse` exception if a cached page along the way
        exists and is still usable
        """
        # XXX cache validation not implemented yet, see the sketch below
#         if self.get_header('Cache-Control') in ('max-age=0', 'no-cache'):
#             # Expires header seems to be required by IE7
#             self.add_header('Expires', 'Sat, 01 Jan 2000 00:00:00 GMT')
#             return
#         try:
#             http.checkPreconditions(self._twreq, _PreResponse(self))
#         except http.HTTPError, ex:
#             self.info('valid http cache, no actual rendering')
#             raise DirectResponse(ex.response)
        # Expires header seems to be required by IE7
        self.add_header('Expires', 'Sat, 01 Jan 2000 00:00:00 GMT')