# HG changeset patch
# User Yann Voté
# Date 1474894332 -7200
# Node ID faf279e332980ae0abc433aba899f4128b324da1
# Parent 1817f8946c22afbc69f4e999b95b3c1a62111c92# Parent 1400aee10df4a7e8de57499e87c932990e6f9558
Merge with pyramid-cubicweb
The following tasks have been done:
- merge packaging files
- merge documentation
- move pyramid_cubicweb package at cubicweb/pyramid and update imports
accordingly
- rename tests directory into test
- move pyramid-cubicweb README.rst into README.pyramid.rst until better idea
- add a test dependency on unreleased cubicweb-pyramid to have both py27 and
py34 tests pass
Closes #14023058.
diff -r 1400aee10df4 -r faf279e33298 .hgignore
--- a/.hgignore Thu Jul 07 14:30:32 2016 +0200
+++ b/.hgignore Mon Sep 26 14:52:12 2016 +0200
@@ -1,10 +1,25 @@
-syntax: glob
-
-*.pyc
-*.swp
-
-*.egg-info
-
-docs/_build
-pyramid_cubicweb/tests/data/database
-.tox
+^build$
+^dist$
+\.egg-info$
+^.tox$
+^.cache$
+\.pyc$
+\.pyo$
+\.bak$
+\.old$
+\~$
+\#.*?\#$
+\.swp$
+^doc/book/en/apidoc$
+\.old$
+syntax: regexp
+.*/data.*/database/.*
+.*/data/ldapdb/.*
+.*/data/uicache/
+.*/data/libpython/cubicweb_.*/i18n/.*\.po
+^doc/html/
+^doc/doctrees/
+^doc/book/en/devweb/js_api/
+^doc/_build
+^doc/js_api/
+test-results.xml
diff -r 1400aee10df4 -r faf279e33298 .hgtags
--- a/.hgtags Thu Jul 07 14:30:32 2016 +0200
+++ b/.hgtags Mon Sep 26 14:52:12 2016 +0200
@@ -1,31 +1,571 @@
-7ffca3b439105a0bcca1a6fdf83710be3083a4b1 pyramid_cubicweb-version-0.1.0
-7ffca3b439105a0bcca1a6fdf83710be3083a4b1 pyramid_cubicweb-debian-version-0.1.0-1
-3d60138100d660e430e069c5d104ec927820d298 pyramid_cubicweb-version-0.1.0
-3d60138100d660e430e069c5d104ec927820d298 pyramid_cubicweb-debian-version-0.1.0-1
-9f3b9e610c3dde38a20586f217d033ed4a4e73ca pyramid_cubicweb-version-0.1.1
-9f3b9e610c3dde38a20586f217d033ed4a4e73ca pyramid_cubicweb-debian-0.1.1-1
-5eb4e27e9998fd3e0fc0e6e38ce996c5d6951be3 pyramid_cubicweb-version-0.1.2
-5eb4e27e9998fd3e0fc0e6e38ce996c5d6951be3 pyramid_cubicweb-debian-version-0.1.2-1
-0a7769e583c2499163cd3765a2a851b8a95011ec pyramid_cubicweb-debian-version-0.1.3-1
-0a7769e583c2499163cd3765a2a851b8a95011ec pyramid_cubicweb-version-0.1.3
-cd8308245d200092c228dbdd0fa8bcfec748c2fb pyramid_cubicweb-version-0.2.0
-cd8308245d200092c228dbdd0fa8bcfec748c2fb pyramid_cubicweb-debian-version-0.2.0-1
-1ae61c25299a6312acd09dc8d2f2c65e2e53f93b pyramid_cubicweb-version-0.2.1
-1ae61c25299a6312acd09dc8d2f2c65e2e53f93b pyramid_cubicweb-debian-version-0.2.1-1
-a80e076d3f42513d45eaf9a111dfa549267eace1 pyramid-cubicweb-version-0.3.0
-a80e076d3f42513d45eaf9a111dfa549267eace1 pyramid-cubicweb-debian-version-0.3.0-1
-6df91cb85eccc9d1cccd6a85d6975c73d6599d39 0.3.1
-6df91cb85eccc9d1cccd6a85d6975c73d6599d39 debian/0.3.1-1
-897a149e82087612232a835afba03e8ae6dc4e4a 0.4.0
-897a149e82087612232a835afba03e8ae6dc4e4a debian/0.4.0-1
-398b2c840e14932ce513714eb6a50ef266c70c85 0.4.1
-398b2c840e14932ce513714eb6a50ef266c70c85 debian/0.4.1-1
-61f69ac2d6bc81f35988e359f37601bccdf751d5 0.5.0
-61f69ac2d6bc81f35988e359f37601bccdf751d5 debian/0.5.0-1
-94d1a024b3bfd3eef2141c45e4a627935d241f7c 0.6.0
-94d1a024b3bfd3eef2141c45e4a627935d241f7c debian/0.6.0-1
-2621daafa10cb6f5dc2d8d962f16afc2a9f56de4 0.6.1
-2621daafa10cb6f5dc2d8d962f16afc2a9f56de4 debian/0.6.1-1
-0cf2972d2011b5aa10f9d9222adad08c137657d9 0.7.0
-0cf2972d2011b5aa10f9d9222adad08c137657d9 debian/0.7.0-1
-0cf2972d2011b5aa10f9d9222adad08c137657d9 centos/0.7.0-1
+8a9dac5a67a67e6342728c88085a38ff190f790b cubicweb-version-2_99_1
+8280fa6906f38bb51f5652c4d835ae1c613ad0a8 cubicweb-version-2_99_3
+aa7d98f8f07f94afe750769640161f948a8a0d07 cubicweb-debian-version-2_99_3-1
+76c1707200cfc5222672d1a5337b5e4b0b27bb0f cubicweb-version-2_99_4
+eaf1089ae49d19cd8ae81732ff20e1003af0a238 cubicweb-debian-version-2_99_4-1
+68b7a92468857ef741b91f3422ef14d78e3f5491 cubicweb-version-2_99_5
+91793edd3b2fb11320b5421895296331da952524 cubicweb-debian-version-2_99_5-1
+5b58fd1aba50680e261857dcc47ede0d1c2ecc59 cubicweb-version-2_99_6
+c718f6495ace1e33831862f221e1b93ee0b386e2 cubicweb-debian-version-2_99_6-1
+a93da6d88d4ee493ec6bc8852f3ee921af8324ad cubicweb-version_3_0_0
+221886d8e3c84bb31937e24398a783f27cd350dd cubicweb-version-3_0_2
+18d3e56c1de4a6597ab36536964bc66b5550cf98 cubicweb-debian-version-3_0_2-1
+0cb027c056f939ec3580ea1cc0aeda9f9884f0fa cubicweb-version-3_0_3
+a736bae56d4a703a26933fb9875fb9caab216c6b cubicweb-debian-version-3_0_3-1
+2e400b8dfc25ae30db602f64601e30e210b3fade cubicweb-version-3_0_4
+fc222bc99929d395c1c2235c40d3bb6f247b4ba9 cubicweb-debian-version-3_0_4-1
+7ad527099393ef56f27af313392022bb8ed73082 cubicweb-version-3_0_9
+a8e9e53b245d53838a07aa8c76d1bed352692a9f cubicweb-debian-version-3_0_9-1
+a711c7c185d15a1bd22b7eaab46a26b98b74fbf3 cubicweb-version-3_1_0
+dd3efdf58d281286d6f52f7416db349b75b7789c cubicweb-debian-version-3_1_0-1
+ce8094084165419ff1717bdb2f426574bcaaad93 cubicweb-version-3_1_1
+dfaedb0bba88e3a4e931948bb0c6a9587269303f cubicweb-debian-version-3_1_1-1
+a9ba200ab15098704a6255387c558c02488551c6 cubicweb-version-3_1_2
+a823124b812f4fa494bfceb773f3ca1cd00407e8 cubicweb-debian-version-3_1_2-1
+a5dc91adb7c133f83f5ad9cceb07bc246d21ed01 cubicweb-version-3_1_3
+9e98dec0768b87363a7826a04636dc161ed0ec7d cubicweb-debian-version-3_1_3-1
+e0e0a1c3d80f4fbf4bbd55066278e467b75df8a4 cubicweb-version-3_1_4
+0e132fbae9cc5e004f4b79a8b842addad43519a7 cubicweb-debian-version-3_1_4-1
+c14231e3a4f9120e2bb6a1d8690252fff5e48131 cubicweb-version-3_2_0
+c9c492787a8aa1b7916e22eb6498cba1c8fa316c cubicweb-debian-version-3_2_0-1
+634c251dd032894850080c4e5aeb0a4e09f888c0 cubicweb-version-3_2_1
+e784f8847a124a93e5b385d7a92a2772c050fe82 cubicweb-debian-version-3_2_1-1
+9b21e068fef73c37bcb4e53d006a7bde485f390b cubicweb-version-3_2_2
+0e07514264aa1b0b671226f41725ea4c066c210a cubicweb-debian-version-3_2_2-1
+f60bb84b86cf371f1f25197e00c778b469297721 cubicweb-version-3_2_3
+4003d24974f15f17bd03b7efd6a5047cad4e4c41 cubicweb-debian-version-3_2_3-1
+2d7d3062ca03d4b4144100013dc4ab7f9d9cb25e cubicweb-version-3_3_0
+07214e923e75c8f0490e609e9bee0f4964b87114 cubicweb-debian-version-3_3_0-1
+a356da3e725bfcb59d8b48a89d04be05ea261fd3 3.3.1
+e3aeb6e6c3bb5c18e8dcf61bae9d654beda6c036 cubicweb-version-3_3_2
+bef5e74e53f9de8220451dca4b5863a24a0216fb cubicweb-debian-version-3_3_2-1
+47b5236774a0cf3b1cfe75f6d4bd2ec989644ace cubicweb-version-3_3_3
+81973c897c9e78e5e52643e03628654916473196 cubicweb-debian-version-3_3_3-1
+2ba27ce8ecd9828693ec53c517e1c8810cbbe33e cubicweb-debian-version-3_3_3-2
+d46363eac5d71bc1570d69337955154dfcd8fcc8 cubicweb-version-3.3.4
+7dc22caa7640bf70fcae55afb6d2326829dacced cubicweb-debian-version-3.3.4-1
+d85937184430b2e7a8d9318100272757026c01a4 cubicweb-version-3.3.5
+82fe8a6d077b34e972664c8e2bace3ae30c94b9d cubicweb-debian-version-3.3.5-1
+cfcc7f6121d6c6b53a1ef9bc96c3b7c82f33d169 cubicweb-version-3.4.2
+e6a8cd8cc910507e5f928ddba6de3cf5e5bdf3d4 cubicweb-debian-version-3.4.2-1
+b2add17d5bc437a807976cd13870f92d3a94f2a6 cubicweb-version-3.4.3
+a3828745e2cf0c4bf1cab6f5397c524ba0510df6 cubicweb-debian-version-3.4.3-1
+2aee4ea585cdba159bc9490741db3fcd25dcaaca cubicweb-version-3.4.4
+e244a0fd7d719c25f4267470342ff8334b2dc8b3 cubicweb-debian-version-3.4.4-1
+3a65f9b3367c7297dc540a53f84e6507cb309892 cubicweb-version-3.4.5
+7fd294cbf6ff3cf34475cc50e972f650a34ae6e8 cubicweb-debian-version-3.4.5-1
+921fdbf8b3038dc27a2ec5398a0fbcbc5b9ba4be cubicweb-version-3.4.6
+52dba800ca4d4b82c47f3befb824bd91ef015368 cubicweb-debian-version-3.4.6-1
+0e549b299f0b357837ea620c561aa843f46de17a cubicweb-version-3.4.7
+ebb92e62eb040a070deb1f2d2434734cfac3af01 cubicweb-debian-version-3.4.7-1
+ba43e084e8841f62c3b4c2449b26a7546233e5fb cubicweb-version-3.4.8
+97273eeaaead11c0f422dc5a4fe2d4f14fc6a2dd cubicweb-debian-version-3.4.8-1
+e916f1e856c83aced0fe73f7ae9068e37edcc38c cubicweb-version-3.4.9
+24ea70f19a48cce60248ab18695925755009bcb8 cubicweb-debian-version-3.4.9-1
+f3d2adf483320d7726136433a41c57b130cbdc15 cubicweb-version-3.4.11
+635a25031f4abdd89c44d17f5d2b0d0d43914511 cubicweb-debian-version-3.4.11-1
+70c0dd1c3b7d747c3a268396a7f79d9a7a3340e6 cubicweb-version-3.5.0
+7e5d0ae8d2026c77f12ab512a4cde9911dcd8896 cubicweb-debian-version-3.5.0-1
+77ed72f3c2602bf300929f8863447653ce1beb0c cubicweb-version-3.5.1
+f476cecd46904f215bd29249ded8508d8f5634d7 cubicweb-debian-version-3.5.1-1
+1f0aa3cd5af2c92df8f9695773b8e465eb6f1795 cubicweb-version-3.5.2
+75cc4aa76fb12c06d4190956aa050cdf19ba4d8f cubicweb-debian-version-3.5.2-1
+540210e138d323e5224d7b08cbd71f5a23ed630d cubicweb-version-3.5.3
+2e22b975f9c23aebfe3e0a16a798c3fe81fa2a82 cubicweb-debian-version-3.5.3-1
+312349b3712e0a3e32247e03fdc7408e17bd19de cubicweb-version-3.5.4
+37d025b2aa7735dae4a861059014c560b45b19e6 cubicweb-debian-version-3.5.4-1
+1eca47d59fd932fe23f643ca239cf2408e5b1856 cubicweb-version-3.5.5
+aad818d9d9b6fdb2ffea56c0a9af718c0b69899d cubicweb-debian-version-3.5.5-1
+4e619e97b3fd70769a0f454963193c10cb87f9d4 cubicweb-version-3.5.6
+5f7c939301a1b915e17eec61c05e8e9ab8bdc182 cubicweb-debian-version-3.5.6-1
+0fc300eb4746e01f2755b9eefd986d58d8366ccf cubicweb-version-3.5.7
+7a96c0544c138a0c5f452e5b2428ce6e2b7cb378 cubicweb-debian-version-3.5.7-1
+1677312fd8a3e8c0a5ae083e3104ca62b7c9a5bb cubicweb-version-3.5.9
+d7f2d32340fb59753548ef29cbc1958ef3a55fc6 cubicweb-debian-version-3.5.9-1
+9b52725d8c534ba40877457b413077a10173bf88 cubicweb-version-3.5.10
+dfe2f245248c97bea3a29c8ecc6d293e25ff708e cubicweb-debian-version-3.5.10-1
+f48b2f193961803cf42147272671a335a2daeceb cubicweb-version-3.5.11
+4920121d41f28c8075a4f00461911677396fc566 cubicweb-debian-version-3.5.11-1
+98af3d02b83e7635207781289cc3445fb0829951 cubicweb-version-3.5.12
+4281e1e2d76b9a37f38c0eeb1cbdcaa2fac6533c cubicweb-debian-version-3.5.12-1
+450804da3ab2476b7ede0c1f956235b4c239734f cubicweb-version-3.6.0
+d2ba93fcb8da95ceab08f48f8149a480215f149c cubicweb-debian-version-3.6.0-1
+4ae30c9ca11b1edad67d25b76fce672171d02023 cubicweb-version-3.6.1
+b9cdfe3341d1228687515d9af8686971ad5e6f5c cubicweb-debian-version-3.6.1-1
+0a16f07112b90fb61d2e905855fece77e5a7e39c cubicweb-debian-version-3.6.1-2
+bfebe3d14d5390492925fc294dfdafad890a7104 cubicweb-version-3.6.2
+f3b4bb9121a0e7ee5961310ff79e61c890948a77 cubicweb-debian-version-3.6.2-1
+9c342fa4f1b73e06917d7dc675949baff442108b cubicweb-version-3.6.3
+f9fce56d6a0c2bc6c4b497b66039a8bbbbdc8074 cubicweb-debian-version-3.6.3-1
+270aba1e6fa21dac6b070e7815e6d1291f9c87cd cubicweb-version-3.7.0
+0c9ff7e496ce344b7e6bf5c9dd2847daf9034e5e cubicweb-debian-version-3.7.0-1
+6b0832bbd1daf27c2ce445af5b5222e1e522fb90 cubicweb-version-3.7.1
+9194740f070e64da5a89f6a9a31050a8401ebf0c cubicweb-debian-version-3.7.1-1
+d010f749c21d55cd85c5feb442b9cf816282953c cubicweb-version-3.7.2
+8fda29a6c2191ba3cc59242c17b28b34127c75fa cubicweb-debian-version-3.7.2-1
+768beb8e15f15e079f8ee6cfc35125e12b19e140 cubicweb-version-3.7.3
+44c7bf90df71dd562e5a7be5ced3019da603d24f cubicweb-debian-version-3.7.3-1
+ec23f3ebcd34a92b9898b312f44d56cca748d0d6 cubicweb-version-3.7.4
+fefeda65bb83dcc2d775255fe69fdee0e793d135 cubicweb-debian-version-3.7.4-1
+c476d106705ebdd9205d97e64cafa72707acabe7 cubicweb-version-3.7.5
+2d0982252e8d780ba964f293a0e691d48070db6d cubicweb-debian-version-3.7.5-1
+3c703f3245dc7696341ae1d66525554d9fa2d11d cubicweb-version-3.8.0
+24cc65ab2eca05729d66cef3de6f69bb7f9dfa35 cubicweb-debian-version-3.8.0-1
+1e074c6150fe00844160986852db364cc5992848 cubicweb-version-3.8.1
+eb972d125eefd0de2d0743e95c6e1f4e3e93e4c1 cubicweb-debian-version-3.8.1-1
+ef2e37d34013488a2018e73338fbbfbde5901c5c cubicweb-version-3.8.2
+2b962bb9eee8ee7156a12cf137428c292f8e3b35 cubicweb-debian-version-3.8.2-1
+7e6c6a2a272d0a95fd42248f3125e45185f0eef1 cubicweb-version-3.8.3
+1ccaa924786047be66b44f6dbc76e6631f56b04a cubicweb-debian-version-3.8.3-1
+d00d1fab42afec8607fc84d862becfd7f58850f1 cubicweb-version-3.8.4
+b7883287f40c853e8278edc3f24326f2c9549954 cubicweb-debian-version-3.8.4-1
+2de32c0c293ba451b231efe77d6027376af3a2a3 cubicweb-version-3.8.5
+5d05b08adeab1ea301e49ed8537e35ede6db92f6 cubicweb-debian-version-3.8.5-1
+1a24c62aefc5e57f61be3d04affd415288e81904 cubicweb-version-3.8.6
+607a90073911b6bb941a49b5ec0b0d2a9cd479af cubicweb-debian-version-3.8.6-1
+a1a334d934390043a4293a4ee42bdceb1343246e cubicweb-version-3.8.7
+1cccf88d6dfe42986e1091de4c364b7b5814c54f cubicweb-debian-version-3.8.7-1
+48f468f33704e401a8e7907e258bf1ac61eb8407 cubicweb-version-3.9.x
+d9936c39d478b6701a4adef17bc28888ffa011c6 cubicweb-version-3.9.0
+eda4940ffef8b7d36127e68de63a52388374a489 cubicweb-debian-version-3.9.0-1
+4d75f743ed49dd7baf8bde7b0e475244933fa08e cubicweb-version-3.9.1
+9bd75af3dca36d7be5d25fc5ab1b89b34c811456 cubicweb-debian-version-3.9.1-1
+e51796b9caf389c224c6f66dcb8aa75bf1b82eff cubicweb-version-3.9.2
+8a23821dc1383e14a7e92a931b91bc6eed4d0af7 cubicweb-debian-version-3.9.2-1
+900772fd9caaf068eb2fdd4544b03efec91901e6 cubicweb-version-3.9.3
+ab1f9686ff3e0843b570b98f89fb5ccc8d7dec8c cubicweb-debian-version-3.9.3-1
+6cebb361dcb27ded654426b4c82f6401c862e034 cubicweb-version-3.9.4
+8d32d82134dc1d8eb0ce230191f34fd49084a168 cubicweb-debian-version-3.9.4-1
+0a1fce8ddc672ca9ee7328ed4f88c1aa6e48d286 cubicweb-version-3.9.5
+12038ca95f0fff2205f7ee029f5602d192118aec cubicweb-debian-version-3.9.5-1
+d37428222a6325583be958d7c7fe7c595115663d cubicweb-version-3.9.6
+7d2cab567735a17cab391c1a7f1bbe39118308a2 cubicweb-debian-version-3.9.6-1
+de588e756f4fbe9c53c72159c6b96580a36d3fa6 cubicweb-version-3.9.7
+1c01f9dffd64d507863c9f8f68e3585b7aa24374 cubicweb-debian-version-3.9.7-1
+eed788018b595d46a55805bd8d2054c401812b2b cubicweb-version-3.9.8
+e4dba8ae963701a36be94ae58c790bc97ba029bb cubicweb-debian-version-3.9.8-1
+df0b2de62cec10c84a2fff5233db05852cbffe93 cubicweb-version-3.9.9
+1ba51b00fc44faa0d6d57448000aaa1fd5c6ab57 cubicweb-debian-version-3.9.9-1
+b7db1f59355832a409d2032e19c84cfffdb3b265 cubicweb-debian-version-3.9.9-2
+09c98763ae9d43616d047c1b25d82b4e41a4362f cubicweb-debian-version-3.9.9-3
+a62f24e1497e953fbaed5894f6064a64f7ac0be3 cubicweb-version-3.10.x
+0793fe84651be36f8de9b4faba3781436dc07be0 cubicweb-version-3.10.0
+9ef1347f8d99e7daad290738ef93aa894a2c03ce cubicweb-debian-version-3.10.0-1
+6c6859a676732c845af69f92e74d4aafae12f83a cubicweb-version-3.10.1
+3abb41c47925f8fc6e327164d0ceca3773503ef9 cubicweb-debian-version-3.10.1-1
+3fc6b4aaaff301e482a92c61e39789621bd7ed3b cubicweb-version-3.10.2
+4a87c8af6f3ffe59c6048ebbdc1b6b204d0b9c7f cubicweb-debian-version-3.10.2-1
+8eb58d00a0cedcf7b275b1c7f43b08e2165f655c cubicweb-version-3.10.3
+303b150ebb7a92b2904efd52b446457999cab370 cubicweb-debian-version-3.10.3-1
+d73733479a3af453f06b849ed88d120784ce9224 cubicweb-version-3.10.4
+7b41930e1d32fea3989a85f6ea7281983300adb1 cubicweb-debian-version-3.10.4-1
+159d0dbe07d9eb1c6ace4c5e160d1ec6e6762086 cubicweb-version-3.10.5
+e2e7410e994777589aec218d31eef9ff8d893f92 cubicweb-debian-version-3.10.5-1
+3c81dbb58ac4d4a6f61b74eef4b943a8316c2f42 cubicweb-version-3.10.6
+1484257fe9aeb29d0210e635c12ae5b3d6118cfb cubicweb-debian-version-3.10.6-1
+1959d97ebf2e6a0f7cd05d4cc48bb955c4351da5 cubicweb-version-3.10.7
+bf5d9a1415e3c9abe6b68ba3b24a8ad741f9de3c cubicweb-debian-version-3.10.7-1
+e581a86a68f089946a98c966ebca7aee58a5718f cubicweb-version-3.10.8
+132b525de25bc75ed6389c45aee77e847cb3a437 cubicweb-debian-version-3.10.8-1
+37432cede4fe55b97fc2e9be0a2dd20e8837a848 cubicweb-version-3.11.0
+8daabda9f571863e8754f8ab722744c417ba3abf cubicweb-debian-version-3.11.0-1
+d0410eb4d8bbf657d7f32b0c681db09b1f8119a0 cubicweb-version-3.11.1
+77318f1ec4aae3523d455e884daf3708c3c79af7 cubicweb-debian-version-3.11.1-1
+56ae3cd5f8553678a2b1d4121b61241598d0ca68 cubicweb-version-3.11.2
+954b5b51cd9278eb45d66be1967064d01ab08453 cubicweb-debian-version-3.11.2-1
+b7a124f9aed2c7c9c86c6349ddd9f0a07023f0ca cubicweb-version-3.11.3
+b3c6702761a18a41fdbb7bc1083f92aefce07765 cubicweb-debian-version-3.11.3-1
+fd502219eb76f4bfd239d838a498a1d1e8204baf cubicweb-version-3.12.0
+92b56939b7c77bbf443b893c495a20f19bc30702 cubicweb-debian-version-3.12.0-1
+59701627adba73ee97529f6ea0e250a0f3748e32 cubicweb-version-3.12.1
+07e2c9c7df2617c5ecfa84cb819b3ee8ef91d1f2 cubicweb-debian-version-3.12.1-1
+5a9b6bc5653807500c30a7eb0e95b90fd714fec3 cubicweb-version-3.12.2
+6d418fb3ffed273562aae411efe323d5138b592a cubicweb-debian-version-3.12.2-1
+e712bc6f1f71684f032bfcb9bb151a066c707dec cubicweb-version-3.12.3
+ba8fe4f2e408c3fdf6c297cd42c2577dcac50e71 cubicweb-debian-version-3.12.3-1
+5cd0dbc26882f60e3f11ec55e7f058d94505e7ed cubicweb-version-3.12.4
+7c4d34a5ec57f927a70cbc7af7fa8310c847ac42 cubicweb-debian-version-3.12.4-1
+ae33197db1f08d42c9df49563f7b15246f8c8e58 cubicweb-version-3.12.5
+6dfe78a0797ccc34962510f8c2a57f63d65ce41e cubicweb-debian-version-3.12.5-1
+a18dac758150fe9c1f9e4958d898717c32a8f679 cubicweb-version-3.12.6
+105767487c7075dbcce36474f1af0485985cbf2c cubicweb-debian-version-3.12.6-1
+628fe57ce746c1dac87fb1b078b2026057df894e cubicweb-version-3.12.7
+a07517985136bbbfa6610c428a1b42cd04cd530b cubicweb-debian-version-3.12.7-1
+50122a47ce4fb2ecbf3cf20ed2777f4276c93609 cubicweb-version-3.12.8
+cf49ed55685a810d8d73585330ad1a57cc76260d cubicweb-debian-version-3.12.8-1
+cb2990aaa63cbfe593bcf3afdbb9071e4c76815a cubicweb-version-3.12.9
+92464e39134c70e4ddbe6cd78a6e3338a3b88b05 cubicweb-debian-version-3.12.9-1
+074c848a3712a77737d9a1bfbb618c75f5c0cbfa cubicweb-version-3.12.10
+9dfd21fa0a8b9f121a08866ad3e2ebd1dd06790d cubicweb-debian-version-3.12.10-1
+b661ef475260ca7d9ea5c36ba2cc86e95e5b17d3 cubicweb-version-3.13.0
+a96137858f571711678954477da6f7f435870cea cubicweb-debian-version-3.13.0-1
+7d84317ef185a10c5eb78e6086f2297d2f4bd1e3 cubicweb-version-3.13.1
+cc0578049cbe8b1d40009728e36c17e45da1fc6b cubicweb-debian-version-3.13.1-1
+f9227b9d61835f03163b8133a96da35db37a0c8d cubicweb-version-3.13.2
+9ad5411199e00b2611366439b82f35d7d3285423 cubicweb-debian-version-3.13.2-1
+0e82e7e5a34f57d7239c7a42e48ba4d5e53abab2 cubicweb-version-3.13.3
+fb48c55cb80234bc0164c9bcc0e2cfc428836e5f cubicweb-debian-version-3.13.3-1
+223ecf0620b6c87d997f8011aca0d9f0ee4750af cubicweb-version-3.13.4
+52f26475d764129c5559b2d80fd57e6ea1bdd6ba cubicweb-debian-version-3.13.4-1
+20d9c550c57eb6f9adcb0cfab1c11b6b8793afb6 cubicweb-version-3.13.5
+2e9dd7d945557c210d3b79153c65f6885e755315 cubicweb-debian-version-3.13.5-1
+17c007ad845abbac82e12146abab32a634657574 cubicweb-version-3.13.6
+8a8949ca5351d48c5cf795ccdff06c1d4aab2ce0 cubicweb-debian-version-3.13.6-1
+68e8c81fa96d6bcd21cc17bc9832d388ce05a9eb cubicweb-version-3.13.7
+2f93ce32febe2f82565994fbd454f331f76ca883 cubicweb-debian-version-3.13.7-1
+249bd41693392d4716686f05c6b84628cd14dfcd cubicweb-version-3.13.8
+43f83f5d0a4d57a06e9a4990bc957fcfa691eec3 cubicweb-debian-version-3.13.8-1
+07afe32945aa275052747f78ef1f55858aaf6fa9 cubicweb-version-3.13.9
+0a3cb5e60d57a7a9851371b4ae487094ec2bf614 cubicweb-debian-version-3.13.9-1
+2ad4e5173c73a43804c265207bcabb8940bd42f4 cubicweb-version-3.13.10
+2eab9a5a6bf8e3b0cf706bee8cdf697759c0a33a cubicweb-debian-version-3.13.10-1
+5c4390eb10c3fe76a81e6fccec109d7097dc1a8d cubicweb-version-3.14.0
+0bfe22fceb383b46d62b437bf5dd0141a714afb8 cubicweb-debian-version-3.14.0-1
+793d2d327b3ebf0b82b2735cf3ccb86467d1c08a cubicweb-version-3.14.1
+6928210da4fc25d086b5b8d5ff2029da41aade2e cubicweb-debian-version-3.14.1-1
+049a3819f03dc79d803be054cc3bfe8425313f63 cubicweb-version-3.14.2
+17ebd836cee30a9f690e83af7ce98287a7216d57 cubicweb-debian-version-3.14.2-1
+60efdbb455204899103c30bfa8d805c1b15161f6 cubicweb-version-3.14.3
+4d0f5d18e8a07ab218efe90d758af723ea4a1b2b cubicweb-debian-version-3.14.3-1
+508645a542870cb0def9c43056e5084ff8def5ca cubicweb-version-3.14.4
+bc40991b7f13642d457f5ca80ac1486c29e25a6e cubicweb-debian-version-3.14.4-1
+4c8cb2e9d0ee13af1d584e2920d1ae76f47380e9 cubicweb-debian-version-3.14.4-2
+f559ab9602e7eeb4996ac0f83d544a6e0374e204 cubicweb-version-3.14.5
+55fc796ed5d5f31245ae60bd148c9e42657a1af6 cubicweb-debian-version-3.14.5-1
+db021578232b885dc5e55dfca045332ce01e7f35 cubicweb-version-3.14.6
+75364c0994907764715bd5011f6a59d934dbeb7d cubicweb-debian-version-3.14.6-1
+3ba3ee5b3a89a54d1dc12ed41d5c12232eda1952 cubicweb-version-3.14.7
+20ee573bd2379a00f29ff27bb88a8a3344d4cdfe cubicweb-debian-version-3.14.7-1
+15fe07ff687238f8cc09d8e563a72981484085b3 cubicweb-version-3.14.8
+81394043ad226942ac0019b8e1d4f7058d67a49f cubicweb-debian-version-3.14.8-1
+9337812cef6b949eee89161190e0c3d68d7f32ea cubicweb-version-3.14.9
+68c762adf2d5a2c338910ef1091df554370586f0 cubicweb-debian-version-3.14.9-1
+0ff798f80138ca8f50a59f42284380ce8f6232e8 cubicweb-version-3.14.10
+197bcd087c87cd3de9f21f5bf40bd6203c074f1f cubicweb-debian-version-3.14.10-1
+0642b2d03acaa5e065cae7590e82b388a280ca22 cubicweb-version-3.15.0
+925db25a3250c5090cf640fc2b02bde5818b9798 cubicweb-debian-version-3.15.0-1
+783a5df54dc742e63c8a720b1582ff08366733bd cubicweb-version-3.15.1
+fe5e60862b64f1beed2ccdf3a9c96502dfcd811b cubicweb-debian-version-3.15.1-1
+2afc157ea9b2b92eccb0f2d704094e22ce8b5a05 cubicweb-version-3.15.2
+9aa5553b26520ceb68539e7a32721b5cd5393e16 cubicweb-debian-version-3.15.2-1
+0e012eb80990ca6f91aa9a8ad3324fbcf51435b1 cubicweb-version-3.15.3
+7ad423a5b6a883dbdf00e6c87a5f8ab121041640 cubicweb-debian-version-3.15.3-1
+63260486de89a9dc32128cd0eacef891a668977b cubicweb-version-3.15.4
+70cb36c826df86de465f9b69647cef7096dcf12c cubicweb-debian-version-3.15.4-1
+b0e086f451b7213fe63141438edc91a6b2da9072 cubicweb-version-3.15.5
+19e115ae5442c427c0adbda8b9d8ceccf2931b5c cubicweb-debian-version-3.15.5-1
+0163bd9f4880d5531e433c1500f9298a0adef6b7 cubicweb-version-3.15.6
+b05e156b8fe720494293b08e7060ba43ad57a5c8 cubicweb-debian-version-3.15.6-1
+d8916cee7b705fec66fa2797ab89ba3e3b617ced cubicweb-version-3.15.7
+c5400558f37079a8bf6f2cd27a1ffd49321f3d8b cubicweb-debian-version-3.15.7-1
+459d0c48dfafee903c15a5349d321f6e8f998cbb cubicweb-version-3.15.8
+4ef457479337396f63bf00c87cedcbb7cb5a6eee cubicweb-debian-version-3.15.8-1
+8bfc0753f1daa37a6a268287dd2848931fca1f95 cubicweb-version-3.15.9
+29fbc632a69667840294d7b38b0ca00e5f66ec19 cubicweb-debian-version-3.15.9-1
+89bdb5444cd20213d5af03c2612ceb28340cb760 cubicweb-version-3.15.10
+feca12e4a6188fbaae0cc48c6f8cc5f4202e1662 cubicweb-debian-version-3.15.10-1
+38c6a3ea8252e1a40452aad05e4aa25b43f66cd8 cubicweb-version-3.15.11
+09d65bc1f0253eacef4b480716b7c139ab9efc35 cubicweb-debian-version-3.15.11-1
+6c7c2a02c9a0ca870accfc8ed1bb120e9c858d5d cubicweb-version-3.16.0
+853237d1daf6710af94cc2ec8ee12aa7dba16934 cubicweb-debian-version-3.16.0-1
+d95cbb7349f01b9e02e5da65d55a92582bbee6db cubicweb-version-3.16.1
+84fbcdc8021c9c198fef3c6a9ad90c298ee12566 cubicweb-debian-version-3.16.1-1
+a2b4f245aa57013cf8bbcfa2f3d021ee04bccfa0 cubicweb-version-3.16.2
+b3c1ad0cbf001883058ab82be9474544a31b5294 cubicweb-debian-version-3.16.2-1
+ee860c51f56bd65c4f6ea363462c02700d1dab5a cubicweb-version-3.16.3
+ee860c51f56bd65c4f6ea363462c02700d1dab5a cubicweb-debian-version-3.16.3-1
+ee860c51f56bd65c4f6ea363462c02700d1dab5a cubicweb-centos-version-3.16.3-1
+041804bc48e91e440a5b573ceb0df5bf22863b80 cubicweb-version-3.16.4
+041804bc48e91e440a5b573ceb0df5bf22863b80 cubicweb-centos-version-3.16.4-1
+041804bc48e91e440a5b573ceb0df5bf22863b80 cubicweb-debian-version-3.16.4-1
+810a05fba1a46ab893b6cadac109097a047f8355 cubicweb-version-3.16.5
+810a05fba1a46ab893b6cadac109097a047f8355 cubicweb-debiann-version-3.16.5-1
+810a05fba1a46ab893b6cadac109097a047f8355 cubicweb-centos-version-3.16.5-1
+b4ccaf13081d2798c0414d002e743cb0bf6d81f8 cubicweb-version-3.16.6
+b4ccaf13081d2798c0414d002e743cb0bf6d81f8 cubicweb-centos-version-3.16.6-1
+b4ccaf13081d2798c0414d002e743cb0bf6d81f8 cubicweb-debian-version-3.16.6-1
+cc1a0aad580cf93d26959f97d8d6638e786c1082 cubicweb-version-3.17.0
+22be40c492e9034483bfec379ca11462ea97825b cubicweb-debian-version-3.17.0-1
+09a0c7ea6c3cb97bbbeed3795b3c3715ceb9566b cubicweb-debian-version-3.17.0-2
+f98d1c46ed9fd5db5262cf5be1c8e159c90efc8b cubicweb-version-3.17.1
+f98d1c46ed9fd5db5262cf5be1c8e159c90efc8b cubicweb-debian-version-3.17.1-1
+f98d1c46ed9fd5db5262cf5be1c8e159c90efc8b cubicweb-centos-version-3.17.1-1
+965f894b63cb7c4456acd82257709f563bde848f cubicweb-centos-version-3.17.1-2
+195e519fe97c8d1a5ab5ccb21bf7c88e5801b657 cubicweb-version-3.17.2
+195e519fe97c8d1a5ab5ccb21bf7c88e5801b657 cubicweb-debian-version-3.17.2-1
+32b4d5314fd90fe050c931886190f9a372686148 cubicweb-version-3.17.3
+32b4d5314fd90fe050c931886190f9a372686148 cubicweb-debian-version-3.17.3-1
+32b4d5314fd90fe050c931886190f9a372686148 cubicweb-centos-version-3.17.3-1
+c7ba8e5d2e45e3d1289c1403df40d7dcb5e62acb cubicweb-version-3.17.4
+c7ba8e5d2e45e3d1289c1403df40d7dcb5e62acb cubicweb-centos-version-3.17.4-1
+c7ba8e5d2e45e3d1289c1403df40d7dcb5e62acb cubicweb-debian-version-3.17.4-1
+15dd5b37998b8ef5e8fab1ea0491e6bd8e9f3355 cubicweb-version-3.17.5
+15dd5b37998b8ef5e8fab1ea0491e6bd8e9f3355 cubicweb-centos-version-3.17.5-1
+15dd5b37998b8ef5e8fab1ea0491e6bd8e9f3355 cubicweb-debian-version-3.17.5-1
+5b9fedf67a2912a80fe315a477df9e3ab104c734 cubicweb-version-3.17.6
+5b9fedf67a2912a80fe315a477df9e3ab104c734 cubicweb-centos-version-3.17.6-1
+5b9fedf67a2912a80fe315a477df9e3ab104c734 cubicweb-debian-version-3.17.6-1
+483181543899a762d068cfdc3ae751b54adc3f14 cubicweb-version-3.17.7
+483181543899a762d068cfdc3ae751b54adc3f14 cubicweb-centos-version-3.17.7-1
+483181543899a762d068cfdc3ae751b54adc3f14 cubicweb-debian-version-3.17.7-1
+909eb8b584c437b3d2580beff1325c3d5b5dcfb5 cubicweb-version-3.17.8
+909eb8b584c437b3d2580beff1325c3d5b5dcfb5 cubicweb-debian-version-3.17.8-1
+909eb8b584c437b3d2580beff1325c3d5b5dcfb5 cubicweb-centos-version-3.17.8-1
+5668d210e49c910180ff27712b6ae9ce8286e06c cubicweb-version-3.17.9
+5668d210e49c910180ff27712b6ae9ce8286e06c cubicweb-debian-version-3.17.9-1
+fe0e1863a13772836f40f743cc6fe4865f288ed3 cubicweb-version-3.17.10
+fe0e1863a13772836f40f743cc6fe4865f288ed3 cubicweb-debian-version-3.17.10-1
+fe0e1863a13772836f40f743cc6fe4865f288ed3 cubicweb-centos-version-3.17.10-1
+7f67db7c848ec20152daf489d9e11f0fc8402e9b cubicweb-version-3.17.11
+7f67db7c848ec20152daf489d9e11f0fc8402e9b cubicweb-centos-version-3.17.11-1
+7f67db7c848ec20152daf489d9e11f0fc8402e9b cubicweb-debian-version-3.17.11-1
+b02e2912cad5d80395e488c55b548495e8320198 cubicweb-debian-version-3.17.11-2
+838d58a30f7efc6a8f83ac27ae8de7d79b84b2bb cubicweb-version-3.17.12
+838d58a30f7efc6a8f83ac27ae8de7d79b84b2bb cubicweb-centos-version-3.17.12-1
+838d58a30f7efc6a8f83ac27ae8de7d79b84b2bb cubicweb-debian-version-3.17.12-1
+09b4ebb9b0f179009491410c07cd013a60258fc6 cubicweb-version-3.17.13
+09b4ebb9b0f179009491410c07cd013a60258fc6 cubicweb-centos-version-3.17.13-1
+09b4ebb9b0f179009491410c07cd013a60258fc6 cubicweb-debian-version-3.17.13-1
+fa00fc251d57f61e619d9c905502745fae21c58c cubicweb-version-3.17.14
+fa00fc251d57f61e619d9c905502745fae21c58c cubicweb-debian-version-3.17.14-1
+fa00fc251d57f61e619d9c905502745fae21c58c cubicweb-centos-version-3.17.14-1
+ee413076752b3e606801ef55e48f7e7ccd1f7238 cubicweb-version-3.17.15
+ee413076752b3e606801ef55e48f7e7ccd1f7238 cubicweb-debian-version-3.17.15-1
+ee413076752b3e606801ef55e48f7e7ccd1f7238 cubicweb-centos-version-3.17.15-1
+a979d1594af6501a774fb32eb67cd32fea626655 cubicweb-version-3.17.16
+a979d1594af6501a774fb32eb67cd32fea626655 cubicweb-debian-version-3.17.16-1
+a979d1594af6501a774fb32eb67cd32fea626655 cubicweb-centos-version-3.17.16-1
+57e9d1c70512d0f4e2c33d33db436a8274e10c1a cubicweb-version-3.17.17
+57e9d1c70512d0f4e2c33d33db436a8274e10c1a cubicweb-debian-version-3.17.17-1
+57e9d1c70512d0f4e2c33d33db436a8274e10c1a cubicweb-centos-version-3.17.17-1
+cda4b066933f216abe185786f5458176894bdaf0 cubicweb-version-3.17.18
+cda4b066933f216abe185786f5458176894bdaf0 cubicweb-centos-version-3.17.18-1
+cda4b066933f216abe185786f5458176894bdaf0 cubicweb-debian-version-3.17.18-1
+db37bf35a1474843ded0a537f9cb4838f4a78cda cubicweb-version-3.18.0
+db37bf35a1474843ded0a537f9cb4838f4a78cda cubicweb-debian-version-3.18.0-1
+db37bf35a1474843ded0a537f9cb4838f4a78cda cubicweb-centos-version-3.18.0-1
+60322cb8636c0402cdac025d3297626c41583023 cubicweb-version-3.18.1
+60322cb8636c0402cdac025d3297626c41583023 cubicweb-centos-version-3.18.1-1
+60322cb8636c0402cdac025d3297626c41583023 cubicweb-debian-version-3.18.1-1
+6880674c1a2669e3635abd688755116dda72e65e cubicweb-version-3.18.2
+6880674c1a2669e3635abd688755116dda72e65e cubicweb-centos-version-3.18.2-1
+6880674c1a2669e3635abd688755116dda72e65e cubicweb-debian-version-3.18.2-1
+afd21fea201a745051357b7aa6be3c7da1ae5bd2 cubicweb-version-3.18.3
+afd21fea201a745051357b7aa6be3c7da1ae5bd2 cubicweb-debian-version-3.18.3-1
+afd21fea201a745051357b7aa6be3c7da1ae5bd2 cubicweb-centos-version-3.18.3-1
+0176da9bc75293e200de4f7b934c5d4c7c805199 cubicweb-version-3.18.4
+0176da9bc75293e200de4f7b934c5d4c7c805199 cubicweb-debian-version-3.18.4-1
+0176da9bc75293e200de4f7b934c5d4c7c805199 cubicweb-centos-version-3.18.4-1
+5071b69b6b0b0de937bb231404cbf652a103dbe0 cubicweb-version-3.18.5
+5071b69b6b0b0de937bb231404cbf652a103dbe0 cubicweb-centos-version-3.18.5-1
+5071b69b6b0b0de937bb231404cbf652a103dbe0 cubicweb-debian-version-3.18.5-1
+d915013567429b481cb2c367071e36451c07a226 cubicweb-version-3.18.6
+d915013567429b481cb2c367071e36451c07a226 cubicweb-centos-version-3.18.6-1
+d915013567429b481cb2c367071e36451c07a226 cubicweb-debian-version-3.18.6-1
+cb96f4403cf2837b595992ceb0dfef2070d55e70 cubicweb-version-3.18.7
+cb96f4403cf2837b595992ceb0dfef2070d55e70 cubicweb-debian-version-3.18.7-1
+cb96f4403cf2837b595992ceb0dfef2070d55e70 cubicweb-centos-version-3.18.7-1
+231094063d62fa7c5296f2e46bc204e728038e85 cubicweb-version-3.18.8
+231094063d62fa7c5296f2e46bc204e728038e85 cubicweb-debian-version-3.18.8-1
+231094063d62fa7c5296f2e46bc204e728038e85 cubicweb-centos-version-3.18.8-1
+1141927b8494aabd16e31b0d0d9a50fe1fed5f2f cubicweb-version-3.19.0
+1141927b8494aabd16e31b0d0d9a50fe1fed5f2f cubicweb-debian-version-3.19.0-1
+1141927b8494aabd16e31b0d0d9a50fe1fed5f2f cubicweb-centos-version-3.19.0-1
+1fe4bc4a8ac8831a379e9ebea08d75fbb6fc5c2a cubicweb-version-3.19.1
+1fe4bc4a8ac8831a379e9ebea08d75fbb6fc5c2a cubicweb-debian-version-3.19.1-1
+1fe4bc4a8ac8831a379e9ebea08d75fbb6fc5c2a cubicweb-centos-version-3.19.1-1
+8ac2202866e747444ce12778ff8789edd9c92eae cubicweb-version-3.19.2
+8ac2202866e747444ce12778ff8789edd9c92eae cubicweb-debian-version-3.19.2-1
+8ac2202866e747444ce12778ff8789edd9c92eae cubicweb-centos-version-3.19.2-1
+37f7c60f89f13dfcf326a4ea0a98ca20d959f7bd cubicweb-version-3.19.3
+37f7c60f89f13dfcf326a4ea0a98ca20d959f7bd cubicweb-debian-version-3.19.3-1
+37f7c60f89f13dfcf326a4ea0a98ca20d959f7bd cubicweb-centos-version-3.19.3-1
+c4e740e50fc7d371d14df17d26bc42d1f8060261 cubicweb-version-3.19.4
+c4e740e50fc7d371d14df17d26bc42d1f8060261 cubicweb-debian-version-3.19.4-1
+c4e740e50fc7d371d14df17d26bc42d1f8060261 cubicweb-centos-version-3.19.4-1
+3ac86df519af2a1194cb3fc882d30d0e1bf44e3b cubicweb-version-3.19.5
+3ac86df519af2a1194cb3fc882d30d0e1bf44e3b cubicweb-debian-version-3.19.5-1
+3ac86df519af2a1194cb3fc882d30d0e1bf44e3b cubicweb-centos-version-3.19.5-1
+934341b848a6874688314d7c154183aca3aed530 cubicweb-version-3.19.6
+934341b848a6874688314d7c154183aca3aed530 cubicweb-debian-version-3.19.6-1
+934341b848a6874688314d7c154183aca3aed530 cubicweb-centos-version-3.19.6-1
+ac4f5f615597575bec32f8f591260e5a91e53855 cubicweb-version-3.19.7
+ac4f5f615597575bec32f8f591260e5a91e53855 cubicweb-debian-version-3.19.7-1
+ac4f5f615597575bec32f8f591260e5a91e53855 cubicweb-centos-version-3.19.7-1
+efc8645ece4300958e3628db81464fef12d5f6e8 cubicweb-version-3.19.8
+efc8645ece4300958e3628db81464fef12d5f6e8 cubicweb-debian-version-3.19.8-1
+efc8645ece4300958e3628db81464fef12d5f6e8 cubicweb-centos-version-3.19.8-1
+b7c373d74754f5ba9344575cb179b47282c413b6 cubicweb-version-3.19.9
+b7c373d74754f5ba9344575cb179b47282c413b6 cubicweb-debian-version-3.19.9-1
+b7c373d74754f5ba9344575cb179b47282c413b6 cubicweb-centos-version-3.19.9-1
+3bab0b9b0ee7355a6fea45c2adca88bffe130e5d cubicweb-version-3.19.10
+3bab0b9b0ee7355a6fea45c2adca88bffe130e5d cubicweb-debian-version-3.19.10-1
+3bab0b9b0ee7355a6fea45c2adca88bffe130e5d cubicweb-centos-version-3.19.10-1
+1ae64186af9448dffbeebdef910c8c7391c04313 cubicweb-version-3.19.11
+1ae64186af9448dffbeebdef910c8c7391c04313 cubicweb-debian-version-3.19.11-1
+1ae64186af9448dffbeebdef910c8c7391c04313 cubicweb-centos-version-3.19.11-1
+6d265ea7d56fe49e9dff261d3b2caf3c2b6f9409 cubicweb-debian-version-3.19.11-2
+7e6b7739afe6128589ad51b0318decb767cbae36 cubicweb-version-3.20.0
+7e6b7739afe6128589ad51b0318decb767cbae36 cubicweb-debian-version-3.20.0-1
+7e6b7739afe6128589ad51b0318decb767cbae36 cubicweb-centos-version-3.20.0-1
+43eef610ef11673d01750459356aec5a96174ca0 cubicweb-version-3.20.1
+43eef610ef11673d01750459356aec5a96174ca0 cubicweb-debian-version-3.20.1-1
+43eef610ef11673d01750459356aec5a96174ca0 cubicweb-centos-version-3.20.1-1
+138464fc1c3397979b729cca3a30bc4481fd1e2d cubicweb-version-3.20.2
+138464fc1c3397979b729cca3a30bc4481fd1e2d cubicweb-debian-version-3.20.2-1
+138464fc1c3397979b729cca3a30bc4481fd1e2d cubicweb-centos-version-3.20.2-1
+7d3a583ed5392ba528e56ef6902ced5468613f4d cubicweb-version-3.20.3
+7d3a583ed5392ba528e56ef6902ced5468613f4d cubicweb-debian-version-3.20.3-1
+7d3a583ed5392ba528e56ef6902ced5468613f4d cubicweb-centos-version-3.20.3-1
+49831fdc84dc7e7bed01d5e8110a46242b5ccda6 cubicweb-version-3.20.4
+49831fdc84dc7e7bed01d5e8110a46242b5ccda6 cubicweb-debian-version-3.20.4-1
+49831fdc84dc7e7bed01d5e8110a46242b5ccda6 cubicweb-centos-version-3.20.4-1
+51aa56e7d507958b3326abbb6a31d0e6dde6b47b cubicweb-version-3.20.5
+51aa56e7d507958b3326abbb6a31d0e6dde6b47b cubicweb-debian-version-3.20.5-1
+51aa56e7d507958b3326abbb6a31d0e6dde6b47b cubicweb-centos-version-3.20.5-1
+7f64859dcbcdc6394421b8a5175896ba2e5caeb5 cubicweb-version-3.20.6
+7f64859dcbcdc6394421b8a5175896ba2e5caeb5 cubicweb-debian-version-3.20.6-1
+7f64859dcbcdc6394421b8a5175896ba2e5caeb5 cubicweb-centos-version-3.20.6-1
+359d68bc12602c73559531b09d00399f4cbca785 cubicweb-version-3.20.7
+359d68bc12602c73559531b09d00399f4cbca785 cubicweb-debian-version-3.20.7-1
+359d68bc12602c73559531b09d00399f4cbca785 cubicweb-centos-version-3.20.7-1
+1141927b8494aabd16e31b0d0d9a50fe1fed5f2f 3.19.0
+1141927b8494aabd16e31b0d0d9a50fe1fed5f2f debian/3.19.0-1
+1141927b8494aabd16e31b0d0d9a50fe1fed5f2f centos/3.19.0-1
+1fe4bc4a8ac8831a379e9ebea08d75fbb6fc5c2a 3.19.1
+1fe4bc4a8ac8831a379e9ebea08d75fbb6fc5c2a debian/3.19.1-1
+1fe4bc4a8ac8831a379e9ebea08d75fbb6fc5c2a centos/3.19.1-1
+8ac2202866e747444ce12778ff8789edd9c92eae 3.19.2
+8ac2202866e747444ce12778ff8789edd9c92eae debian/3.19.2-1
+8ac2202866e747444ce12778ff8789edd9c92eae centos/3.19.2-1
+37f7c60f89f13dfcf326a4ea0a98ca20d959f7bd 3.19.3
+37f7c60f89f13dfcf326a4ea0a98ca20d959f7bd debian/3.19.3-1
+37f7c60f89f13dfcf326a4ea0a98ca20d959f7bd centos/3.19.3-1
+c4e740e50fc7d371d14df17d26bc42d1f8060261 3.19.4
+c4e740e50fc7d371d14df17d26bc42d1f8060261 debian/3.19.4-1
+c4e740e50fc7d371d14df17d26bc42d1f8060261 centos/3.19.4-1
+3ac86df519af2a1194cb3fc882d30d0e1bf44e3b 3.19.5
+3ac86df519af2a1194cb3fc882d30d0e1bf44e3b debian/3.19.5-1
+3ac86df519af2a1194cb3fc882d30d0e1bf44e3b centos/3.19.5-1
+934341b848a6874688314d7c154183aca3aed530 3.19.6
+934341b848a6874688314d7c154183aca3aed530 debian/3.19.6-1
+934341b848a6874688314d7c154183aca3aed530 centos/3.19.6-1
+ac4f5f615597575bec32f8f591260e5a91e53855 3.19.7
+ac4f5f615597575bec32f8f591260e5a91e53855 debian/3.19.7-1
+ac4f5f615597575bec32f8f591260e5a91e53855 centos/3.19.7-1
+efc8645ece4300958e3628db81464fef12d5f6e8 3.19.8
+efc8645ece4300958e3628db81464fef12d5f6e8 debian/3.19.8-1
+efc8645ece4300958e3628db81464fef12d5f6e8 centos/3.19.8-1
+b7c373d74754f5ba9344575cb179b47282c413b6 3.19.9
+b7c373d74754f5ba9344575cb179b47282c413b6 debian/3.19.9-1
+b7c373d74754f5ba9344575cb179b47282c413b6 centos/3.19.9-1
+3bab0b9b0ee7355a6fea45c2adca88bffe130e5d 3.19.10
+3bab0b9b0ee7355a6fea45c2adca88bffe130e5d debian/3.19.10-1
+3bab0b9b0ee7355a6fea45c2adca88bffe130e5d centos/3.19.10-1
+1ae64186af9448dffbeebdef910c8c7391c04313 3.19.11
+1ae64186af9448dffbeebdef910c8c7391c04313 debian/3.19.11-1
+1ae64186af9448dffbeebdef910c8c7391c04313 centos/3.19.11-1
+6d265ea7d56fe49e9dff261d3b2caf3c2b6f9409 debian/3.19.11-2
+5932de3d50bf023544c8f54b47898e4db35eac7c 3.19.12
+5932de3d50bf023544c8f54b47898e4db35eac7c debian/3.19.12-1
+5932de3d50bf023544c8f54b47898e4db35eac7c centos/3.19.12-1
+f933a38d7ab5fc6f2ad593fe1cf9985ce9d7e873 3.19.13
+f933a38d7ab5fc6f2ad593fe1cf9985ce9d7e873 debian/3.19.13-1
+f933a38d7ab5fc6f2ad593fe1cf9985ce9d7e873 centos/3.19.13-1
+72a0f70879ac40ea57575be90bc6427f61ce3bd6 3.19.14
+72a0f70879ac40ea57575be90bc6427f61ce3bd6 debian/3.19.14-1
+72a0f70879ac40ea57575be90bc6427f61ce3bd6 centos/3.19.14-1
+7e6b7739afe6128589ad51b0318decb767cbae36 3.20.0
+7e6b7739afe6128589ad51b0318decb767cbae36 debian/3.20.0-1
+7e6b7739afe6128589ad51b0318decb767cbae36 centos/3.20.0-1
+43eef610ef11673d01750459356aec5a96174ca0 3.20.1
+43eef610ef11673d01750459356aec5a96174ca0 debian/3.20.1-1
+43eef610ef11673d01750459356aec5a96174ca0 centos/3.20.1-1
+138464fc1c3397979b729cca3a30bc4481fd1e2d 3.20.2
+138464fc1c3397979b729cca3a30bc4481fd1e2d debian/3.20.2-1
+138464fc1c3397979b729cca3a30bc4481fd1e2d centos/3.20.2-1
+7d3a583ed5392ba528e56ef6902ced5468613f4d 3.20.3
+7d3a583ed5392ba528e56ef6902ced5468613f4d debian/3.20.3-1
+7d3a583ed5392ba528e56ef6902ced5468613f4d centos/3.20.3-1
+49831fdc84dc7e7bed01d5e8110a46242b5ccda6 3.20.4
+49831fdc84dc7e7bed01d5e8110a46242b5ccda6 debian/3.20.4-1
+49831fdc84dc7e7bed01d5e8110a46242b5ccda6 centos/3.20.4-1
+51aa56e7d507958b3326abbb6a31d0e6dde6b47b 3.20.5
+51aa56e7d507958b3326abbb6a31d0e6dde6b47b debian/3.20.5-1
+51aa56e7d507958b3326abbb6a31d0e6dde6b47b centos/3.20.5-1
+7f64859dcbcdc6394421b8a5175896ba2e5caeb5 3.20.6
+7f64859dcbcdc6394421b8a5175896ba2e5caeb5 debian/3.20.6-1
+7f64859dcbcdc6394421b8a5175896ba2e5caeb5 centos/3.20.6-1
+359d68bc12602c73559531b09d00399f4cbca785 3.20.7
+359d68bc12602c73559531b09d00399f4cbca785 debian/3.20.7-1
+359d68bc12602c73559531b09d00399f4cbca785 centos/3.20.7-1
+ec284980ed9e214fe6c15cc4cf9617961d88928d 3.20.8
+ec284980ed9e214fe6c15cc4cf9617961d88928d debian/3.20.8-1
+ec284980ed9e214fe6c15cc4cf9617961d88928d centos/3.20.8-1
+d477e64475821c21632878062bf68d142252ffc2 3.20.9
+d477e64475821c21632878062bf68d142252ffc2 debian/3.20.9-1
+d477e64475821c21632878062bf68d142252ffc2 centos/3.20.9-1
+8f82e95239625d153a9f1de6e79820d96d9efe8a 3.20.10
+8f82e95239625d153a9f1de6e79820d96d9efe8a debian/3.20.10-1
+8f82e95239625d153a9f1de6e79820d96d9efe8a centos/3.20.10-1
+c44930ac9579fe4d526b26892954e56021af18be 3.20.11
+c44930ac9579fe4d526b26892954e56021af18be debian/3.20.11-1
+c44930ac9579fe4d526b26892954e56021af18be centos/3.20.11-1
+03e8fc9f79a6e489a1b5c695eb0cd3fbb1afe9d4 3.20.12
+03e8fc9f79a6e489a1b5c695eb0cd3fbb1afe9d4 debian/3.20.12-1
+03e8fc9f79a6e489a1b5c695eb0cd3fbb1afe9d4 centos/3.20.12-1
+8c5dabbcd4d9505c3a617f9dbe2b10172bdc2b3a 3.20.13
+8c5dabbcd4d9505c3a617f9dbe2b10172bdc2b3a debian/3.20.13-1
+8c5dabbcd4d9505c3a617f9dbe2b10172bdc2b3a centos/3.20.13-1
+f66a4895759e0913b1203943fc2cd7be1a821e05 3.20.14
+f66a4895759e0913b1203943fc2cd7be1a821e05 debian/3.20.14-1
+f66a4895759e0913b1203943fc2cd7be1a821e05 centos/3.20.14-1
+636a83e65870433c2560f3c49d55ca628bc96e11 3.20.15
+636a83e65870433c2560f3c49d55ca628bc96e11 debian/3.20.15-1
+636a83e65870433c2560f3c49d55ca628bc96e11 centos/3.20.15-1
+887c6eef807781560adcd4ecd2dea9011f5a6681 3.21.0
+887c6eef807781560adcd4ecd2dea9011f5a6681 debian/3.21.0-1
+887c6eef807781560adcd4ecd2dea9011f5a6681 centos/3.21.0-1
+a8a0de0298a58306d63dbc998ad60c48bf18c80a 3.21.1
+a8a0de0298a58306d63dbc998ad60c48bf18c80a debian/3.21.1-1
+a8a0de0298a58306d63dbc998ad60c48bf18c80a centos/3.21.1-1
+a5428e1ab36491a8e6d66ce09d23b708b97e1337 3.21.2
+a5428e1ab36491a8e6d66ce09d23b708b97e1337 debian/3.21.2-1
+a5428e1ab36491a8e6d66ce09d23b708b97e1337 centos/3.21.2-1
+9edfe9429209848e31d1998df48da7a84db0c819 3.21.3
+9edfe9429209848e31d1998df48da7a84db0c819 debian/3.21.3-1
+9edfe9429209848e31d1998df48da7a84db0c819 centos/3.21.3-1
+d3b92d3a7db098b25168beef9b3ee7b36263a652 3.21.4
+d3b92d3a7db098b25168beef9b3ee7b36263a652 debian/3.21.4-1
+d3b92d3a7db098b25168beef9b3ee7b36263a652 centos/3.21.4-1
+e0572a786e6b4b0965d405dd95cf5bce754005a2 3.21.5
+e0572a786e6b4b0965d405dd95cf5bce754005a2 debian/3.21.5-1
+e0572a786e6b4b0965d405dd95cf5bce754005a2 centos/3.21.5-1
+228b6d2777e44d7bc158d0b4579d09960acea926 debian/3.21.5-2
+b3cbbb7690b6e193570ffe4846615d372868a923 3.21.6
+b3cbbb7690b6e193570ffe4846615d372868a923 debian/3.21.6-1
+b3cbbb7690b6e193570ffe4846615d372868a923 centos/3.21.6-1
+de472896fc0a18d6b831e6fed0eeda5921ec522c 3.22.0
+de472896fc0a18d6b831e6fed0eeda5921ec522c debian/3.22.0-1
+de472896fc0a18d6b831e6fed0eeda5921ec522c centos/3.22.0-1
+d0d86803a804854be0a1b2d49079a94d1c193ee9 3.22.1
+d0d86803a804854be0a1b2d49079a94d1c193ee9 debian/3.22.1-1
+d0d86803a804854be0a1b2d49079a94d1c193ee9 centos/3.22.1-1
+1b93ff37755b0588081f6fcb93da0dde772a6adb 3.22.2
+1b93ff37755b0588081f6fcb93da0dde772a6adb debian/3.22.2-1
+1b93ff37755b0588081f6fcb93da0dde772a6adb centos/3.22.2-1
+b1e7de00053628968ea364ee9044fb4f8714fb50 3.22.3
+b1e7de00053628968ea364ee9044fb4f8714fb50 debian/3.22.3-1
+b1e7de00053628968ea364ee9044fb4f8714fb50 centos/3.22.3-1
+93b0f836cb075df970236b14f2128478c8b4bcbc 3.23.0
+93b0f836cb075df970236b14f2128478c8b4bcbc centos/3.23.0-1
+93b0f836cb075df970236b14f2128478c8b4bcbc debian/3.23.0-1
+93b0f836cb075df970236b14f2128478c8b4bcbc 3.23.0
+4b07f2a79021697f8a88a4665a6c957d540147bb 3.23.0
+93b0f836cb075df970236b14f2128478c8b4bcbc centos/3.23.0-1
+4b07f2a79021697f8a88a4665a6c957d540147bb centos/3.23.0-1
+93b0f836cb075df970236b14f2128478c8b4bcbc debian/3.23.0-1
+4b07f2a79021697f8a88a4665a6c957d540147bb debian/3.23.0-1
+8f035d5d595301a335e005516c5ab1b124897d8c 3.23.1
+8f035d5d595301a335e005516c5ab1b124897d8c centos/3.23.1-1
+8f035d5d595301a335e005516c5ab1b124897d8c debian/3.23.1-1
diff -r 1400aee10df4 -r faf279e33298 CHANGES
--- a/CHANGES Thu Jul 07 14:30:32 2016 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,67 +0,0 @@
-0.3.1 (2015-06-18)
-------------------
-
-- debian: add python-wsgicors dependency (:issue:`4751889`).
-- Handle absence of anonymous user (:issue:`4751862`).
-
-0.3.0 (2015-05-11)
-------------------
-
-- Reorganize the authentication stack around on pyramid_multiauth_
- (:issue:`4985962`).
-- Implement the CW message API on top of the Pyramid :ref:`flash_messages`
- (:issue:`5298654`).
-
-- Don't commit `'uncommitable'` connexions anymore (:issue:`5343870`).
-- Debug mode enables pyramid.reload_templates.
-- Testcases can override pyramid settings (:issue:`5307426`).
-- pyramid_debugtoolbar is not mandatory anymore (:issue:`5310434`).
-- Add unit tests (coverage 79%).
-- Performance improvements (:issue:`4891437` & :issue:`4870347`).
-- Documentation improvements
-- Set response headers on exceptions (:issue:`4939219`).
-- Rename the package name from 'pyramid_cubicweb' to 'pyramid-cubicweb', for
- consistency with other pyramid extensions.
-
-.. _pyramid_multiauth: https://github.com/mozilla-services/pyramid_multiauth
-
-0.2.1 (2015-01-23)
-------------------
-
-- Fix cors 'methods' and 'headers' parameters passing (:issue:`4849874`).
-
-0.2.0 (2015-01-21)
-------------------
-
-- Create a documentation (:issue:`4849313`)
-- Fix cors 'origin' parameter passing (:issue:`4783343`)
-- Fix configuration loading when 'cubicweb.includes' is not set
- (:issue:`4849314`)
-- Move auth-related code to :mod:`pyramid_cubicweb.auth`.
-- Add profiling tools
-- Cleanups
-
-0.1.3 (2014-12-08)
-------------------
-
-- Fix cookies max_age (:issue:`4731764`)
-
-0.1.2 (2014-11-15)
-------------------
-
-- Fix excessive rollbacks on HTTPSuccessful or HTTPRedirection
- (:issue:`4566482`)
-
-0.1.1 (2014-11-02)
-------------------
-
-- Have `CWUser.last_login_time` properly updated (:issue:`4549891`)
-
-0.1.0 (2014-10-23)
-------------------
-
-Initial release
-
-- Provides a pyramid-based authentication and session management for a
- cubicweb instance.
-- Run a cubicweb instance as a pyramid application
diff -r 1400aee10df4 -r faf279e33298 COPYING
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/COPYING Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,339 @@
+ GNU GENERAL PUBLIC LICENSE
+ Version 2, June 1991
+
+ Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
+ 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The licenses for most software are designed to take away your
+freedom to share and change it. By contrast, the GNU General Public
+License is intended to guarantee your freedom to share and change free
+software--to make sure the software is free for all its users. This
+General Public License applies to most of the Free Software
+Foundation's software and to any other program whose authors commit to
+using it. (Some other Free Software Foundation software is covered by
+the GNU Lesser General Public License instead.) You can apply it to
+your programs, too.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+this service if you wish), that you receive source code or can get it
+if you want it, that you can change the software or use pieces of it
+in new free programs; and that you know you can do these things.
+
+ To protect your rights, we need to make restrictions that forbid
+anyone to deny you these rights or to ask you to surrender the rights.
+These restrictions translate to certain responsibilities for you if you
+distribute copies of the software, or if you modify it.
+
+ For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must give the recipients all the rights that
+you have. You must make sure that they, too, receive or can get the
+source code. And you must show them these terms so they know their
+rights.
+
+ We protect your rights with two steps: (1) copyright the software, and
+(2) offer you this license which gives you legal permission to copy,
+distribute and/or modify the software.
+
+ Also, for each author's protection and ours, we want to make certain
+that everyone understands that there is no warranty for this free
+software. If the software is modified by someone else and passed on, we
+want its recipients to know that what they have is not the original, so
+that any problems introduced by others will not reflect on the original
+authors' reputations.
+
+ Finally, any free program is threatened constantly by software
+patents. We wish to avoid the danger that redistributors of a free
+program will individually obtain patent licenses, in effect making the
+program proprietary. To prevent this, we have made it clear that any
+patent must be licensed for everyone's free use or not licensed at all.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ GNU GENERAL PUBLIC LICENSE
+ TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+ 0. This License applies to any program or other work which contains
+a notice placed by the copyright holder saying it may be distributed
+under the terms of this General Public License. The "Program", below,
+refers to any such program or work, and a "work based on the Program"
+means either the Program or any derivative work under copyright law:
+that is to say, a work containing the Program or a portion of it,
+either verbatim or with modifications and/or translated into another
+language. (Hereinafter, translation is included without limitation in
+the term "modification".) Each licensee is addressed as "you".
+
+Activities other than copying, distribution and modification are not
+covered by this License; they are outside its scope. The act of
+running the Program is not restricted, and the output from the Program
+is covered only if its contents constitute a work based on the
+Program (independent of having been made by running the Program).
+Whether that is true depends on what the Program does.
+
+ 1. You may copy and distribute verbatim copies of the Program's
+source code as you receive it, in any medium, provided that you
+conspicuously and appropriately publish on each copy an appropriate
+copyright notice and disclaimer of warranty; keep intact all the
+notices that refer to this License and to the absence of any warranty;
+and give any other recipients of the Program a copy of this License
+along with the Program.
+
+You may charge a fee for the physical act of transferring a copy, and
+you may at your option offer warranty protection in exchange for a fee.
+
+ 2. You may modify your copy or copies of the Program or any portion
+of it, thus forming a work based on the Program, and copy and
+distribute such modifications or work under the terms of Section 1
+above, provided that you also meet all of these conditions:
+
+ a) You must cause the modified files to carry prominent notices
+ stating that you changed the files and the date of any change.
+
+ b) You must cause any work that you distribute or publish, that in
+ whole or in part contains or is derived from the Program or any
+ part thereof, to be licensed as a whole at no charge to all third
+ parties under the terms of this License.
+
+ c) If the modified program normally reads commands interactively
+ when run, you must cause it, when started running for such
+ interactive use in the most ordinary way, to print or display an
+ announcement including an appropriate copyright notice and a
+ notice that there is no warranty (or else, saying that you provide
+ a warranty) and that users may redistribute the program under
+ these conditions, and telling the user how to view a copy of this
+ License. (Exception: if the Program itself is interactive but
+ does not normally print such an announcement, your work based on
+ the Program is not required to print an announcement.)
+
+These requirements apply to the modified work as a whole. If
+identifiable sections of that work are not derived from the Program,
+and can be reasonably considered independent and separate works in
+themselves, then this License, and its terms, do not apply to those
+sections when you distribute them as separate works. But when you
+distribute the same sections as part of a whole which is a work based
+on the Program, the distribution of the whole must be on the terms of
+this License, whose permissions for other licensees extend to the
+entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest
+your rights to work written entirely by you; rather, the intent is to
+exercise the right to control the distribution of derivative or
+collective works based on the Program.
+
+In addition, mere aggregation of another work not based on the Program
+with the Program (or with a work based on the Program) on a volume of
+a storage or distribution medium does not bring the other work under
+the scope of this License.
+
+ 3. You may copy and distribute the Program (or a work based on it,
+under Section 2) in object code or executable form under the terms of
+Sections 1 and 2 above provided that you also do one of the following:
+
+ a) Accompany it with the complete corresponding machine-readable
+ source code, which must be distributed under the terms of Sections
+ 1 and 2 above on a medium customarily used for software interchange; or,
+
+ b) Accompany it with a written offer, valid for at least three
+ years, to give any third party, for a charge no more than your
+ cost of physically performing source distribution, a complete
+ machine-readable copy of the corresponding source code, to be
+ distributed under the terms of Sections 1 and 2 above on a medium
+ customarily used for software interchange; or,
+
+ c) Accompany it with the information you received as to the offer
+ to distribute corresponding source code. (This alternative is
+ allowed only for noncommercial distribution and only if you
+ received the program in object code or executable form with such
+ an offer, in accord with Subsection b above.)
+
+The source code for a work means the preferred form of the work for
+making modifications to it. For an executable work, complete source
+code means all the source code for all modules it contains, plus any
+associated interface definition files, plus the scripts used to
+control compilation and installation of the executable. However, as a
+special exception, the source code distributed need not include
+anything that is normally distributed (in either source or binary
+form) with the major components (compiler, kernel, and so on) of the
+operating system on which the executable runs, unless that component
+itself accompanies the executable.
+
+If distribution of executable or object code is made by offering
+access to copy from a designated place, then offering equivalent
+access to copy the source code from the same place counts as
+distribution of the source code, even though third parties are not
+compelled to copy the source along with the object code.
+
+ 4. You may not copy, modify, sublicense, or distribute the Program
+except as expressly provided under this License. Any attempt
+otherwise to copy, modify, sublicense or distribute the Program is
+void, and will automatically terminate your rights under this License.
+However, parties who have received copies, or rights, from you under
+this License will not have their licenses terminated so long as such
+parties remain in full compliance.
+
+ 5. You are not required to accept this License, since you have not
+signed it. However, nothing else grants you permission to modify or
+distribute the Program or its derivative works. These actions are
+prohibited by law if you do not accept this License. Therefore, by
+modifying or distributing the Program (or any work based on the
+Program), you indicate your acceptance of this License to do so, and
+all its terms and conditions for copying, distributing or modifying
+the Program or works based on it.
+
+ 6. Each time you redistribute the Program (or any work based on the
+Program), the recipient automatically receives a license from the
+original licensor to copy, distribute or modify the Program subject to
+these terms and conditions. You may not impose any further
+restrictions on the recipients' exercise of the rights granted herein.
+You are not responsible for enforcing compliance by third parties to
+this License.
+
+ 7. If, as a consequence of a court judgment or allegation of patent
+infringement or for any other reason (not limited to patent issues),
+conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot
+distribute so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you
+may not distribute the Program at all. For example, if a patent
+license would not permit royalty-free redistribution of the Program by
+all those who receive copies directly or indirectly through you, then
+the only way you could satisfy both it and this License would be to
+refrain entirely from distribution of the Program.
+
+If any portion of this section is held invalid or unenforceable under
+any particular circumstance, the balance of the section is intended to
+apply and the section as a whole is intended to apply in other
+circumstances.
+
+It is not the purpose of this section to induce you to infringe any
+patents or other property right claims or to contest validity of any
+such claims; this section has the sole purpose of protecting the
+integrity of the free software distribution system, which is
+implemented by public license practices. Many people have made
+generous contributions to the wide range of software distributed
+through that system in reliance on consistent application of that
+system; it is up to the author/donor to decide if he or she is willing
+to distribute software through any other system and a licensee cannot
+impose that choice.
+
+This section is intended to make thoroughly clear what is believed to
+be a consequence of the rest of this License.
+
+ 8. If the distribution and/or use of the Program is restricted in
+certain countries either by patents or by copyrighted interfaces, the
+original copyright holder who places the Program under this License
+may add an explicit geographical distribution limitation excluding
+those countries, so that distribution is permitted only in or among
+countries not thus excluded. In such case, this License incorporates
+the limitation as if written in the body of this License.
+
+ 9. The Free Software Foundation may publish revised and/or new versions
+of the General Public License from time to time. Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Program
+specifies a version number of this License which applies to it and "any
+later version", you have the option of following the terms and conditions
+either of that version or of any later version published by the Free
+Software Foundation. If the Program does not specify a version number of
+this License, you may choose any version ever published by the Free Software
+Foundation.
+
+ 10. If you wish to incorporate parts of the Program into other free
+programs whose distribution conditions are different, write to the author
+to ask for permission. For software which is copyrighted by the Free
+Software Foundation, write to the Free Software Foundation; we sometimes
+make exceptions for this. Our decision will be guided by the two goals
+of preserving the free status of all derivatives of our free software and
+of promoting the sharing and reuse of software generally.
+
+ NO WARRANTY
+
+ 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
+FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
+OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
+PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
+OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
+TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
+PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
+REPAIR OR CORRECTION.
+
+ 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
+REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
+INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
+OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
+TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
+YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
+PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGES.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+convey the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; either version 2 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License along
+ with this program; if not, write to the Free Software Foundation, Inc.,
+ 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+Also add information on how to contact you by electronic and paper mail.
+
+If the program is interactive, make it output a short notice like this
+when it starts in an interactive mode:
+
+ Gnomovision version 69, Copyright (C) year name of author
+ Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+ This is free software, and you are welcome to redistribute it
+ under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License. Of course, the commands you use may
+be called something other than `show w' and `show c'; they could even be
+mouse-clicks or menu items--whatever suits your program.
+
+You should also get your employer (if you work as a programmer) or your
+school, if any, to sign a "copyright disclaimer" for the program, if
+necessary. Here is a sample; alter the names:
+
+ Yoyodyne, Inc., hereby disclaims all copyright interest in the program
+ `Gnomovision' (which makes passes at compilers) written by James Hacker.
+
+  <signature of Ty Coon>, 1 April 1989
+ Ty Coon, President of Vice
+
+This General Public License does not permit incorporating your program into
+proprietary programs. If your program is a subroutine library, you may
+consider it more useful to permit linking proprietary applications with the
+library. If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.
diff -r 1400aee10df4 -r faf279e33298 COPYING.LESSER
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/COPYING.LESSER Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,510 @@
+
+ GNU LESSER GENERAL PUBLIC LICENSE
+ Version 2.1, February 1999
+
+ Copyright (C) 1991, 1999 Free Software Foundation, Inc.
+ 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+[This is the first released version of the Lesser GPL. It also counts
+ as the successor of the GNU Library Public License, version 2, hence
+ the version number 2.1.]
+
+ Preamble
+
+ The licenses for most software are designed to take away your
+freedom to share and change it. By contrast, the GNU General Public
+Licenses are intended to guarantee your freedom to share and change
+free software--to make sure the software is free for all its users.
+
+ This license, the Lesser General Public License, applies to some
+specially designated software packages--typically libraries--of the
+Free Software Foundation and other authors who decide to use it. You
+can use it too, but we suggest you first think carefully about whether
+this license or the ordinary General Public License is the better
+strategy to use in any particular case, based on the explanations
+below.
+
+ When we speak of free software, we are referring to freedom of use,
+not price. Our General Public Licenses are designed to make sure that
+you have the freedom to distribute copies of free software (and charge
+for this service if you wish); that you receive source code or can get
+it if you want it; that you can change the software and use pieces of
+it in new free programs; and that you are informed that you can do
+these things.
+
+ To protect your rights, we need to make restrictions that forbid
+distributors to deny you these rights or to ask you to surrender these
+rights. These restrictions translate to certain responsibilities for
+you if you distribute copies of the library or if you modify it.
+
+ For example, if you distribute copies of the library, whether gratis
+or for a fee, you must give the recipients all the rights that we gave
+you. You must make sure that they, too, receive or can get the source
+code. If you link other code with the library, you must provide
+complete object files to the recipients, so that they can relink them
+with the library after making changes to the library and recompiling
+it. And you must show them these terms so they know their rights.
+
+ We protect your rights with a two-step method: (1) we copyright the
+library, and (2) we offer you this license, which gives you legal
+permission to copy, distribute and/or modify the library.
+
+ To protect each distributor, we want to make it very clear that
+there is no warranty for the free library. Also, if the library is
+modified by someone else and passed on, the recipients should know
+that what they have is not the original version, so that the original
+author's reputation will not be affected by problems that might be
+introduced by others.
+
+ Finally, software patents pose a constant threat to the existence of
+any free program. We wish to make sure that a company cannot
+effectively restrict the users of a free program by obtaining a
+restrictive license from a patent holder. Therefore, we insist that
+any patent license obtained for a version of the library must be
+consistent with the full freedom of use specified in this license.
+
+ Most GNU software, including some libraries, is covered by the
+ordinary GNU General Public License. This license, the GNU Lesser
+General Public License, applies to certain designated libraries, and
+is quite different from the ordinary General Public License. We use
+this license for certain libraries in order to permit linking those
+libraries into non-free programs.
+
+ When a program is linked with a library, whether statically or using
+a shared library, the combination of the two is legally speaking a
+combined work, a derivative of the original library. The ordinary
+General Public License therefore permits such linking only if the
+entire combination fits its criteria of freedom. The Lesser General
+Public License permits more lax criteria for linking other code with
+the library.
+
+ We call this license the "Lesser" General Public License because it
+does Less to protect the user's freedom than the ordinary General
+Public License. It also provides other free software developers Less
+of an advantage over competing non-free programs. These disadvantages
+are the reason we use the ordinary General Public License for many
+libraries. However, the Lesser license provides advantages in certain
+special circumstances.
+
+ For example, on rare occasions, there may be a special need to
+encourage the widest possible use of a certain library, so that it
+becomes a de-facto standard. To achieve this, non-free programs must
+be allowed to use the library. A more frequent case is that a free
+library does the same job as widely used non-free libraries. In this
+case, there is little to gain by limiting the free library to free
+software only, so we use the Lesser General Public License.
+
+ In other cases, permission to use a particular library in non-free
+programs enables a greater number of people to use a large body of
+free software. For example, permission to use the GNU C Library in
+non-free programs enables many more people to use the whole GNU
+operating system, as well as its variant, the GNU/Linux operating
+system.
+
+ Although the Lesser General Public License is Less protective of the
+users' freedom, it does ensure that the user of a program that is
+linked with the Library has the freedom and the wherewithal to run
+that program using a modified version of the Library.
+
+ The precise terms and conditions for copying, distribution and
+modification follow. Pay close attention to the difference between a
+"work based on the library" and a "work that uses the library". The
+former contains code derived from the library, whereas the latter must
+be combined with the library in order to run.
+
+ GNU LESSER GENERAL PUBLIC LICENSE
+ TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+ 0. This License Agreement applies to any software library or other
+program which contains a notice placed by the copyright holder or
+other authorized party saying it may be distributed under the terms of
+this Lesser General Public License (also called "this License").
+Each licensee is addressed as "you".
+
+ A "library" means a collection of software functions and/or data
+prepared so as to be conveniently linked with application programs
+(which use some of those functions and data) to form executables.
+
+ The "Library", below, refers to any such software library or work
+which has been distributed under these terms. A "work based on the
+Library" means either the Library or any derivative work under
+copyright law: that is to say, a work containing the Library or a
+portion of it, either verbatim or with modifications and/or translated
+straightforwardly into another language. (Hereinafter, translation is
+included without limitation in the term "modification".)
+
+ "Source code" for a work means the preferred form of the work for
+making modifications to it. For a library, complete source code means
+all the source code for all modules it contains, plus any associated
+interface definition files, plus the scripts used to control
+compilation and installation of the library.
+
+ Activities other than copying, distribution and modification are not
+covered by this License; they are outside its scope. The act of
+running a program using the Library is not restricted, and output from
+such a program is covered only if its contents constitute a work based
+on the Library (independent of the use of the Library in a tool for
+writing it). Whether that is true depends on what the Library does
+and what the program that uses the Library does.
+
+ 1. You may copy and distribute verbatim copies of the Library's
+complete source code as you receive it, in any medium, provided that
+you conspicuously and appropriately publish on each copy an
+appropriate copyright notice and disclaimer of warranty; keep intact
+all the notices that refer to this License and to the absence of any
+warranty; and distribute a copy of this License along with the
+Library.
+
+ You may charge a fee for the physical act of transferring a copy,
+and you may at your option offer warranty protection in exchange for a
+fee.
+
+ 2. You may modify your copy or copies of the Library or any portion
+of it, thus forming a work based on the Library, and copy and
+distribute such modifications or work under the terms of Section 1
+above, provided that you also meet all of these conditions:
+
+ a) The modified work must itself be a software library.
+
+ b) You must cause the files modified to carry prominent notices
+ stating that you changed the files and the date of any change.
+
+ c) You must cause the whole of the work to be licensed at no
+ charge to all third parties under the terms of this License.
+
+ d) If a facility in the modified Library refers to a function or a
+ table of data to be supplied by an application program that uses
+ the facility, other than as an argument passed when the facility
+ is invoked, then you must make a good faith effort to ensure that,
+ in the event an application does not supply such function or
+ table, the facility still operates, and performs whatever part of
+ its purpose remains meaningful.
+
+ (For example, a function in a library to compute square roots has
+ a purpose that is entirely well-defined independent of the
+ application. Therefore, Subsection 2d requires that any
+ application-supplied function or table used by this function must
+ be optional: if the application does not supply it, the square
+ root function must still compute square roots.)
+
+These requirements apply to the modified work as a whole. If
+identifiable sections of that work are not derived from the Library,
+and can be reasonably considered independent and separate works in
+themselves, then this License, and its terms, do not apply to those
+sections when you distribute them as separate works. But when you
+distribute the same sections as part of a whole which is a work based
+on the Library, the distribution of the whole must be on the terms of
+this License, whose permissions for other licensees extend to the
+entire whole, and thus to each and every part regardless of who wrote
+it.
+
+Thus, it is not the intent of this section to claim rights or contest
+your rights to work written entirely by you; rather, the intent is to
+exercise the right to control the distribution of derivative or
+collective works based on the Library.
+
+In addition, mere aggregation of another work not based on the Library
+with the Library (or with a work based on the Library) on a volume of
+a storage or distribution medium does not bring the other work under
+the scope of this License.
+
+ 3. You may opt to apply the terms of the ordinary GNU General Public
+License instead of this License to a given copy of the Library. To do
+this, you must alter all the notices that refer to this License, so
+that they refer to the ordinary GNU General Public License, version 2,
+instead of to this License. (If a newer version than version 2 of the
+ordinary GNU General Public License has appeared, then you can specify
+that version instead if you wish.) Do not make any other change in
+these notices.
+
+ Once this change is made in a given copy, it is irreversible for
+that copy, so the ordinary GNU General Public License applies to all
+subsequent copies and derivative works made from that copy.
+
+ This option is useful when you wish to copy part of the code of
+the Library into a program that is not a library.
+
+ 4. You may copy and distribute the Library (or a portion or
+derivative of it, under Section 2) in object code or executable form
+under the terms of Sections 1 and 2 above provided that you accompany
+it with the complete corresponding machine-readable source code, which
+must be distributed under the terms of Sections 1 and 2 above on a
+medium customarily used for software interchange.
+
+ If distribution of object code is made by offering access to copy
+from a designated place, then offering equivalent access to copy the
+source code from the same place satisfies the requirement to
+distribute the source code, even though third parties are not
+compelled to copy the source along with the object code.
+
+ 5. A program that contains no derivative of any portion of the
+Library, but is designed to work with the Library by being compiled or
+linked with it, is called a "work that uses the Library". Such a
+work, in isolation, is not a derivative work of the Library, and
+therefore falls outside the scope of this License.
+
+ However, linking a "work that uses the Library" with the Library
+creates an executable that is a derivative of the Library (because it
+contains portions of the Library), rather than a "work that uses the
+library". The executable is therefore covered by this License.
+Section 6 states terms for distribution of such executables.
+
+ When a "work that uses the Library" uses material from a header file
+that is part of the Library, the object code for the work may be a
+derivative work of the Library even though the source code is not.
+Whether this is true is especially significant if the work can be
+linked without the Library, or if the work is itself a library. The
+threshold for this to be true is not precisely defined by law.
+
+ If such an object file uses only numerical parameters, data
+structure layouts and accessors, and small macros and small inline
+functions (ten lines or less in length), then the use of the object
+file is unrestricted, regardless of whether it is legally a derivative
+work. (Executables containing this object code plus portions of the
+Library will still fall under Section 6.)
+
+ Otherwise, if the work is a derivative of the Library, you may
+distribute the object code for the work under the terms of Section 6.
+Any executables containing that work also fall under Section 6,
+whether or not they are linked directly with the Library itself.
+
+ 6. As an exception to the Sections above, you may also combine or
+link a "work that uses the Library" with the Library to produce a
+work containing portions of the Library, and distribute that work
+under terms of your choice, provided that the terms permit
+modification of the work for the customer's own use and reverse
+engineering for debugging such modifications.
+
+ You must give prominent notice with each copy of the work that the
+Library is used in it and that the Library and its use are covered by
+this License. You must supply a copy of this License. If the work
+during execution displays copyright notices, you must include the
+copyright notice for the Library among them, as well as a reference
+directing the user to the copy of this License. Also, you must do one
+of these things:
+
+ a) Accompany the work with the complete corresponding
+ machine-readable source code for the Library including whatever
+ changes were used in the work (which must be distributed under
+ Sections 1 and 2 above); and, if the work is an executable linked
+ with the Library, with the complete machine-readable "work that
+ uses the Library", as object code and/or source code, so that the
+ user can modify the Library and then relink to produce a modified
+ executable containing the modified Library. (It is understood
+ that the user who changes the contents of definitions files in the
+ Library will not necessarily be able to recompile the application
+ to use the modified definitions.)
+
+ b) Use a suitable shared library mechanism for linking with the
+ Library. A suitable mechanism is one that (1) uses at run time a
+ copy of the library already present on the user's computer system,
+ rather than copying library functions into the executable, and (2)
+ will operate properly with a modified version of the library, if
+ the user installs one, as long as the modified version is
+ interface-compatible with the version that the work was made with.
+
+ c) Accompany the work with a written offer, valid for at least
+ three years, to give the same user the materials specified in
+ Subsection 6a, above, for a charge no more than the cost of
+ performing this distribution.
+
+ d) If distribution of the work is made by offering access to copy
+ from a designated place, offer equivalent access to copy the above
+ specified materials from the same place.
+
+ e) Verify that the user has already received a copy of these
+ materials or that you have already sent this user a copy.
+
+ For an executable, the required form of the "work that uses the
+Library" must include any data and utility programs needed for
+reproducing the executable from it. However, as a special exception,
+the materials to be distributed need not include anything that is
+normally distributed (in either source or binary form) with the major
+components (compiler, kernel, and so on) of the operating system on
+which the executable runs, unless that component itself accompanies
+the executable.
+
+ It may happen that this requirement contradicts the license
+restrictions of other proprietary libraries that do not normally
+accompany the operating system. Such a contradiction means you cannot
+use both them and the Library together in an executable that you
+distribute.
+
+ 7. You may place library facilities that are a work based on the
+Library side-by-side in a single library together with other library
+facilities not covered by this License, and distribute such a combined
+library, provided that the separate distribution of the work based on
+the Library and of the other library facilities is otherwise
+permitted, and provided that you do these two things:
+
+ a) Accompany the combined library with a copy of the same work
+ based on the Library, uncombined with any other library
+ facilities. This must be distributed under the terms of the
+ Sections above.
+
+ b) Give prominent notice with the combined library of the fact
+ that part of it is a work based on the Library, and explaining
+ where to find the accompanying uncombined form of the same work.
+
+ 8. You may not copy, modify, sublicense, link with, or distribute
+the Library except as expressly provided under this License. Any
+attempt otherwise to copy, modify, sublicense, link with, or
+distribute the Library is void, and will automatically terminate your
+rights under this License. However, parties who have received copies,
+or rights, from you under this License will not have their licenses
+terminated so long as such parties remain in full compliance.
+
+ 9. You are not required to accept this License, since you have not
+signed it. However, nothing else grants you permission to modify or
+distribute the Library or its derivative works. These actions are
+prohibited by law if you do not accept this License. Therefore, by
+modifying or distributing the Library (or any work based on the
+Library), you indicate your acceptance of this License to do so, and
+all its terms and conditions for copying, distributing or modifying
+the Library or works based on it.
+
+ 10. Each time you redistribute the Library (or any work based on the
+Library), the recipient automatically receives a license from the
+original licensor to copy, distribute, link with or modify the Library
+subject to these terms and conditions. You may not impose any further
+restrictions on the recipients' exercise of the rights granted herein.
+You are not responsible for enforcing compliance by third parties with
+this License.
+
+ 11. If, as a consequence of a court judgment or allegation of patent
+infringement or for any other reason (not limited to patent issues),
+conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot
+distribute so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you
+may not distribute the Library at all. For example, if a patent
+license would not permit royalty-free redistribution of the Library by
+all those who receive copies directly or indirectly through you, then
+the only way you could satisfy both it and this License would be to
+refrain entirely from distribution of the Library.
+
+If any portion of this section is held invalid or unenforceable under
+any particular circumstance, the balance of the section is intended to
+apply, and the section as a whole is intended to apply in other
+circumstances.
+
+It is not the purpose of this section to induce you to infringe any
+patents or other property right claims or to contest validity of any
+such claims; this section has the sole purpose of protecting the
+integrity of the free software distribution system which is
+implemented by public license practices. Many people have made
+generous contributions to the wide range of software distributed
+through that system in reliance on consistent application of that
+system; it is up to the author/donor to decide if he or she is willing
+to distribute software through any other system and a licensee cannot
+impose that choice.
+
+This section is intended to make thoroughly clear what is believed to
+be a consequence of the rest of this License.
+
+ 12. If the distribution and/or use of the Library is restricted in
+certain countries either by patents or by copyrighted interfaces, the
+original copyright holder who places the Library under this License
+may add an explicit geographical distribution limitation excluding those
+countries, so that distribution is permitted only in or among
+countries not thus excluded. In such case, this License incorporates
+the limitation as if written in the body of this License.
+
+ 13. The Free Software Foundation may publish revised and/or new
+versions of the Lesser General Public License from time to time.
+Such new versions will be similar in spirit to the present version,
+but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Library
+specifies a version number of this License which applies to it and
+"any later version", you have the option of following the terms and
+conditions either of that version or of any later version published by
+the Free Software Foundation. If the Library does not specify a
+license version number, you may choose any version ever published by
+the Free Software Foundation.
+
+ 14. If you wish to incorporate parts of the Library into other free
+programs whose distribution conditions are incompatible with these,
+write to the author to ask for permission. For software which is
+copyrighted by the Free Software Foundation, write to the Free
+Software Foundation; we sometimes make exceptions for this. Our
+decision will be guided by the two goals of preserving the free status
+of all derivatives of our free software and of promoting the sharing
+and reuse of software generally.
+
+ NO WARRANTY
+
+ 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO
+WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
+EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
+OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY
+KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
+LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME
+THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
+WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
+AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU
+FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
+CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
+LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
+RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
+FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
+SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+DAMAGES.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Libraries
+
+ If you develop a new library, and you want it to be of the greatest
+possible use to the public, we recommend making it free software that
+everyone can redistribute and change. You can do so by permitting
+redistribution under these terms (or, alternatively, under the terms
+of the ordinary General Public License).
+
+ To apply these terms, attach the following notices to the library.
+It is safest to attach them to the start of each source file to most
+effectively convey the exclusion of warranty; and each file should
+have at least the "copyright" line and a pointer to where the full
+notice is found.
+
+
+    <one line to give the library's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with this library; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+Also add information on how to contact you by electronic and paper mail.
+
+You should also get your employer (if you work as a programmer) or
+your school, if any, to sign a "copyright disclaimer" for the library,
+if necessary. Here is a sample; alter the names:
+
+ Yoyodyne, Inc., hereby disclaims all copyright interest in the
+ library `Frob' (a library for tweaking knobs) written by James
+ Random Hacker.
+
+  <signature of Ty Coon>, 1 April 1990
+ Ty Coon, President of Vice
+
+That's all there is to it!
+
+
diff -r 1400aee10df4 -r faf279e33298 MANIFEST.in
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/MANIFEST.in Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,73 @@
+include README
+include README.pyramid.rst
+include COPYING
+include COPYING.LESSER
+include pylintrc
+include jshintrc
+include tox.ini
+include bin/cubicweb-*
+include man/cubicweb-ctl.1
+
+include doc/*.rst
+include doc/*.txt
+include doc/Makefile
+recursive-include doc/book *
+recursive-include doc/tools *.py
+recursive-include doc/tutorials *.rst *.py
+recursive-include doc/api *.rst
+recursive-include doc/_themes *
+recursive-include doc/_static *
+include doc/_templates/*.html
+include doc/changes/*.rst
+recursive-include doc/dev *.txt *.rst
+recursive-include doc/images *.png *.svg
+include doc/conf.py
+
+include cubicweb/devtools/fix_po_encoding
+
+recursive-include cubicweb/misc *.py *.png *.display
+
+include cubicweb/web/views/*.pt
+recursive-include cubicweb/web/data external_resources *.js *.css *.py *.png *.gif *.ico *.ttf *.svg *.woff *.eot
+recursive-include cubicweb/web/wdoc *.rst *.png *.xml ChangeLog*
+recursive-include cubicweb/devtools/data *.js *.css *.sh
+
+recursive-include cubicweb/i18n *.pot *.po
+recursive-include cubicweb/schemas *.py *.sql
+
+recursive-include requirements *.txt
+
+recursive-include cubicweb/test/data bootstrap_cubes *.py *.sql
+recursive-include cubicweb/test/data-rewrite bootstrap_cubes *.py
+recursive-include cubicweb/test/data_schemareader *.py
+recursive-include cubicweb/dataimport/test/data *.py *.csv *.txt
+recursive-include cubicweb/dataimport/test/data-massimport *.py
+recursive-include cubicweb/devtools/test/data bootstrap_cubes *.py *.txt *.js *.po.ref
+recursive-include cubicweb/entities/test/data bootstrap_cubes *.py
+recursive-include cubicweb/etwist/test/data *.py
+recursive-include cubicweb/ext/test/data *.py
+recursive-include cubicweb/hooks/test/data-computed *.py
+recursive-include cubicweb/hooks/test/data bootstrap_cubes *.py
+recursive-include cubicweb/pyramid/test/data bootstrap_cubes
+recursive-include cubicweb/sobjects/test/data bootstrap_cubes *.py
+recursive-include cubicweb/server/test/data bootstrap_cubes *.py source* *.conf.in *.ldif
+recursive-include cubicweb/server/test/data-cwep002 *.py
+recursive-include cubicweb/server/test/datacomputed *.py
+recursive-include cubicweb/server/test/data-schema2sql bootstrap_cubes toignore
+recursive-include cubicweb/server/test/data-migractions bootstrap_cubes *.py
+recursive-include cubicweb/server/test/data-schemaserial *.py
+include cubicweb/web/test/testutils.js
+recursive-include cubicweb/web/test/data bootstrap_cubes pouet.css *.py
+recursive-include cubicweb/web/test/data/static/jstests *.js *.html *.css *.json
+recursive-include cubicweb/web/test/windmill *.py
+
+include cubicweb/web/data/jquery-treeview/*.md
+
+recursive-include cubicweb/skeleton *.py *.css *.js *.po compat *.in *.tmpl rules tox.ini
+
+prune doc/book/en/.static
+prune doc/book/fr/.static
+prune doc/html/_sources
+prune cubicweb/misc/cwfs
+prune doc/js_api
+global-exclude *.pyc
diff -r 1400aee10df4 -r faf279e33298 README
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/README Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,37 @@
+CubicWeb semantic web framework
+===============================
+
+CubicWeb is an entities / relations based knowledge management system
+developed at Logilab.
+
+This package contains:
+
+- a repository server
+- a RQL command line client to the repository
+- an adaptive modpython interface to the server
+- a bunch of other management tools
+
+Install
+-------
+
+More details at https://docs.cubicweb.org/book/admin/setup
+
+Getting started
+---------------
+
+Execute::
+
+ apt-get install cubicweb cubicweb-dev cubicweb-blog
+ cubicweb-ctl create blog myblog
+ cubicweb-ctl start -D myblog
+ sensible-browser http://localhost:8080/
+
+Details at https://docs.cubicweb.org/tutorials/base/blog-in-five-minutes
+
+Documentation
+-------------
+
+Look in the doc/ subdirectory or read https://docs.cubicweb.org/
+
+
+CubicWeb includes the Entypo pictograms by Daniel Bruce — www.entypo.com
diff -r 1400aee10df4 -r faf279e33298 README.pyramid.rst
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/README.pyramid.rst Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,85 @@
+
+pyramid_cubicweb_ is one specific way of integrating CubicWeb_ with a
+Pyramid_ web application.
+
+Features
+========
+
+* provides a default route that let a cubicweb instance handle the request.
+
+Usage
+=====
+
+To use, install ``pyramid_cubicweb`` in your python environment, and
+then include_ the package::
+
+ config.include('pyramid_cubicweb')
+
+
+Configuration
+=============
+
+Requires the following `INI setting / environment variable`_:
+
+* `cubicweb.instance` / `CW_INSTANCE`: the cubicweb instance name
+
+Authentication cookies
+----------------------
+
+When using the `pyramid_cubicweb.auth` (CubicWeb AuthTkt
+authentication policy), which is the default in most cases, you may
+have to configure the behaviour of these authentication policies using
+standard's Pyramid configuration. You may want to configure in your
+``pyramid.ini``:
+
+:Session Authentication:
+
+ This is a `AuthTktAuthenticationPolicy`_ so you may overwrite default
+ configuration values by adding configuration entries using the prefix
+ ``cubicweb.auth.authtkt.session``. Default values are:
+
+ ::
+
+ cubicweb.auth.authtkt.session.hashalg = sha512
+ cubicweb.auth.authtkt.session.cookie_name = auth_tkt
+ cubicweb.auth.authtkt.session.timeout = 1200
+ cubicweb.auth.authtkt.session.reissue_time = 120
+ cubicweb.auth.authtkt.session.http_only = True
+ cubicweb.auth.authtkt.session.secure = True
+
+
+:Persistent Authentication:
+
+ This is also a `AuthTktAuthenticationPolicy`_. It is used when persistent
+ sessions are activated (typically when using the cubicweb-rememberme_
+ cube). You may overwrite default configuration values by adding
+ configuration entries using the prefix
+ ``cubicweb.auth.authtkt.persistent``. Default values are:
+
+ ::
+
+ cubicweb.auth.authtkt.persistent.hashalg = sha512
+ cubicweb.auth.authtkt.persistent.cookie_name = pauth_tkt
+ cubicweb.auth.authtkt.persistent.max_age = 3600*24*30
+ cubicweb.auth.authtkt.persistent.reissue_time = 3600*24
+ cubicweb.auth.authtkt.persistent.http_only = True
+ cubicweb.auth.authtkt.persistent.secure = True
+
+
+.. Warning:: Legacy timeout values from the instance's
+ ``all-in-one.conf`` are **not** used at all (``
+ http-session-time`` and ``cleanup-session-time``)
+
+Please refer to the documentation_ for more details (available in the
+``docs`` directory of the source code).
+
+.. _pyramid_cubicweb: https://www.cubicweb.org/project/pyramid-cubicweb
+.. _CubicWeb: https://www.cubicweb.org/
+.. _`cubicweb-rememberme`: \
+ https://www.cubicweb.org/project/cubicweb-rememberme
+.. _Pyramid: http://pypi.python.org/pypi/pyramid
+.. _include: http://docs.pylonsproject.org/projects/pyramid/en/latest/api/config.html#pyramid.config.Configurator.include
+.. _`INI setting / environment variable`: http://docs.pylonsproject.org/projects/pyramid/en/latest/narr/environment.html#adding-a-custom-setting
+.. _documentation: http://pyramid-cubicweb.readthedocs.org/
+.. _AuthTktAuthenticationPolicy: \
+ http://docs.pylonsproject.org/projects/pyramid/en/latest/api/authentication.html#pyramid.authentication.AuthTktAuthenticationPolicy
diff -r 1400aee10df4 -r faf279e33298 README.rst
--- a/README.rst Thu Jul 07 14:30:32 2016 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,85 +0,0 @@
-
-pyramid_cubicweb_ is one specific way of integrating CubicWeb_ with a
-Pyramid_ web application.
-
-Features
-========
-
-* provides a default route that let a cubicweb instance handle the request.
-
-Usage
-=====
-
-To use, install ``pyramid_cubicweb`` in your python environment, and
-then include_ the package::
-
- config.include('pyramid_cubicweb')
-
-
-Configuration
-=============
-
-Requires the following `INI setting / environment variable`_:
-
-* `cubicweb.instance` / `CW_INSTANCE`: the cubicweb instance name
-
-Authentication cookies
-----------------------
-
-When using the `pyramid_cubicweb.auth` (CubicWeb AuthTkt
-authentication policy), which is the default in most cases, you may
-have to configure the behaviour of these authentication policies using
-standard's Pyramid configuration. You may want to configure in your
-``pyramid.ini``:
-
-:Session Authentication:
-
- This is a `AuthTktAuthenticationPolicy`_ so you may overwrite default
- configuration values by adding configuration entries using the prefix
- ``cubicweb.auth.authtkt.session``. Default values are:
-
- ::
-
- cubicweb.auth.authtkt.session.hashalg = sha512
- cubicweb.auth.authtkt.session.cookie_name = auth_tkt
- cubicweb.auth.authtkt.session.timeout = 1200
- cubicweb.auth.authtkt.session.reissue_time = 120
- cubicweb.auth.authtkt.session.http_only = True
- cubicweb.auth.authtkt.session.secure = True
-
-
-:Persistent Authentication:
-
- This is also a `AuthTktAuthenticationPolicy`_. It is used when persistent
- sessions are activated (typically when using the cubicweb-rememberme_
- cube). You may overwrite default configuration values by adding
- configuration entries using the prefix
- ``cubicweb.auth.authtkt.persistent``. Default values are:
-
- ::
-
- cubicweb.auth.authtkt.persistent.hashalg = sha512
- cubicweb.auth.authtkt.persistent.cookie_name = pauth_tkt
- cubicweb.auth.authtkt.persistent.max_age = 3600*24*30
- cubicweb.auth.authtkt.persistent.reissue_time = 3600*24
- cubicweb.auth.authtkt.persistent.http_only = True
- cubicweb.auth.authtkt.persistent.secure = True
-
-
-.. Warning:: Legacy timeout values from the instance's
- ``all-in-one.conf`` are **not** used at all (``
- http-session-time`` and ``cleanup-session-time``)
-
-Please refer to the documentation_ for more details (available in the
-``docs`` directory of the source code).
-
-.. _pyramid_cubicweb: https://www.cubicweb.org/project/pyramid-cubicweb
-.. _CubicWeb: https://www.cubicweb.org/
-.. _`cubicweb-rememberme`: \
- https://www.cubicweb.org/project/cubicweb-rememberme
-.. _Pyramid: http://pypi.python.org/pypi/pyramid
-.. _include: http://docs.pylonsproject.org/projects/pyramid/en/latest/api/config.html#pyramid.config.Configurator.include
-.. _`INI setting / environment variable`: http://docs.pylonsproject.org/projects/pyramid/en/latest/narr/environment.html#adding-a-custom-setting
-.. _documentation: http://pyramid-cubicweb.readthedocs.org/
-.. _AuthTktAuthenticationPolicy: \
- http://docs.pylonsproject.org/projects/pyramid/en/latest/api/authentication.html#pyramid.authentication.AuthTktAuthenticationPolicy
diff -r 1400aee10df4 -r faf279e33298 TODO.rst
--- a/TODO.rst Thu Jul 07 14:30:32 2016 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,134 +0,0 @@
-Next steps
-----------
-
-- finish what was started :
-
- - bypass publisher.
- - tighten the error handling and get a well-behaved application
- - provide sane default policies that match current cubicweb behavior.
-
-- identify what can be done without pushing the 'pyramid way' into cubicweb (as
- a first step for future evolutions).
-
-
-Provide a ctl command
-~~~~~~~~~~~~~~~~~~~~~
-
-Add a 'pyramid' command for cubicweb-ctl that starts a cubicweb instance within
-a pyramid container.
-
-Transactions
-~~~~~~~~~~~~
-
-A common transaction handling mechanism should be used so that the connexion
-can be safely used in both pyramid and cubicweb.
-
-Reimplement the base controllers of cw
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-- rest
-- static
-- data
-
-Bypass cw.handle_request in most case
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Use it only when no other mean works, which should provide backward compat of
-old cubes for a while.
-
-
-Views
------
-
-Goal: Have Cubicweb Views selected by pyramid.
-
-The selection behavior should be consistent with the cw predicates weight based
-priority system.
-
-Several approaches should be studied, some less integrated than others.
-
-Use a ViewMapper
-~~~~~~~~~~~~~~~~
-
-Here, the idea is to register a single pseudo view for each view __regid__
-present in the CW registry.
-
-The view mapper associated with these pseudo views would do a view lookup on
-the CW registry first, then call it for rendering.
-
-Pros
- * Easy to implement
-
-Cons
- * Need to keep two registries in the long term
- * Two phases lookup: once in pyramid, once in CW.
- * A lookup is performed when pyramid assumes it is finished and
- successful, which means we do not respect the pyramid API (A
- ViewMapper is just supposed to render an already selected view)
- * CW views are not registered directly by pyramid
-
-I (Christophe) don't like this solution because it is too much of a workaround
-and we would not use the pyramid API, just wrapping stuffs.
-
-
-Use a custom IMultiView
-~~~~~~~~~~~~~~~~~~~~~~~
-
-Implements a IMultiView (see pyramid.config.views.MultiView) that lookups in
-the CW registry in hits __discriminator__.
-
-One instance of this class would be registered for each __regid__, like with
-the ViewMapper-based solution.
-
-Pros
- * Not too difficult to implement
- * Respect more the pyramid API: the lookup is performed at a moment it is
- expected by pyramid. In the end, pyramid will know the right view, and
- any other system looking up for a view will find an actual one, not a
- pseudo one.
-
-Cons
- * The CW views are not registered directly in pyramid
- * Still doing two lookups in two different registries.
-
-
-Use CW predicates in add_view (basic)
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Here we add a "cwselect" predicate to pyramid, that makes it able to evaluate
-the cubicweb predicates.
-
-Pros
- * We by-pass the CW registry
-
-
-Cons
- * We loose the cw predicate weigths
-
-
-Use CW predicates in add_view + total ordering
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Here we choose to drop the runtime evaluation of the predicates weight.
-
-Instead, we evaluate the weight of a predicate when it matches, and use that to
-sort the views in the registry.
-
-This would need only a slight change of the pyramid MultiView, which would sort
-the views in this new order we compute instead of the default one.
-
-To use this system, we would need to duplicate the view registering when the
-expression has some "or" operators in it. The idea is to obtain 'and-only'
-predicate expressions for add_view.
-
-The only blocking point against that would be if some actual cw predicates
-returns a variable weight depending on the context, because it would make it
-impossible to pre-evaluate an expression weight if it matches.
-
-Use CW predicates in add_view + cw predicate weight
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Add runtine evalution of predicate weigths into pyramid.
-
-No real clue on how we can to that (yet), although it will most probably
-involve changes in MultiView.
diff -r 1400aee10df4 -r faf279e33298 __pkginfo__.py
--- a/__pkginfo__.py Thu Jul 07 14:30:32 2016 +0200
+++ b/__pkginfo__.py Mon Sep 26 14:52:12 2016 +0200
@@ -1,1 +1,1 @@
-modname = 'pyramid_cubicweb'
+cubicweb/__pkginfo__.py
\ No newline at end of file
diff -r 1400aee10df4 -r faf279e33298 bin/cubicweb-ctl
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/bin/cubicweb-ctl Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,4 @@
+#!/usr/bin/env python
+from cubicweb.cwctl import run
+import sys
+run(sys.argv[1:])
diff -r 1400aee10df4 -r faf279e33298 bin/cubicweb-ctl.bat
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/bin/cubicweb-ctl.bat Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,20 @@
+@echo off
+rem = """-*-Python-*- script
+rem -------------------- DOS section --------------------
+rem You could set PYTHONPATH or TK environment variables here
+python -x "%~f0" %*
+goto exit
+
+"""
+# -------------------- Python section --------------------
+import sys
+from os.path import join, dirname, normpath
+sys.path.insert(0, normpath(join(dirname(__file__), '..', '..')))
+from cubicweb.cwctl import run
+run(sys.argv[1:])
+
+DosExitLabel = """
+:exit
+rem """
+
+
diff -r 1400aee10df4 -r faf279e33298 cubicweb.spec
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb.spec Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,63 @@
+%if 0%{?el5}
+%define python python26
+%define __python /usr/bin/python2.6
+%else
+%define python python
+%define __python /usr/bin/python
+%endif
+%{!?python_sitelib: %define python_sitelib %(%{__python} -c "from distutils.sysconfig import get_python_lib; print get_python_lib()")}
+
+Name: cubicweb
+Version: 3.23.1
+Release: logilab.1%{?dist}
+Summary: CubicWeb is a semantic web application framework
+Source0: https://pypi.python.org/packages/source/c/cubicweb/cubicweb-%{version}.tar.gz
+License: LGPLv2+
+Group: Development/Languages/Python
+Vendor: Logilab
+Url: https://www.cubicweb.org/project/cubicweb
+
+BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-buildroot
+BuildArch: noarch
+
+Requires: %{python}
+Requires: %{python}-six >= 1.4.0
+Requires: %{python}-logilab-common >= 1.2.2
+Requires: %{python}-logilab-mtconverter >= 0.8.0
+Requires: %{python}-rql >= 0.34.0
+Requires: %{python}-yams >= 0.44.0
+Requires: %{python}-logilab-database >= 1.15.0
+Requires: %{python}-passlib
+Requires: %{python}-lxml
+Requires: %{python}-twisted-web < 16.0.0
+Requires: %{python}-markdown
+Requires: pytz
+# the schema view uses `dot'; at least on el5, png output requires graphviz-gd
+Requires: graphviz-gd
+Requires: gettext
+
+BuildRequires: %{python}
+
+%description
+a repository of entities / relations for knowledge management
+
+%prep
+%setup -q
+%if 0%{?el5}
+# change the python version in shebangs
+find . -name '*.py' -type f -print0 | xargs -0 sed -i '1,3s;^#!.*python.*$;#! /usr/bin/python2.6;'
+%endif
+
+%install
+%{__python} setup.py --quiet install --no-compile --prefix=%{_prefix} --root="$RPM_BUILD_ROOT"
+mkdir -p $RPM_BUILD_ROOT/var/log/cubicweb
+
+%clean
+rm -rf $RPM_BUILD_ROOT
+
+%files
+%defattr(-, root, root)
+%dir /var/log/cubicweb
+%{_prefix}/share/cubicweb/*
+%{python_sitelib}/*
+%{_bindir}/*
diff -r 1400aee10df4 -r faf279e33298 cubicweb/__init__.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/__init__.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,308 @@
+# copyright 2003-2016 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""CubicWeb is a generic framework to quickly build applications which describes
+relations between entitites.
+"""
+__docformat__ = "restructuredtext en"
+
+import imp
+import logging
+import os
+import pickle
+import pkgutil
+import sys
+import warnings
+import zlib
+
+warnings.filterwarnings('ignore', category=UserWarning,
+ message='.*was already imported',
+ module='.*pygments')
+
+
+from six import PY2, binary_type, text_type
+from six.moves import builtins
+
+if PY2:
+ # http://bugs.python.org/issue10211
+ from StringIO import StringIO as BytesIO
+else:
+ from io import BytesIO
+
+from logilab.common.deprecation import deprecated
+from logilab.common.logging_ext import set_log_methods
+from yams.constraints import BASE_CONVERTERS, BASE_CHECKERS
+
+# ignore the pygments UserWarnings
+warnings.filterwarnings('ignore', category=UserWarning,
+ message='.*was already imported',
+ module='.*pygments')
+
+# pre python 2.7.2 safety
+logging.basicConfig()
+
+# this is necessary for i18n devtools test where chdir is done while __path__ is relative, which
+# breaks later imports
+__path__[0] = os.path.abspath(__path__[0])
+CW_SOFTWARE_ROOT = __path__[0]
+
+
+from cubicweb.__pkginfo__ import version as __version__ # noqa
+
+
+set_log_methods(sys.modules[__name__], logging.getLogger('cubicweb'))
+
+# make all exceptions accessible from the package
+from cubicweb._exceptions import * # noqa
+from logilab.common.registry import ObjectNotFound, NoSelectableObject, RegistryNotFound # noqa
+
+
+# '_' is available to mark internationalized string but should not be used to
+# do the actual translation
+_ = text_type
+if not hasattr(builtins, '_'):
+ builtins._ = deprecated("[3.22] Use 'from cubicweb import _'")(_)
+
+
+# convert eid to the right type, raise ValueError if it's not a valid eid
+@deprecated('[3.17] typed_eid() was removed. replace it with int() when needed.')
+def typed_eid(eid):
+ return int(eid)
+
+
+class Binary(BytesIO):
+ """class to hold binary data. Use BytesIO to prevent use of unicode data"""
+ _allowed_types = (binary_type, bytearray, buffer if PY2 else memoryview)
+
+ def __init__(self, buf=b''):
+ assert isinstance(buf, self._allowed_types), \
+ "Binary objects must use bytes/buffer objects, not %s" % buf.__class__
+ # don't call super, BytesIO may be an old-style class (on python < 2.7.4)
+ BytesIO.__init__(self, buf)
+
+ def write(self, data):
+ assert isinstance(data, self._allowed_types), \
+ "Binary objects must use bytes/buffer objects, not %s" % data.__class__
+ # don't call super, BytesIO may be an old-style class (on python < 2.7.4)
+ BytesIO.write(self, data)
+
+ def to_file(self, fobj):
+ """write a binary to disk
+
+ the writing is performed in a safe way for files stored on
+ Windows SMB shares
+ """
+ pos = self.tell()
+ self.seek(0)
+ if sys.platform == 'win32':
+ while True:
+ # the 16kB chunksize comes from the shutil module
+ # in stdlib
+ chunk = self.read(16 * 1024)
+ if not chunk:
+ break
+ fobj.write(chunk)
+ else:
+ fobj.write(self.read())
+ self.seek(pos)
+
+ @staticmethod
+ def from_file(filename):
+ """read a file and returns its contents in a Binary
+
+ the reading is performed in a safe way for files stored on
+ Windows SMB shares
+ """
+ binary = Binary()
+ with open(filename, 'rb') as fobj:
+ if sys.platform == 'win32':
+ while True:
+ # the 16kB chunksize comes from the shutil module
+ # in stdlib
+ chunk = fobj.read(16 * 1024)
+ if not chunk:
+ break
+ binary.write(chunk)
+ else:
+ binary.write(fobj.read())
+ binary.seek(0)
+ return binary
+
+ def __eq__(self, other):
+ if not isinstance(other, Binary):
+ return False
+ return self.getvalue() == other.getvalue()
+
+ # Binary helpers to store/fetch python objects
+
+ @classmethod
+ def zpickle(cls, obj):
+ """ return a Binary containing a gzipped pickle of obj """
+ retval = cls()
+ retval.write(zlib.compress(pickle.dumps(obj, protocol=2)))
+ return retval
+
+ def unzpickle(self):
+ """ decompress and loads the stream before returning it """
+ return pickle.loads(zlib.decompress(self.getvalue()))
+
+
+def check_password(eschema, value):
+ return isinstance(value, (binary_type, Binary))
+BASE_CHECKERS['Password'] = check_password
+
+
+def str_or_binary(value):
+ if isinstance(value, Binary):
+ return value
+ return binary_type(value)
+BASE_CONVERTERS['Password'] = str_or_binary
+
+
+# use this dictionary to rename entity types while keeping bw compat
+ETYPE_NAME_MAP = {}
+
+# XXX cubic web cube migration map. See if it's worth keeping this mechanism
+# to help in cube renaming
+CW_MIGRATION_MAP = {}
+
+
+def neg_role(role):
+ if role == 'subject':
+ return 'object'
+ return 'subject'
+
+
+def role(obj):
+ try:
+ return obj.role
+ except AttributeError:
+ return neg_role(obj.target)
+
+
+def target(obj):
+ try:
+ return obj.target
+ except AttributeError:
+ return neg_role(obj.role)
+
+
+class CubicWebEventManager(object):
+ """simple event / callback manager.
+
+ Typical usage to register a callback::
+
+ >>> from cubicweb import CW_EVENT_MANAGER
+ >>> CW_EVENT_MANAGER.bind('after-registry-reload', mycallback)
+
+ Typical usage to emit an event::
+
+ >>> from cubicweb import CW_EVENT_MANAGER
+ >>> CW_EVENT_MANAGER.emit('after-registry-reload')
+
+ emit() accepts an additional context parameter that will be passed
+ to the callback if specified (and only in that case)
+ """
+ def __init__(self):
+ self.callbacks = {}
+
+ def bind(self, event, callback, *args, **kwargs):
+ self.callbacks.setdefault(event, []).append((callback, args, kwargs))
+
+ def emit(self, event, context=None):
+ for callback, args, kwargs in self.callbacks.get(event, ()):
+ if context is None:
+ callback(*args, **kwargs)
+ else:
+ callback(context, *args, **kwargs)
+
+CW_EVENT_MANAGER = CubicWebEventManager()
+
+
+def onevent(event, *args, **kwargs):
+ """decorator to ease event / callback binding
+
+ >>> from cubicweb import onevent
+ >>> @onevent('before-registry-reload')
+ ... def mycallback():
+ ... print 'hello'
+ ...
+ >>>
+ """
+ def _decorator(func):
+ CW_EVENT_MANAGER.bind(event, func, *args, **kwargs)
+ return func
+ return _decorator
+
+
+from yams.schema import role_name as rname
+
+
+def validation_error(entity, errors, substitutions=None, i18nvalues=None):
+ """easy way to retrieve a :class:`cubicweb.ValidationError` for an entity or eid.
+
+ You may also have 2-tuple as error keys, :func:`yams.role_name` will be
+ called automatically for them.
+
+ Messages in errors **should not be translated yet**, though marked for
+ internationalization. You may give an additional substitution dictionary that
+ will be used for interpolation after the translation.
+ """
+ if substitutions is None:
+ # set empty dict else translation won't be done for backward
+ # compatibility reason (see ValidationError.translate method)
+ substitutions = {}
+ for key in list(errors):
+ if isinstance(key, tuple):
+ errors[rname(*key)] = errors.pop(key)
+ return ValidationError(getattr(entity, 'eid', entity), errors,
+ substitutions, i18nvalues)
+
+
+# exceptions ##################################################################
+
+class ProgrammingError(Exception):
+ """Exception raised for errors that are related to the database's operation
+ and not necessarily under the control of the programmer, e.g. an unexpected
+ disconnect occurs, the data source name is not found, a transaction could
+ not be processed, a memory allocation error occurred during processing,
+ etc.
+ """
+
+
+# Import hook for "legacy" cubes ##############################################
+
+class _CubesImporter(object):
+ """Module finder handling redirection of import of "cubes."
+ to "cubicweb_".
+ """
+
+ @classmethod
+ def install(cls):
+ if not any(isinstance(x, cls) for x in sys.meta_path):
+ self = cls()
+ sys.meta_path.append(self)
+
+ def find_module(self, fullname, path=None):
+ if fullname.startswith('cubes.'):
+ modname = 'cubicweb_' + fullname.split('.', 1)[1]
+ try:
+ modinfo = imp.find_module(modname)
+ except ImportError:
+ return None
+ else:
+ return pkgutil.ImpLoader(fullname, *modinfo)
diff -r 1400aee10df4 -r faf279e33298 cubicweb/__main__.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/__main__.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,4 @@
+from cubicweb.cwctl import run
+import sys
+
+run(sys.argv[1:])
diff -r 1400aee10df4 -r faf279e33298 cubicweb/__pkginfo__.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/__pkginfo__.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,144 @@
+# pylint: disable=W0622,C0103
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""cubicweb global packaging information for the cubicweb knowledge management
+software
+"""
+import sys
+from os import listdir
+from os.path import join, isdir
+import glob
+
+
+modname = distname = "cubicweb"
+
+numversion = (3, 24, 0)
+version = '.'.join(str(num) for num in numversion) + '.dev0'
+
+description = "a repository of entities / relations for knowledge management"
+author = "Logilab"
+author_email = "contact@logilab.fr"
+web = 'https://www.cubicweb.org'
+license = 'LGPL'
+
+classifiers = [
+ 'Environment :: Web Environment',
+ 'Framework :: CubicWeb',
+ 'Programming Language :: Python',
+ 'Programming Language :: JavaScript',
+]
+
+__depends__ = {
+ 'six': '>= 1.4.0',
+ 'logilab-common': '>= 1.2.2',
+ 'logilab-mtconverter': '>= 0.8.0',
+ 'rql': '>= 0.34.0',
+ 'yams': '>= 0.44.0',
+ #gettext # for xgettext, msgcat, etc...
+ # web dependencies
+ 'lxml': '',
+ # XXX graphviz
+ # server dependencies
+ 'logilab-database': '>= 1.15.0',
+ 'passlib': '',
+ 'pytz': '',
+ 'Markdown': '',
+ 'unittest2': '>= 0.7.0',
+ # pyramid dependencies
+ 'pyramid': '>= 1.5.0',
+ 'waitress': '>= 0.8.9',
+ 'wsgicors': '>= 0.3',
+ 'pyramid_multiauth': '',
+ }
+
+__recommends__ = {
+ 'docutils': '>= 0.6',
+ 'Pillow': '', # for captcha
+ 'pycrypto': '', # for crypto extensions
+ 'fyzz': '>= 0.1.0', # for sparql
+ 'vobject': '>= 0.6.0', # for ical view
+ 'rdflib': None, #
+ 'pyzmq': None,
+ 'Twisted': '< 16.0.0',
+ #'Products.FCKeditor':'',
+ #'SimpleTAL':'>= 4.1.6',
+}
+
+scripts = [s for s in glob.glob(join('bin', 'cubicweb-*'))
+ if not s.endswith('.bat')]
+include_dirs = [join('test', 'data'),
+ join('server', 'test', 'data'),
+ join('hooks', 'test', 'data'),
+ join('web', 'test', 'data'),
+ join('devtools', 'data'),
+ join('devtools', 'test', 'data'),
+ 'schemas', 'skeleton']
+
+
+_server_migration_dir = join(modname, 'misc', 'migration')
+_data_dir = join(modname, 'web', 'data')
+_wdoc_dir = join(modname, 'web', 'wdoc')
+_wdocimages_dir = join(_wdoc_dir, 'images')
+_views_dir = join(modname, 'web', 'views')
+_i18n_dir = join(modname, 'i18n')
+
+_pyversion = '.'.join(str(num) for num in sys.version_info[0:2])
+if '--home' in sys.argv:
+ # --home install
+ pydir = 'python' + _pyversion
+else:
+ pydir = join('python' + _pyversion, 'site-packages')
+
+# data files that shall be copied into the main package directory
+package_data = {
+ 'cubicweb.web.views': ['*.pt'],
+}
+
+try:
+ # data files that shall be copied outside the main package directory
+ data_files = [
+ # server data
+ [join('share', 'cubicweb', 'schemas'),
+ glob.glob(join(modname, 'schemas', '*.sql'))],
+ [join('share', 'cubicweb', 'migration'),
+ [join(_server_migration_dir, filename)
+ for filename in listdir(_server_migration_dir)]],
+ # web data
+ [join('share', 'cubicweb', 'cubes', 'shared', 'data'),
+ [join(_data_dir, fname) for fname in listdir(_data_dir)
+ if not isdir(join(_data_dir, fname))]],
+ [join('share', 'cubicweb', 'cubes', 'shared', 'data', 'images'),
+ [join(_data_dir, 'images', fname) for fname in listdir(join(_data_dir, 'images'))]],
+ [join('share', 'cubicweb', 'cubes', 'shared', 'data', 'jquery-treeview'),
+ [join(_data_dir, 'jquery-treeview', fname) for fname in listdir(join(_data_dir, 'jquery-treeview'))
+ if not isdir(join(_data_dir, 'jquery-treeview', fname))]],
+ [join('share', 'cubicweb', 'cubes', 'shared', 'data', 'jquery-treeview', 'images'),
+ [join(_data_dir, 'jquery-treeview', 'images', fname)
+ for fname in listdir(join(_data_dir, 'jquery-treeview', 'images'))]],
+ [join('share', 'cubicweb', 'cubes', 'shared', 'wdoc'),
+ [join(_wdoc_dir, fname) for fname in listdir(_wdoc_dir)
+ if not isdir(join(_wdoc_dir, fname))]],
+ [join('share', 'cubicweb', 'cubes', 'shared', 'wdoc', 'images'),
+ [join(_wdocimages_dir, fname) for fname in listdir(_wdocimages_dir)]],
+ [join('share', 'cubicweb', 'cubes', 'shared', 'i18n'),
+ glob.glob(join(_i18n_dir, '*.po'))],
+ # skeleton
+ ]
+except OSError:
+ # we are in an installed directory, don't care about this
+ pass
diff -r 1400aee10df4 -r faf279e33298 cubicweb/_exceptions.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/_exceptions.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,209 @@
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""Exceptions shared by different cubicweb packages."""
+
+__docformat__ = "restructuredtext en"
+
+from warnings import warn
+
+from six import PY3, text_type
+
+from logilab.common.decorators import cachedproperty
+
+from yams import ValidationError
+
+# abstract exceptions #########################################################
+
+class CubicWebException(Exception):
+ """base class for cubicweb server exception"""
+ msg = ""
+ def __unicode__(self):
+ if self.msg:
+ if self.args:
+ return self.msg % tuple(self.args)
+ else:
+ return self.msg
+ else:
+ return u' '.join(text_type(arg) for arg in self.args)
+ __str__ = __unicode__ if PY3 else lambda self: self.__unicode__().encode('utf-8')
+
+class ConfigurationError(CubicWebException):
+ """a misconfiguration error"""
+
+class InternalError(CubicWebException):
+ """base class for exceptions which should not occur"""
+
+class SecurityError(CubicWebException):
+ """base class for cubicweb server security exceptions"""
+
+class RepositoryError(CubicWebException):
+ """base class for repository exceptions"""
+
+class SourceException(CubicWebException):
+ """base class for source exceptions"""
+
+class CubicWebRuntimeError(CubicWebException):
+ """base class for runtime exceptions"""
+
+# repository exceptions #######################################################
+
+class ConnectionError(RepositoryError):
+ """raised when a bad connection id is given or when an attempt to establish
+ a connection failed
+ """
+
+class AuthenticationError(ConnectionError):
+ """raised when an attempt to establish a connection failed due to wrong
+ connection information (login / password or other authentication token)
+ """
+
+class BadConnectionId(ConnectionError):
+ """raised when a bad connection id is given"""
+
+class UnknownEid(RepositoryError):
+ """the eid is not defined in the system tables"""
+ msg = 'No entity with eid %s in the repository'
+
+class UniqueTogetherError(RepositoryError):
+ """raised when a unique_together constraint caused an IntegrityError"""
+ def __init__(self, session, **kwargs):
+ self.session = session
+ assert 'rtypes' in kwargs or 'cstrname' in kwargs
+ self.kwargs = kwargs
+ # fill cache while the session is open
+ self.rtypes
+
+ @cachedproperty
+ def rtypes(self):
+ if 'rtypes' in self.kwargs:
+ return self.kwargs['rtypes']
+ cstrname = unicode(self.kwargs['cstrname'])
+ cstr = self.session.find('CWUniqueTogetherConstraint', name=cstrname).one()
+ return sorted(rtype.name for rtype in cstr.relations)
+
+ @cachedproperty
+ def args(self):
+ warn('[3.18] UniqueTogetherError.args is deprecated, just use '
+ 'the .rtypes accessor.',
+ DeprecationWarning)
+ # the first argument, etype, is never used and was never guaranteed anyway
+ return None, self.rtypes
+
+
+class ViolatedConstraint(RepositoryError):
+ def __init__(self, cnx, cstrname):
+ self.cnx = cnx
+ self.cstrname = cstrname
+
+
+# security exceptions #########################################################
+
+class Unauthorized(SecurityError):
+ """raised when a user tries to perform an action without sufficient
+ credentials
+ """
+ msg = u'You are not allowed to perform this operation'
+ msg1 = u'You are not allowed to perform %s operation on %s'
+ var = None
+
+ def __unicode__(self):
+ try:
+ if self.args and len(self.args) == 2:
+ return self.msg1 % self.args
+ if self.args:
+ return u' '.join(self.args)
+ return self.msg
+ except Exception as ex:
+ return text_type(ex)
+
+class Forbidden(SecurityError):
+ """raised when a user tries to perform a forbidden action
+ """
+
+# source exceptions ###########################################################
+
+class EidNotInSource(SourceException):
+ """trying to access an object with a particular eid from a particular
+ source has failed
+ """
+ msg = 'No entity with eid %s in %s'
+
+
+# registry exceptions #########################################################
+
+# pre 3.15 bw compat
+from logilab.common.registry import RegistryException, ObjectNotFound, NoSelectableObject
+
+class UnknownProperty(RegistryException):
+ """property found in database but unknown in registry"""
+
+# query exception #############################################################
+
+class QueryError(CubicWebRuntimeError):
+ """a query try to do something it shouldn't"""
+
+class NotAnEntity(CubicWebRuntimeError):
+ """raised when get_entity is called for a column which doesn't contain
+ a non final entity
+ """
+
+class MultipleResultsError(CubicWebRuntimeError):
+ """raised when ResultSet.one() is called on a resultset with multiple rows
+ of multiple columns.
+ """
+
+class NoResultError(CubicWebRuntimeError):
+ """raised when no result is found but at least one is expected.
+ """
+
+class UndoTransactionException(QueryError):
+ """Raised when undoing a transaction could not be performed completely.
+
+ Note that :
+ 1) the partial undo operation might be acceptable
+ depending upon the final application
+
+ 2) the undo operation can also fail with a `ValidationError` in
+ cases where the undoing breaks integrity constraints checked
+ immediately.
+
+ 3) It might be that neither of those exception is raised but a
+ subsequent `commit` might raise a `ValidationError` in cases
+ where the undoing breaks integrity constraints checked at
+ commit time.
+
+ :type txuuid: int
+ :param txuuid: Unique identifier of the partially undone transaction
+
+ :type errors: list
+ :param errors: List of errors that occurred during undoing
+ """
+ msg = u"The following error(s) occurred while undoing transaction #%d : %s"
+
+ def __init__(self, txuuid, errors):
+ super(UndoTransactionException, self).__init__(txuuid, errors)
+ self.txuuid = txuuid
+ self.errors = errors
+
+# tools exceptions ############################################################
+
+class ExecutionError(Exception):
+ """server execution control error (already started, not running...)"""
+
+# pylint: disable=W0611
+from logilab.common.clcommands import BadCommandUsage
diff -r 1400aee10df4 -r faf279e33298 cubicweb/_gcdebug.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/_gcdebug.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,112 @@
+# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+from __future__ import print_function
+
+import gc, types, weakref
+
+from cubicweb.schema import CubicWebRelationSchema, CubicWebEntitySchema
+try:
+ from cubicweb.web.request import _NeedAuthAccessMock
+except ImportError:
+ _NeedAuthAccessMock = None
+
+listiterator = type(iter([]))
+
+IGNORE_CLASSES = (
+ type, tuple, dict, list, set, frozenset, type(len),
+ weakref.ref, weakref.WeakKeyDictionary,
+ listiterator,
+ property, classmethod,
+ types.ModuleType, types.FunctionType, types.MethodType,
+ types.MemberDescriptorType, types.GetSetDescriptorType,
+ )
+if _NeedAuthAccessMock is not None:
+ IGNORE_CLASSES = IGNORE_CLASSES + (_NeedAuthAccessMock,)
+
+def _get_counted_class(obj, classes):
+ for cls in classes:
+ if isinstance(obj, cls):
+ return cls
+ raise AssertionError()
+
+def gc_info(countclasses,
+ ignoreclasses=IGNORE_CLASSES,
+ viewreferrersclasses=(), showobjs=False, maxlevel=1):
+ gc.collect()
+ gc.collect()
+ counters = {}
+ ocounters = {}
+ for obj in gc.get_objects():
+ if isinstance(obj, countclasses):
+ cls = _get_counted_class(obj, countclasses)
+ try:
+ counters[cls.__name__] += 1
+ except KeyError:
+ counters[cls.__name__] = 1
+ elif not isinstance(obj, ignoreclasses):
+ try:
+ key = '%s.%s' % (obj.__class__.__module__,
+ obj.__class__.__name__)
+ except AttributeError:
+ key = str(obj)
+ try:
+ ocounters[key] += 1
+ except KeyError:
+ ocounters[key] = 1
+ if isinstance(obj, viewreferrersclasses):
+ print(' ', obj, referrers(obj, showobjs, maxlevel))
+ garbage = [repr(obj) for obj in gc.garbage]
+ return counters, ocounters, garbage
+
+
+def referrers(obj, showobj=False, maxlevel=1):
+ objreferrers = _referrers(obj, maxlevel)
+ try:
+ return sorted(set((type(x), showobj and x or getattr(x, '__name__', '%#x' % id(x)))
+ for x in objreferrers))
+ except TypeError:
+ s = set()
+ unhashable = []
+ for x in objreferrers:
+ try:
+ s.add(x)
+ except TypeError:
+ unhashable.append(x)
+ return sorted(s) + unhashable
+
+def _referrers(obj, maxlevel, _seen=None, _level=0):
+ interesting = []
+ if _seen is None:
+ _seen = set()
+ for x in gc.get_referrers(obj):
+ if id(x) in _seen:
+ continue
+ _seen.add(id(x))
+ if isinstance(x, types.FrameType):
+ continue
+ if isinstance(x, (CubicWebRelationSchema, CubicWebEntitySchema)):
+ continue
+ if isinstance(x, (list, tuple, set, dict, listiterator)):
+ if _level >= maxlevel:
+ pass
+ #interesting.append(x)
+ else:
+ interesting += _referrers(x, maxlevel, _seen, _level+1)
+ else:
+ interesting.append(x)
+ return interesting
diff -r 1400aee10df4 -r faf279e33298 cubicweb/appobject.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/appobject.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,161 @@
+# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+The `AppObject` class
+---------------------
+
+The AppObject class is the base class for all dynamically loaded objects
+(application objects) accessible through the vregistry.
+
+We can find a certain number of attributes and methods defined in this class and
+common to all the application objects.
+
+"""
+__docformat__ = "restructuredtext en"
+
+from logging import getLogger
+
+from logilab.common.deprecation import deprecated, class_renamed
+from logilab.common.logging_ext import set_log_methods
+
+# first line imports for bw compat
+from logilab.common.registry import (objectify_predicate, traced_selection, Predicate,
+ RegistrableObject, yes)
+
+
+objectify_selector = deprecated('[3.15] objectify_selector has been '
+ 'renamed to objectify_predicates in '
+ 'logilab.common.registry')(objectify_predicate)
+traced_selection = deprecated('[3.15] traced_selection has been '
+ 'moved to logilab.common.registry')(traced_selection)
+Selector = class_renamed('Selector', Predicate,
+ '[3.15] Selector has been renamed to Predicate '
+ 'in logilab.common.registry')
+
+@deprecated('[3.15] lltrace decorator can now be removed')
+def lltrace(func):
+ return func
+
+# the base class for all appobjects ############################################
+
+class AppObject(RegistrableObject):
+ """This is the base class for CubicWeb application objects which are
+ selected in a request context.
+
+ The following attributes should be set on concrete appobject classes:
+
+ At selection time, the following attributes are set on the instance:
+
+ :attr:`_cw`
+ current request
+ :attr:`cw_extra_kwargs`
+ other received arguments
+
+ And also the following, only if `rset` is found in arguments (in which case
+ rset/row/col will be removed from `cw_extra_kwargs`):
+
+ :attr:`cw_rset`
+ context result set or None
+
+ :attr:`cw_row`
+ if a result set is set and the context is about a particular cell in the
+ result set, and not the result set as a whole, specify the row number we
+ are interested in, else None
+
+ :attr:`cw_col`
+ if a result set is set and the context is about a particular cell in the
+ result set, and not the result set as a whole, specify the col number we
+ are interested in, else None
+
+
+ .. Note::
+
+ * do not inherit directly from this class but from a more specific class
+ such as `AnyEntity`, `EntityView`, `AnyRsetView`, `Action`...
+
+ """
+ __select__ = yes()
+
+ @classmethod
+ def __registered__(cls, registry):
+ """called by the registry when the appobject has been registered.
+
+ It must return the object that will be actually registered (this may be
+ the right hook to create an instance for example). By default the
+ appobject is returned without any transformation.
+ """
+ pdefs = getattr(cls, 'cw_property_defs', {})
+ for propid, pdef in pdefs.items():
+ pdef = pdef.copy() # may be shared
+ pdef['default'] = getattr(cls, propid, pdef['default'])
+ pdef['sitewide'] = getattr(cls, 'site_wide', pdef.get('sitewide'))
+ registry.vreg.register_property(cls._cwpropkey(propid), **pdef)
+ assert callable(cls.__select__), cls
+ return cls
+
+ def __init__(self, req, **extra):
+ super(AppObject, self).__init__()
+ self._cw = req
+ try:
+ self.cw_rset = extra.pop('rset')
+ self.cw_row = extra.pop('row', None)
+ self.cw_col = extra.pop('col', None)
+ except KeyError:
+ pass
+ self.cw_extra_kwargs = extra
+
+ # persistent class properties ##############################################
+ #
+ # optional `cw_property_defs` dict on a class defines available persistent
+ # properties for this class:
+ #
+ # * key: id of the property (the actual CWProperty key is built using
+ # <registry>.<regid>.<property id>, see `_cwpropkey` below)
+ # * value: tuple (property type, vocabfunc, default value, property description)
+ # possible types are those used by `logilab.common.configuration`
+ #
+ # notice that when it exists multiple objects with the same id (adaptation,
+ # overriding) only the first encountered definition is considered, so those
+ # objects can't try to have different default values for instance.
+ #
+ # you can then access to a property value using self.cw_propval, where self
+ # is an instance of class
+
+ @classmethod
+ def _cwpropkey(cls, propid):
+ """return cw property key for the property of the given id for this
+ class
+ """
+ return '%s.%s.%s' % (cls.__registry__, cls.__regid__, propid)
+
+ def cw_propval(self, propid):
+ """return cw property value associated to key
+
+ <registry>.<regid>.<property id>
+ """
+ return self._cw.property_value(self._cwpropkey(propid))
+
+ # these are overridden by set_log_methods below
+ # only defining here to prevent pylint from complaining
+ info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None
+
+set_log_methods(AppObject, getLogger('cubicweb.appobject'))
+
+# defined here to avoid warning on usage on the AppObject class
+yes = deprecated('[3.15] yes has been moved to logilab.common.registry')(yes)
diff -r 1400aee10df4 -r faf279e33298 cubicweb/crypto.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/crypto.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,47 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""Simple cryptographic routines, based on python-crypto."""
+__docformat__ = "restructuredtext en"
+
+from base64 import b64encode, b64decode
+
+from six.moves import cPickle as pickle
+
+from Crypto.Cipher import Blowfish
+
+
+_CYPHERERS = {}
+def _cypherer(seed):
+ try:
+ return _CYPHERERS[seed]
+ except KeyError:
+ _CYPHERERS[seed] = Blowfish.new(seed, Blowfish.MODE_ECB)
+ return _CYPHERERS[seed]
+
+
+def encrypt(data, seed):
+ string = pickle.dumps(data)
+ string = string + '*' * (8 - len(string) % 8)
+ string = b64encode(_cypherer(seed).encrypt(string))
+ return unicode(string)
+
+
+def decrypt(string, seed):
+ # pickle ignores trailing characters so we do not need to strip them off
+ string = _cypherer(seed).decrypt(b64decode(string))
+ return pickle.loads(string)
diff -r 1400aee10df4 -r faf279e33298 cubicweb/cwconfig.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/cwconfig.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,1440 @@
+# -*- coding: utf-8 -*-
+# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""
+.. _ResourceMode:
+
+Resource mode
+-------------
+
+Standard resource mode
+``````````````````````
+
+A resource *mode* is a predefined set of settings for various resources
+directories, such as cubes, instances, etc. to ease development with the
+framework. There are two running modes with *CubicWeb*:
+
+* **system**: resources are searched / created in the system directories (eg
+ usually requiring root access):
+
+ - instances are stored in :file:`<INSTALL_PREFIX>/etc/cubicweb.d`
+ - temporary files (such as pid file) in :file:`<INSTALL_PREFIX>/var/run/cubicweb`
+
+ where `<INSTALL_PREFIX>` is the detected installation prefix ('/usr/local' for
+ instance).
+
+* **user**: resources are searched / created in the user home directory:
+
+ - instances are stored in :file:`~/etc/cubicweb.d`
+ - temporary files (such as pid file) in :file:`/tmp`
+
+
+.. _CubicwebWithinVirtualEnv:
+
+Within virtual environment
+``````````````````````````
+
+If you are not administrator of you machine or if you need to play with some
+specific version of |cubicweb| you can use virtualenv_ a tool to create
+isolated Python environments.
+
+- instances are stored in :file:`<VIRTUAL_ENV>/etc/cubicweb.d`
+- temporary files (such as pid file) in :file:`<VIRTUAL_ENV>/var/run/cubicweb`
+
+.. _virtualenv: http://pypi.python.org/pypi/virtualenv
+
+
+Custom resource location
+````````````````````````
+
+Notice that each resource path may be explicitly set using an environment
+variable if the default doesn't suit your needs. Here are the default resource
+directories that are affected according to mode:
+
+* **system**: ::
+
+ CW_INSTANCES_DIR = /etc/cubicweb.d/
+ CW_INSTANCES_DATA_DIR = /var/lib/cubicweb/instances/
+ CW_RUNTIME_DIR = /var/run/cubicweb/
+
+* **user**: ::
+
+ CW_INSTANCES_DIR = ~/etc/cubicweb.d/
+ CW_INSTANCES_DATA_DIR = ~/etc/cubicweb.d/
+ CW_RUNTIME_DIR = /tmp
+
+Cubes search path is also affected, see the :ref:`Cube` section.
+
+
+Setting Cubicweb Mode
+`````````````````````
+
+By default, the mode is set to 'system' for standard installation. The mode is
+set to 'user' if `cubicweb is used from a mercurial repository`_. You can force
+this by setting the :envvar:`CW_MODE` environment variable to either 'user' or
+'system' so you can easily:
+
+* use system wide installation but user specific instances and all, without root
+ privileges on the system (`export CW_MODE=user`)
+
+* use local checkout of cubicweb on system wide instances (requires root
+ privileges on the system (`export CW_MODE=system`)
+
+If you've a doubt about the mode you're currently running, check the first line
+output by the :command:`cubicweb-ctl list` command.
+
+.. _`cubicweb is used from a mercurial repository`: CubicwebDevelopmentMod_
+
+
+.. _CubicwebDevelopmentMod:
+
+Development Mode (source)
+`````````````````````````
+
+If :file:`.hg` directory is found into the cubicweb package, there are
+specific resource rules.
+
+`<CW_SOFTWARE_ROOT>` is the source checkout's ``cubicweb`` directory:
+
+* main cubes directory is `<CW_SOFTWARE_ROOT>/../../cubes`. You can specify
+ another one with :envvar:`CW_CUBES_DIR` environment variable or simply
+ add some other directories by using :envvar:`CW_CUBES_PATH`
+
+* cubicweb migration files are searched in `<CW_SOFTWARE_ROOT>/misc/migration`
+ instead of `<INSTALL_PREFIX>/share/cubicweb/migration/`.
+
+
+Development Mode (virtualenv)
+`````````````````````````````
+
+If a virtualenv is found to be activated (i.e. a VIRTUAL_ENV variable is found
+in environment), the virtualenv root is used as `<INSTALL_PREFIX>`. This, in
+particular, makes it possible to work in `setuptools development mode`_
+(``python setup.py develop``) without any further configuration.
+
+.. _`setuptools development mode`: https://pythonhosted.org/setuptools/setuptools.html#development-mode
+
+.. _ConfigurationEnv:
+
+Environment configuration
+-------------------------
+
+Python
+``````
+
+If you installed *CubicWeb* by cloning the Mercurial shell repository or from source
+distribution, then you will need to update the environment variable PYTHONPATH by
+adding the path to `cubicweb`:
+
+Add the following lines to either :file:`.bashrc` or :file:`.bash_profile` to
+configure your development environment ::
+
+ export PYTHONPATH=/full/path/to/grshell-cubicweb
+
+If you installed *CubicWeb* with packages, no configuration is required and your
+new cubes will be placed in `/usr/share/cubicweb/cubes` and your instances will
+be placed in `/etc/cubicweb.d`.
+
+
+CubicWeb
+````````
+
+Here are all environment variables that may be used to configure *CubicWeb*:
+
+.. envvar:: CW_MODE
+
+ Resource mode: user or system, as explained in :ref:`ResourceMode`.
+
+.. envvar:: CW_CUBES_PATH
+
+ Augments the default search path for cubes. You may specify several
+ directories using ':' as separator (';' under windows environment).
+
+.. envvar:: CW_INSTANCES_DIR
+
+ Directory where cubicweb instances will be found.
+
+.. envvar:: CW_INSTANCES_DATA_DIR
+
+ Directory where cubicweb instances data will be written (backup file...)
+
+.. envvar:: CW_RUNTIME_DIR
+
+ Directory where pid files will be written
+"""
+from __future__ import print_function
+
+__docformat__ = "restructuredtext en"
+
+import importlib
+import logging
+import logging.config
+import os
+from os.path import (exists, join, expanduser, abspath, normpath,
+ basename, isdir, dirname, splitext)
+import pkgutil
+import pkg_resources
+import re
+from smtplib import SMTP
+import stat
+import sys
+from threading import Lock
+from warnings import warn, filterwarnings
+
+from six import text_type
+
+from logilab.common.decorators import cached, classproperty
+from logilab.common.deprecation import deprecated
+from logilab.common.logging_ext import set_log_methods, init_log
+from logilab.common.configuration import (Configuration, Method,
+ ConfigurationMixIn, merge_options)
+
+from cubicweb import (CW_SOFTWARE_ROOT, CW_MIGRATION_MAP,
+ ConfigurationError, Binary, _)
+from cubicweb.toolsutils import create_dir, option_value_from_env
+
+CONFIGURATIONS = []
+
+SMTP_LOCK = Lock()
+
+
+def configuration_cls(name):
+ """return the configuration class registered with the given name"""
+ try:
+ return [c for c in CONFIGURATIONS if c.name == name][0]
+ except IndexError:
+ raise ConfigurationError('no such config %r (check it exists with "cubicweb-ctl list")' % name)
+
+def possible_configurations(directory):
+ """return a list of installed configurations in a directory
+ according to \*-ctl files
+ """
+ return [name for name in ('repository', 'all-in-one')
+ if exists(join(directory, '%s.conf' % name))]
+
+def guess_configuration(directory):
+ """try to guess the configuration to use for a directory. If multiple
+ configurations are found, ConfigurationError is raised
+ """
+ modes = possible_configurations(directory)
+ if len(modes) != 1:
+ raise ConfigurationError('unable to guess configuration from %r %s'
+ % (directory, modes))
+ return modes[0]
+
+def _find_prefix(start_path=None):
+ """Return the prefix path of CubicWeb installation.
+
+ Walk parent directories of `start_path` looking for one containing a
+ 'share/cubicweb' directory. The first matching directory is assumed as the
+ prefix installation of CubicWeb.
+
+ If run from within a virtualenv, the virtualenv root is used as
+ `start_path`. Otherwise, `start_path` defaults to cubicweb package
+ directory path.
+ """
+ if start_path is None:
+ try:
+ prefix = os.environ['VIRTUAL_ENV']
+ except KeyError:
+ prefix = CW_SOFTWARE_ROOT
+ else:
+ prefix = start_path
+ if not isdir(prefix):
+ prefix = dirname(prefix)
+ old_prefix = None
+ while (not isdir(join(prefix, 'share', 'cubicweb'))
+ or prefix.endswith('.egg')):
+ if prefix == old_prefix:
+ return sys.prefix
+ old_prefix = prefix
+ prefix = dirname(prefix)
+ return prefix
+
+
+def _cube_pkgname(cube):
+ if not cube.startswith('cubicweb_'):
+ return 'cubicweb_' + cube
+ return cube
+
+
+# persistent options definition
+PERSISTENT_OPTIONS = (
+ ('encoding',
+ {'type' : 'string',
+ 'default': 'UTF-8',
+ 'help': _('user interface encoding'),
+ 'group': 'ui', 'sitewide': True,
+ }),
+ ('language',
+ {'type' : 'string',
+ 'default': 'en',
+ 'vocabulary': Method('available_languages'),
+ 'help': _('language of the user interface'),
+ 'group': 'ui',
+ }),
+ ('date-format',
+ {'type' : 'string',
+ 'default': '%Y/%m/%d',
+ 'help': _('how to format date in the ui (see this page for format description)'),
+ 'group': 'ui',
+ }),
+ ('datetime-format',
+ {'type' : 'string',
+ 'default': '%Y/%m/%d %H:%M',
+ 'help': _('how to format date and time in the ui (see this page for format description)'),
+ 'group': 'ui',
+ }),
+ ('time-format',
+ {'type' : 'string',
+ 'default': '%H:%M',
+ 'help': _('how to format time in the ui (see this page for format description)'),
+ 'group': 'ui',
+ }),
+ ('float-format',
+ {'type' : 'string',
+ 'default': '%.3f',
+ 'help': _('how to format float numbers in the ui'),
+ 'group': 'ui',
+ }),
+ ('default-text-format',
+ {'type' : 'choice',
+ 'choices': ('text/plain', 'text/rest', 'text/html', 'text/markdown'),
+ 'default': 'text/plain',
+ 'help': _('default text format for rich text fields.'),
+ 'group': 'ui',
+ }),
+ ('short-line-size',
+ {'type' : 'int',
+ 'default': 80,
+ 'help': _('maximum number of characters in short description'),
+ 'group': 'navigation',
+ }),
+ )
+
+def register_persistent_options(options):
+ global PERSISTENT_OPTIONS
+ PERSISTENT_OPTIONS = merge_options(PERSISTENT_OPTIONS + options)
+
+CFGTYPE2ETYPE_MAP = {
+ 'string': 'String',
+ 'choice': 'String',
+ 'yn': 'Boolean',
+ 'int': 'Int',
+ 'float' : 'Float',
+ }
+
+_forced_mode = os.environ.get('CW_MODE')
+assert _forced_mode in (None, 'system', 'user')
+
+# CWDEV tells whether directories such as i18n/, web/data/, etc. (ie containing
+# some other resources than python libraries) are located with the python code
+# or as a 'shared' cube
+CWDEV = exists(join(CW_SOFTWARE_ROOT, 'i18n'))
+
+try:
+ _INSTALL_PREFIX = os.environ['CW_INSTALL_PREFIX']
+except KeyError:
+ _INSTALL_PREFIX = _find_prefix()
+_USR_INSTALL = _INSTALL_PREFIX == '/usr'
+
+class CubicWebNoAppConfiguration(ConfigurationMixIn):
+ """base class for cubicweb configuration without a specific instance directory
+ """
+ # to set in concrete configuration
+ name = None
+ # log messages format (see logging module documentation for available keys)
+ log_format = '%(asctime)s - (%(name)s) %(levelname)s: %(message)s'
+ # the format below can be useful to debug multi thread issues:
+ # log_format = '%(asctime)s - [%(threadName)s] (%(name)s) %(levelname)s: %(message)s'
+ # nor remove appobjects based on unused interface [???]
+ cleanup_unused_appobjects = True
+
+ quick_start = False
+
+ if 'VIRTUAL_ENV' in os.environ:
+ _CUBES_DIR = join(_INSTALL_PREFIX, 'share', 'cubicweb', 'cubes')
+ mode = 'user'
+ elif CWDEV and _forced_mode != 'system':
+ mode = 'user'
+ _CUBES_DIR = join(CW_SOFTWARE_ROOT, '../../cubes')
+ else:
+ mode = _forced_mode or 'system'
+ _CUBES_DIR = join(_INSTALL_PREFIX, 'share', 'cubicweb', 'cubes')
+
+ CUBES_DIR = abspath(os.environ.get('CW_CUBES_DIR', _CUBES_DIR))
+ CUBES_PATH = os.environ.get('CW_CUBES_PATH', '').split(os.pathsep)
+
+ options = (
+ ('log-threshold',
+ {'type' : 'string', # XXX use a dedicated type?
+ 'default': 'WARNING',
+ 'help': 'server\'s log level',
+ 'group': 'main', 'level': 1,
+ }),
+ ('umask',
+ {'type' : 'int',
+ 'default': 0o077,
+ 'help': 'permission umask for files created by the server',
+ 'group': 'main', 'level': 2,
+ }),
+ # common configuration options which are potentially required as soon as
+ # you're using "base" application objects (ie to really server/web
+ # specific)
+ ('base-url',
+ {'type' : 'string',
+ 'default': None,
+ 'help': 'web server root url',
+ 'group': 'main', 'level': 1,
+ }),
+ ('allow-email-login',
+ {'type' : 'yn',
+ 'default': False,
+ 'help': 'allow users to login with their primary email if set',
+ 'group': 'main', 'level': 2,
+ }),
+ ('mangle-emails',
+ {'type' : 'yn',
+ 'default': False,
+ 'help': "don't display actual email addresses but mangle them if \
+this option is set to yes",
+ 'group': 'email', 'level': 3,
+ }),
+ )
+
+ def __getitem__(self, key):
+ """Get configuration option, by first looking at environmnent."""
+ file_value = super(CubicWebNoAppConfiguration, self).__getitem__(key)
+ return option_value_from_env(key, file_value)
+
+ # static and class methods used to get instance independant resources ##
+ @staticmethod
+ def cubicweb_version():
+ """return installed cubicweb version"""
+ from logilab.common.changelog import Version
+ from cubicweb import __pkginfo__
+ version = __pkginfo__.numversion
+ assert len(version) == 3, version
+ return Version(version)
+
+ @staticmethod
+ def persistent_options_configuration():
+ return Configuration(options=PERSISTENT_OPTIONS)
+
+ @classmethod
+ def shared_dir(cls):
+ """return the shared data directory (i.e. directory where standard
+ library views and data may be found)
+ """
+ if CWDEV:
+ return join(CW_SOFTWARE_ROOT, 'web')
+ return cls.cube_dir('shared')
+
+ @classmethod
+ def i18n_lib_dir(cls):
+ """return instance's i18n directory"""
+ if CWDEV:
+ return join(CW_SOFTWARE_ROOT, 'i18n')
+ return join(cls.shared_dir(), 'i18n')
+
+ @classmethod
+ def cw_languages(cls):
+ for fname in os.listdir(join(cls.i18n_lib_dir())):
+ if fname.endswith('.po'):
+ yield splitext(fname)[0]
+
+
+ @classmethod
+ def available_cubes(cls):
+ cubes = set()
+ for entry_point in pkg_resources.iter_entry_points(
+ group='cubicweb.cubes', name=None):
+ try:
+ module = entry_point.load()
+ except ImportError:
+ continue
+ else:
+ modname = module.__name__
+ if not modname.startswith('cubicweb_'):
+ cls.warning('entry point %s does not appear to be a cube',
+ entry_point)
+ continue
+ cubes.add(modname)
+ # Legacy cubes.
+ for directory in cls.cubes_search_path():
+ if not exists(directory):
+ cls.error('unexistant directory in cubes search path: %s'
+ % directory)
+ continue
+ for cube in os.listdir(directory):
+ if cube == 'shared':
+ continue
+ if not re.match('[_A-Za-z][_A-Za-z0-9]*$', cube):
+ continue # skip invalid python package name
+ cubedir = join(directory, cube)
+ if isdir(cubedir) and exists(join(cubedir, '__init__.py')):
+ cubes.add(cube)
+
+ def sortkey(cube):
+ """Preserve sorting with "cubicweb_" prefix."""
+ prefix = 'cubicweb_'
+ if cube.startswith(prefix):
+ # add a suffix to have a deterministic sorting between
+ # 'cubicweb_' and '' (useful in tests with "hash
+ # randomization" turned on).
+ return cube[len(prefix):] + '~'
+ return cube
+
+ return sorted(cubes, key=sortkey)
+
+ @classmethod
+ def cubes_search_path(cls):
+ """return the path of directories where cubes should be searched"""
+ path = [abspath(normpath(directory)) for directory in cls.CUBES_PATH
+ if directory.strip() and exists(directory.strip())]
+ if not cls.CUBES_DIR in path and exists(cls.CUBES_DIR):
+ path.append(cls.CUBES_DIR)
+ return path
+
+ @classproperty
+ def extrapath(cls):
+ extrapath = {}
+ for cubesdir in cls.cubes_search_path():
+ if cubesdir != cls.CUBES_DIR:
+ extrapath[cubesdir] = 'cubes'
+ return extrapath
+
+ @classmethod
+ def cube_dir(cls, cube):
+ """return the cube directory for the given cube id, raise
+ `ConfigurationError` if it doesn't exist
+ """
+ pkgname = _cube_pkgname(cube)
+ loader = pkgutil.find_loader(pkgname)
+ if loader:
+ return dirname(loader.get_filename())
+ # Legacy cubes.
+ for directory in cls.cubes_search_path():
+ cubedir = join(directory, cube)
+ if exists(cubedir):
+ return cubedir
+ msg = 'no module %(pkg)s in search path nor cube %(cube)r in %(path)s'
+ raise ConfigurationError(msg % {'cube': cube,
+ 'pkg': _cube_pkgname(cube),
+ 'path': cls.cubes_search_path()})
+
+ @classmethod
+ def cube_migration_scripts_dir(cls, cube):
+ """cube migration scripts directory"""
+ return join(cls.cube_dir(cube), 'migration')
+
+ @classmethod
+ def cube_pkginfo(cls, cube):
+ """return the information module for the given cube"""
+ pkgname = _cube_pkgname(cube)
+ try:
+ return importlib.import_module('%s.__pkginfo__' % pkgname)
+ except ImportError:
+ cube = CW_MIGRATION_MAP.get(cube, cube)
+ try:
+ parent = __import__('cubes.%s.__pkginfo__' % cube)
+ return getattr(parent, cube).__pkginfo__
+ except Exception as ex:
+ raise ConfigurationError(
+ 'unable to find packaging information for cube %s (%s: %s)'
+ % (cube, ex.__class__.__name__, ex))
+
+ @classmethod
+ def cube_version(cls, cube):
+ """return the version of the cube located in the given directory
+ """
+ from logilab.common.changelog import Version
+ version = cls.cube_pkginfo(cube).numversion
+ assert len(version) == 3, version
+ return Version(version)
+
+ @classmethod
+ def _cube_deps(cls, cube, key, oldkey):
+ """return cubicweb cubes used by the given cube"""
+ pkginfo = cls.cube_pkginfo(cube)
+ try:
+ # explicit __xxx_cubes__ attribute
+ deps = getattr(pkginfo, key)
+ except AttributeError:
+ # deduce cubes from generic __xxx__ attribute
+ try:
+ gendeps = getattr(pkginfo, key.replace('_cubes', ''))
+ except AttributeError:
+ deps = {}
+ else:
+ deps = dict( (x[len('cubicweb-'):], v)
+ for x, v in gendeps.items()
+ if x.startswith('cubicweb-'))
+ for depcube in deps:
+ try:
+ newname = CW_MIGRATION_MAP[depcube]
+ except KeyError:
+ pass
+ else:
+ deps[newname] = deps.pop(depcube)
+ return deps
+
+ @classmethod
+ def cube_depends_cubicweb_version(cls, cube):
+ # XXX no backward compat (see _cube_deps above)
+ try:
+ pkginfo = cls.cube_pkginfo(cube)
+ deps = getattr(pkginfo, '__depends__')
+ return deps.get('cubicweb')
+ except AttributeError:
+ return None
+
+ @classmethod
+ def cube_dependencies(cls, cube):
+ """return cubicweb cubes used by the given cube"""
+ return cls._cube_deps(cube, '__depends_cubes__', '__use__')
+
+ @classmethod
+ def cube_recommends(cls, cube):
+ """return cubicweb cubes recommended by the given cube"""
+ return cls._cube_deps(cube, '__recommends_cubes__', '__recommend__')
+
+ @classmethod
+ def expand_cubes(cls, cubes, with_recommends=False):
+ """expand the given list of top level cubes used by adding recursively
+ each cube's dependencies
+ """
+ cubes = list(cubes)
+ todo = cubes[:]
+ if with_recommends:
+ available = set(cls.available_cubes())
+ while todo:
+ cube = todo.pop(0)
+ for depcube in cls.cube_dependencies(cube):
+ if depcube not in cubes:
+ cubes.append(depcube)
+ todo.append(depcube)
+ if with_recommends:
+ for depcube in cls.cube_recommends(cube):
+ if depcube not in cubes and depcube in available:
+ cubes.append(depcube)
+ todo.append(depcube)
+ return cubes
+
+ @classmethod
+ def reorder_cubes(cls, cubes):
+ """reorder cubes from the top level cubes to inner dependencies
+ cubes
+ """
+ from logilab.common.graph import ordered_nodes, UnorderableGraph
+ graph = {}
+ for cube in cubes:
+ cube = CW_MIGRATION_MAP.get(cube, cube)
+ graph[cube] = set(dep for dep in cls.cube_dependencies(cube)
+ if dep in cubes)
+ graph[cube] |= set(dep for dep in cls.cube_recommends(cube)
+ if dep in cubes)
+ try:
+ return ordered_nodes(graph)
+ except UnorderableGraph as ex:
+ raise ConfigurationError(ex)
+
+ @classmethod
+ def cls_adjust_sys_path(cls):
+ """update python path if necessary"""
+ from cubicweb import _CubesImporter
+ _CubesImporter.install()
+ cubes_parent_dir = normpath(join(cls.CUBES_DIR, '..'))
+ if not cubes_parent_dir in sys.path:
+ sys.path.insert(0, cubes_parent_dir)
+ try:
+ import cubes
+ cubes.__path__ = cls.cubes_search_path()
+ except ImportError:
+ return # cubes dir doesn't exists
+
+ @classmethod
+ def load_available_configs(cls):
+ for confmod in ('web.webconfig', 'etwist.twconfig',
+ 'server.serverconfig',):
+ try:
+ __import__('cubicweb.%s' % confmod)
+ except ImportError:
+ pass
+
@classmethod
def load_cwctl_plugins(cls):
    """Load every cubicweb-ctl plugin: built-in ctl modules first, then
    each available cube's `ccplugin.py` (or at least its `__init__.py`).
    """
    cls.cls_adjust_sys_path()
    for ctlmod in ('web.webctl', 'etwist.twctl', 'server.serverctl',
                   'devtools.devctl'):
        try:
            __import__('cubicweb.%s' % ctlmod)
        except ImportError:
            # optional component not installed
            continue
        cls.info('loaded cubicweb-ctl plugin %s', ctlmod)
    for cube in cls.available_cubes():
        cubedir = cls.cube_dir(cube)
        pluginfile = join(cubedir, 'ccplugin.py')
        initfile = join(cubedir, '__init__.py')
        # new-style cubes are plain `cubicweb_<name>` packages, legacy
        # ones live in the `cubes` namespace
        if cube.startswith('cubicweb_'):
            pkgname = cube
        else:
            pkgname = 'cubes.%s' % cube
        if exists(pluginfile):
            try:
                __import__(pkgname + '.ccplugin')
                cls.info('loaded cubicweb-ctl plugin from %s', cube)
            except Exception:
                # a broken plugin must not prevent loading the others
                cls.exception('while loading plugin %s', pluginfile)
        elif exists(initfile):
            try:
                __import__(pkgname)
            except Exception:
                cls.exception('while loading cube %s', cube)
        else:
            cls.warning('no __init__ file in cube %s', cube)
+
@classmethod
def init_available_cubes(cls):
    """cubes may register some sources (svnfile for instance) in their
    __init__ file, so they should be loaded early in the startup process
    """
    for cube in cls.available_cubes():
        try:
            __import__('cubes.%s' % cube)
        except Exception as ex:
            # best effort: a broken cube should not abort startup
            cls.warning("can't init cube %s: %s", cube, ex)
+
+ cubicweb_appobject_path = set(['entities'])
+ cube_appobject_path = set(['entities'])
+
def __init__(self, debugmode=False):
    """Initialize a no-application configuration.

    :param debugmode: when true, re-enable DeprecationWarning display
      and more verbose logging.
    """
    if debugmode:
        # in python 2.7, DeprecationWarning are not shown anymore by default
        filterwarnings('default', category=DeprecationWarning)
    register_stored_procedures()
    self._cubes = None
    super(CubicWebNoAppConfiguration, self).__init__()
    self.debugmode = debugmode
    self.adjust_sys_path()
    self.load_defaults()
    # will be properly initialized later by _gettext_init
    self.translations = {'en': (text_type, lambda ctx, msgid: text_type(msgid) )}
    # set of cubes whose site_cubicweb module has already been loaded
    self._site_loaded = set()
    # don't register ReStructured Text directives by simple import, avoid pb
    # with eg sphinx.
    # XXX should be done properly with a function from cw.uicfg
    try:
        from cubicweb.ext.rest import cw_rest_init
    except ImportError:
        pass
    else:
        cw_rest_init()
+
def adjust_sys_path(self):
    """Update sys.path for cube lookup."""
    # overriden in CubicWebConfiguration
    self.cls_adjust_sys_path()
+
def init_log(self, logthreshold=None, logfile=None, syslog=False):
    """init the log service

    :param logthreshold: minimum level; defaults to DEBUG in debug mode,
      else to the 'log-threshold' option value.
    """
    if logthreshold is None:
        if self.debugmode:
            logthreshold = 'DEBUG'
        else:
            logthreshold = self['log-threshold']
    if sys.platform == 'win32':
        # no logrotate on win32, so use logging rotation facilities
        # for now, hard code weekly rotation every sunday, and 52 weeks kept
        # idea: make this configurable?
        init_log(self.debugmode, syslog, logthreshold, logfile, self.log_format,
                 rotation_parameters={'when': 'W6',  # every sunday
                                      'interval': 1,
                                      'backupCount': 52})
    else:
        init_log(self.debugmode, syslog, logthreshold, logfile, self.log_format)
    # configure simpleTal logger
    logging.getLogger('simpleTAL').setLevel(logging.ERROR)
+
def appobjects_path(self):
    """Return the files/directories the registry should scan for
    application objects.  A no-app configuration has none, hence the
    empty list; overridden in application configurations.
    """
    return []
+
def build_appobjects_path(self, templpath, evobjpath=None, tvobjpath=None):
    """given a list of directories, return a list of sub files and
    directories that should be loaded by the instance objects registry.

    :param evobjpath:
      optional list of sub-directories (or files without the .py ext) of
      the cubicweb library that should be tested and added to the output
      list if they exist. If not given, default to the
      `cubicweb_appobject_path` class attribute.
    :param tvobjpath:
      optional list of sub-directories (or files without the .py ext) of
      directories given in `templpath` that should be tested and added to
      the output list if they exist. If not given, default to the
      `cube_appobject_path` class attribute.
    """
    # cubicweb library paths first, then cube-provided paths
    vregpath = self.build_appobjects_cubicweb_path(evobjpath)
    vregpath += self.build_appobjects_cube_path(templpath, tvobjpath)
    return vregpath
+
def build_appobjects_cubicweb_path(self, evobjpath=None):
    """Return existing appobject paths from the cubicweb library itself."""
    vregpath = []
    if evobjpath is None:
        evobjpath = self.cubicweb_appobject_path
    # NOTE: for the order, see http://www.cubicweb.org/ticket/2330799
    # it is clearly a workaround ('entities' must be loaded first)
    for subdir in sorted(evobjpath, key=lambda x: x != 'entities'):
        path = join(CW_SOFTWARE_ROOT, subdir)
        if exists(path):
            vregpath.append(path)
    return vregpath
+
def build_appobjects_cube_path(self, templpath, tvobjpath=None):
    """Collect loadable appobject paths below each directory of `templpath`.

    For every directory, each candidate listed in `tvobjpath` (default:
    the `cube_appobject_path` class attribute) is appended when it exists
    as a sub-directory, or as a single `<name>.py` module.
    """
    if tvobjpath is None:
        tvobjpath = self.cube_appobject_path
    # NOTE: for the order, see http://www.cubicweb.org/ticket/2330799
    # ('entities' must always come first)
    candidates = sorted(tvobjpath, key=lambda name: name != 'entities')
    found = []
    for directory in templpath:
        for name in candidates:
            candidate = join(directory, name)
            if exists(candidate):
                found.append(candidate)
            elif exists(candidate + '.py'):
                found.append(candidate + '.py')
    return found
+
+ apphome = None
+
def load_site_cubicweb(self, cubes=()):
    """load site_cubicweb file for `cubes` (default: every used cube)"""
    # innermost cubes first, each loaded at most once
    for cube in reversed(cubes or self.cubes()):
        if cube in self._site_loaded:
            continue
        try:
            self._load_site_cubicweb(cube)
            self._site_loaded.add(cube)
        except ImportError:
            # cube has no site_cubicweb module
            continue
    if self.apphome is not None:
        # apphome may be None, e.g. upon `cubicweb-ctl i18ncube <cube>`
        self._load_site_cubicweb(None)
+
def _load_site_cubicweb(self, cube):
    """Load site_cubicweb.py from `cube` (or apphome if cube is None)."""
    if cube is not None:
        modname = 'cubes.%s.site_cubicweb' % cube
        __import__(modname)
        return sys.modules[modname]
    else:
        import imp
        apphome_site = join(self.apphome, 'site_cubicweb.py')
        if exists(apphome_site):
            with open(apphome_site, 'rb') as f:
                # load the apphome file as a standalone module
                return imp.load_source('site_cubicweb', apphome_site, f)
+
def cwproperty_definitions(self):
    """Yield ('<section>.<optname>', definition-dict) pairs for every
    persistent option of this configuration."""
    cfg = self.persistent_options_configuration()
    for section, options in cfg.options_by_section():
        section = section.lower()
        for optname, optdict, value in options:
            key = '%s.%s' % (section, optname)
            # NOTE: 'type' shadows the builtin inside this loop
            type, vocab = self.map_option(optdict)
            default = cfg.option_default(optname, optdict)
            pdef = {'type': type, 'vocabulary': vocab, 'default': default,
                    'help': optdict['help'],
                    'sitewide': optdict.get('sitewide', False)}
            yield key, pdef
+
def map_option(self, optdict):
    """Map a configuration option dict to an (entity type, vocabulary)
    pair usable as a CWProperty definition."""
    try:
        vocab = optdict['choices']
    except KeyError:
        vocab = optdict.get('vocabulary')
        if isinstance(vocab, Method):
            # resolve deferred vocabulary to the bound method, if any
            vocab = getattr(self, vocab.method, ())
    return CFGTYPE2ETYPE_MAP[optdict['type']], vocab
+
def default_instance_id(self):
    """Identifier of the instance, used as default value by some options.

    A no-app configuration has no instance, hence ``None``.
    """
    return None
+
+ _cubes = None
+
def init_cubes(self, cubes):
    """Record the (reordered) list of used cubes and import them."""
    self._cubes = self.reorder_cubes(cubes)
    # load cubes'__init__.py file first
    for cube in cubes:
        try:
            importlib.import_module(_cube_pkgname(cube))
        except ImportError:
            # Legacy cube living in the 'cubes' namespace.
            __import__('cubes.%s' % cube)
    self.load_site_cubicweb()
+
def cubes(self):
    """return the list of cubes used by this instance

    result is ordered from the top level cubes to inner dependencies
    cubes
    """
    # init_cubes() must have been called first
    assert self._cubes is not None, 'cubes not initialized'
    return self._cubes
+
def cubes_path(self):
    """Return the directories of the cubes used by this instance, from
    outermost to innermost cube.
    """
    paths = []
    for cube in self.cubes():
        paths.append(self.cube_dir(cube))
    return paths
+
# these are overridden by set_log_methods below
# only defining here to prevent pylint from complaining
@classmethod
def debug(cls, msg, *a, **kw):
    """Placeholder, replaced at module load time by set_log_methods."""
    pass
info = warning = error = critical = exception = debug
+
+
+class CubicWebConfiguration(CubicWebNoAppConfiguration):
+ """base class for cubicweb server and web configurations"""
+
+ if CubicWebNoAppConfiguration.mode == 'user':
+ _INSTANCES_DIR = expanduser('~/etc/cubicweb.d/')
+ #mode == system'
+ elif _USR_INSTALL:
+ _INSTANCES_DIR = '/etc/cubicweb.d/'
+ else:
+ _INSTANCES_DIR = join(_INSTALL_PREFIX, 'etc', 'cubicweb.d')
+
+ # set to true during repair (shell, migration) to allow some things which
+ # wouldn't be possible otherwise
+ repairing = False
+
+ # set by upgrade command
+ verbosity = 0
+ cmdline_options = None
+ options = CubicWebNoAppConfiguration.options + (
+ ('log-file',
+ {'type' : 'string',
+ 'default': Method('default_log_file'),
+ 'help': 'file where output logs should be written',
+ 'group': 'main', 'level': 2,
+ }),
+ ('statsd-endpoint',
+ {'type' : 'string',
+ 'default': '',
+ 'help': 'UDP address of the statsd endpoint; it must be formatted'
+ 'like :; disabled is unset.',
+ 'group': 'main', 'level': 2,
+ }),
+ # email configuration
+ ('smtp-host',
+ {'type' : 'string',
+ 'default': 'mail',
+ 'help': 'hostname of the SMTP mail server',
+ 'group': 'email', 'level': 1,
+ }),
+ ('smtp-port',
+ {'type' : 'int',
+ 'default': 25,
+ 'help': 'listening port of the SMTP mail server',
+ 'group': 'email', 'level': 1,
+ }),
+ ('sender-name',
+ {'type' : 'string',
+ 'default': Method('default_instance_id'),
+ 'help': 'name used as HELO name for outgoing emails from the \
+repository.',
+ 'group': 'email', 'level': 2,
+ }),
+ ('sender-addr',
+ {'type' : 'string',
+ 'default': 'cubicweb@mydomain.com',
+ 'help': 'email address used as HELO address for outgoing emails from \
+the repository',
+ 'group': 'email', 'level': 1,
+ }),
+ ('logstat-interval',
+ {'type' : 'int',
+ 'default': 0,
+ 'help': 'interval (in seconds) at which stats are dumped in the logstat file; set 0 to disable',
+ 'group': 'main', 'level': 2,
+ }),
+ ('logstat-file',
+ {'type' : 'string',
+ 'default': Method('default_stats_file'),
+ 'help': 'file where stats for the instance should be written',
+ 'group': 'main', 'level': 2,
+ }),
+ )
+
@classmethod
def instances_dir(cls):
    """return the control directory"""
    # the CW_INSTANCES_DIR environment variable overrides the default
    return abspath(os.environ.get('CW_INSTANCES_DIR', cls._INSTANCES_DIR))
+
@classmethod
def migration_scripts_dir(cls):
    """cubicweb migration scripts directory"""
    if CWDEV:
        # development checkout: scripts live in the source tree
        return join(CW_SOFTWARE_ROOT, 'misc', 'migration')
    mdir = join(_INSTALL_PREFIX, 'share', 'cubicweb', 'migration')
    if not exists(mdir):
        raise ConfigurationError('migration path %s doesn\'t exist' % mdir)
    return mdir
+
@classmethod
def config_for(cls, appid, config=None, debugmode=False, creating=False):
    """return a configuration instance for the given instance identifier
    """
    cls.load_available_configs()
    # guess the configuration name from the instance home when not given
    config = config or guess_configuration(cls.instance_home(appid))
    configcls = configuration_cls(config)
    return configcls(appid, debugmode, creating)
+
@classmethod
def possible_configurations(cls, appid):
    """return the name of possible configurations for the given
    instance id
    """
    home = cls.instance_home(appid)
    # delegates to the module-level function of the same name
    return possible_configurations(home)
+
@classmethod
def instance_home(cls, appid):
    """return the home directory of the instance with the given
    instance id

    :raises ConfigurationError: when no such instance is registered
    """
    home = join(cls.instances_dir(), appid)
    if not exists(home):
        raise ConfigurationError('no such instance %s (check it exists with'
                                 ' "cubicweb-ctl list")' % appid)
    return home
+
# option groups a configuration may accept, per configuration name
MODES = ('common', 'repository', 'Any')
MCOMPAT = {'all-in-one': MODES,
           'repository': ('common', 'repository', 'Any')}
@classmethod
def accept_mode(cls, mode):
    """Tell whether the option group `mode` applies to this configuration."""
    #assert mode in cls.MODES, mode
    return mode in cls.MCOMPAT[cls.name]
+
+ # default configuration methods ###########################################
+
def default_instance_id(self):
    """Identifier of this instance, used as default value by some
    options: simply its application id.
    """
    return self.appid
+
def default_log_file(self):
    """Return the default path to the log file of the instance's server.

    In 'user' mode, pick a writable file in the system temp directory,
    trying numbered suffixes (up to an arbitrary limit of 100) until one
    can be opened for append.  Otherwise use the system-wide
    /var/log/cubicweb location (possibly under the installation prefix).
    """
    if self.mode == 'user':
        import tempfile
        basepath = join(tempfile.gettempdir(), '%s-%s' % (
            basename(self.appid), self.name))
        path = basepath + '.log'
        i = 1
        while exists(path) and i < 100:  # arbitrary limit to avoid infinite loop
            try:
                # probe that the existing file is appendable; use a
                # context manager so the probe handle is not leaked
                with open(path, 'a'):
                    pass
                break
            except IOError:
                path = '%s-%s.log' % (basepath, i)
                i += 1
        return path
    if _USR_INSTALL:
        return '/var/log/cubicweb/%s-%s.log' % (self.appid, self.name)
    else:
        log_path = os.path.join(_INSTALL_PREFIX, 'var', 'log', 'cubicweb', '%s-%s.log')
        return log_path % (self.appid, self.name)
+
def default_stats_file(self):
    """Return the default path to the stats file of the instance's
    server, derived from the log file path by swapping the '.log'
    extension for '.stats'.
    """
    base = self.default_log_file()
    if base.endswith('.log'):
        base = base[:-len('.log')]
    return base + '.stats'
+
def default_pid_file(self):
    """return default path to the pid file of the instance'server"""
    if self.mode == 'system':
        if _USR_INSTALL:
            default = '/var/run/cubicweb/'
        else:
            default = os.path.join(_INSTALL_PREFIX, 'var', 'run', 'cubicweb')
    else:
        import tempfile
        default = tempfile.gettempdir()
    # runtime directory created on startup if necessary, don't check it
    # exists
    rtdir = abspath(os.environ.get('CW_RUNTIME_DIR', default))
    return join(rtdir, '%s-%s.pid' % (self.appid, self.name))
+
+ # config -> repository
+
def repository(self, vreg=None):
    """Build and return a Repository instance for this configuration."""
    from cubicweb.server.repository import Repository
    from cubicweb.server.utils import TasksManager
    return Repository(self, TasksManager(), vreg=vreg)
+
+ # instance methods used to get instance specific resources #############
+
def __init__(self, appid, debugmode=False, creating=False):
    """
    :param appid: instance identifier
    :param creating: set to True while the instance is being created
    """
    self.appid = appid
    # set to true while creating an instance
    self.creating = creating
    super(CubicWebConfiguration, self).__init__(debugmode)
    # identity translations until _gettext_init loads real catalogs
    fake_gettext = (text_type, lambda ctx, msgid: text_type(msgid))
    for lang in self.available_languages():
        self.translations[lang] = fake_gettext
    self._cubes = None
    self.load_file_configuration(self.main_config_file())
+
def adjust_sys_path(self):
    """Update sys.path, additionally making the instance home importable."""
    super(CubicWebConfiguration, self).adjust_sys_path()
    # adding apphome to python path is not usually necessary in production
    # environments, but necessary for tests
    if self.apphome and self.apphome not in sys.path:
        sys.path.insert(0, self.apphome)
+
@property
def apphome(self):
    """Home directory of this instance (<instances_dir>/<appid>)."""
    return join(self.instances_dir(), self.appid)
+
@property
def appdatahome(self):
    """Directory holding the instance's persistent data.

    System mode stores under /var/lib/cubicweb/instances (or the install
    prefix equivalent); user mode stores next to the instance home.  The
    CW_INSTANCES_DATA_DIR environment variable overrides the base dir.
    """
    if self.mode == 'system':
        if _USR_INSTALL:
            iddir = os.path.join('/var','lib', 'cubicweb', 'instances')
        else:
            iddir = os.path.join(_INSTALL_PREFIX, 'var', 'lib', 'cubicweb', 'instances')
    else:
        iddir = self.instances_dir()
    iddir = abspath(os.environ.get('CW_INSTANCES_DATA_DIR', iddir))
    return join(iddir, self.appid)
+
def init_cubes(self, cubes):
    """Initialize used cubes, then reload configuration since cubes may
    define additional options."""
    super(CubicWebConfiguration, self).init_cubes(cubes)
    # reload config file in cases options are defined in cubes __init__
    # or site_cubicweb files
    self.load_file_configuration(self.main_config_file())
    # configuration initialization hook
    self.load_configuration(**(self.cmdline_options or {}))
+
def add_cubes(self, cubes):
    """Register additional cubes on this instance and load their
    site_cubicweb files."""
    cubes = cubes if isinstance(cubes, list) else list(cubes)
    self._cubes = self.reorder_cubes(list(self._cubes) + cubes)
    self.load_site_cubicweb(cubes)
+
def main_config_file(self):
    """Path of the instance's control configuration file
    (``<name>.conf`` inside the instance home)."""
    return join(self.apphome, self.name + '.conf')
+
def save(self):
    """write down current configuration"""
    with open(self.main_config_file(), 'w') as fobj:
        self.generate_config(fobj)
+
def check_writeable_uid_directory(self, path):
    """check given directory path exists, belongs to the user running the
    server process and is writeable.

    If not, try to fix this, letting exception propagate when not possible.
    """
    if not exists(path):
        self.info('creating %s directory', path)
        try:
            os.makedirs(path)
        except OSError as ex:
            # creation failed, nothing further to check
            self.warning('error while creating %s directory: %s', path, ex)
            return
    # resolve the expected owner: the 'uid' option (numeric or user
    # name), falling back to the current process uid
    if self['uid']:
        try:
            uid = int(self['uid'])
        except ValueError:
            from pwd import getpwnam
            uid = getpwnam(self['uid']).pw_uid
    else:
        try:
            uid = os.getuid()
        except AttributeError:  # we are on windows
            return
    fstat = os.stat(path)
    if fstat.st_uid != uid:
        self.info('giving ownership of %s directory to %s', path, self['uid'])
        try:
            os.chown(path, uid, os.getgid())
        except OSError as ex:
            self.warning('error while giving ownership of %s directory to %s: %s',
                         path, self['uid'], ex)
    if not (fstat.st_mode & stat.S_IWUSR):
        self.info('forcing write permission on directory %s', path)
        try:
            os.chmod(path, fstat.st_mode | stat.S_IWUSR)
        except OSError as ex:
            self.warning('error while forcing write permission on directory %s: %s',
                         path, ex)
    return
+
@cached
def instance_md5_version(self):
    """Return an md5 fingerprint of the sorted cube versions plus the
    cubicweb version (cached per instance)."""
    from hashlib import md5  # pylint: disable=E0611
    infos = []
    for pkg in sorted(self.cubes()):
        version = self.cube_version(pkg)
        infos.append('%s-%s' % (pkg, version))
    infos.append('cubicweb-%s' % str(self.cubicweb_version()))
    return md5((';'.join(infos)).encode('ascii')).hexdigest()
+
def load_configuration(self, **kw):
    """load instance's configuration files"""
    super(CubicWebConfiguration, self).load_configuration(**kw)
    if self.apphome and not self.creating:
        # init gettext
        self._gettext_init()
+
def _load_site_cubicweb(self, cube):
    """Load a cube's site_cubicweb module.

    Overridden to register cube specific options declared by the module.
    """
    mod = super(CubicWebConfiguration, self)._load_site_cubicweb(cube)
    if getattr(mod, 'options', None):
        self.register_options(mod.options)
        self.load_defaults()
+
def init_log(self, logthreshold=None, force=False):
    """init the log service (idempotent unless `force` is true)"""
    if not force and hasattr(self, '_logging_initialized'):
        return
    self._logging_initialized = True
    super_self = super(CubicWebConfiguration, self)
    super_self.init_log(logthreshold, logfile=self.get('log-file'))
    # read a config file if it exists
    logconfig = join(self.apphome, 'logging.conf')
    if exists(logconfig):
        logging.config.fileConfig(logconfig)
    # set the statsd address, if any
    if self.get('statsd-endpoint'):
        try:
            address, port = self.get('statsd-endpoint').split(':')
            port = int(port)
        except ValueError:
            # wrong number of ':'-separated parts or non-integer port;
            # a bare `except:` here previously swallowed even
            # SystemExit/KeyboardInterrupt
            self.error('statsd-endpoint: invalid address format ({}); '
                       'it should be "ip:port"'.format(self.get('statsd-endpoint')))
        else:
            import statsd_logger
            statsd_logger.setup('cubicweb.%s' % self.appid, (address, port))
+
def available_languages(self, *args):
    """Yield the language codes having a compiled translation catalog in
    the instance ('en' is always yielded first, even without a catalog).

    Accepts (and ignores) arbitrary positional arguments so it is usable
    as a vocabulary method.
    """
    from glob import glob
    yield 'en'  # ensure 'en' is yielded even if no .mo found
    pattern = join(self.apphome, 'i18n', '*', 'LC_MESSAGES')
    for catalog_dir in glob(pattern):
        code = catalog_dir.split(os.sep)[-2]
        if code != 'en':
            yield code
+
def _gettext_init(self):
    """set language for gettext"""
    from cubicweb.cwgettext import translation
    path = join(self.apphome, 'i18n')
    for language in self.available_languages():
        self.info("loading language %s", language)
        try:
            tr = translation('cubicweb', path, languages=[language])
            # (ugettext, upgettext) pair used by the translation machinery
            self.translations[language] = (tr.ugettext, tr.upgettext)
        except (ImportError, AttributeError, IOError):
            if self.mode != 'test':
                # in test contexts, data/i18n does not exist, hence
                # logging will only pollute the logs
                self.exception('localisation support error for language %s',
                               language)
+
def appobjects_path(self):
    """return a list of files or directories where the registry will look
    for application objects
    """
    # reversed: innermost cubes first, apphome last — NOTE(review):
    # presumably so outer cubes/app objects take precedence; confirm
    # against the registry's loading order
    templpath = list(reversed(self.cubes_path()))
    if self.apphome:  # may be unset in tests
        templpath.append(self.apphome)
    return self.build_appobjects_path(templpath)
+
def set_sources_mode(self, sources):
    """Warn that selecting specific sources has no effect outside a
    repository configuration."""
    if 'all' not in sources:
        print('warning: ignoring specified sources, requires a repository '
              'configuration')
+
def i18ncompile(self, langs=None):
    """Compile i18n catalogs from all used cubes and the cubicweb library
    into the instance's i18n directory."""
    from cubicweb import i18n
    if langs is None:
        langs = self.available_languages()
    i18ndir = join(self.apphome, 'i18n')
    if not exists(i18ndir):
        create_dir(i18ndir)
    sourcedirs = [join(path, 'i18n') for path in self.cubes_path()]
    sourcedirs.append(self.i18n_lib_dir())
    return i18n.compile_i18n_catalogs(sourcedirs, i18ndir, langs)
+
def sendmails(self, msgs, fromaddr=None):
    """msgs: list of 2-uple (message object, recipients). Return False
    if connection to the smtp server failed, else True.
    """
    server, port = self['smtp-host'], self['smtp-port']
    if fromaddr is None:
        fromaddr = '%s <%s>' % (self['sender-name'], self['sender-addr'])
    # serialize SMTP access across threads
    SMTP_LOCK.acquire()
    try:
        try:
            smtp = SMTP(server, port)
        except Exception as ex:
            self.exception("can't connect to smtp server %s:%s (%s)",
                           server, port, ex)
            return False
        for msg, recipients in msgs:
            try:
                smtp.sendmail(fromaddr, recipients, msg.as_string())
            except Exception as ex:
                # per-message best effort: keep sending the others
                self.exception("error sending mail to %s (%s)",
                               recipients, ex)
        smtp.close()
    finally:
        SMTP_LOCK.release()
    return True
+
# wire the debug/info/warning/... stubs to a real logger
set_log_methods(CubicWebNoAppConfiguration,
                logging.getLogger('cubicweb.configuration'))

# alias to get a configuration instance from an instance id
instance_configuration = CubicWebConfiguration.config_for
application_configuration = deprecated('use instance_configuration')(instance_configuration)
+
+
# guard ensuring the RQL functions below are registered only once
_EXT_REGISTERED = False
def register_stored_procedures():
    """Register RQL function descriptions (stored procedures) with the
    rql library; idempotent thanks to the _EXT_REGISTERED guard."""
    from logilab.database import FunctionDescr
    from rql.utils import register_function, iter_funcnode_variables
    from rql.nodes import SortTerm, Constant, VariableRef

    global _EXT_REGISTERED
    if _EXT_REGISTERED:
        return
    _EXT_REGISTERED = True

    class COMMA_JOIN(FunctionDescr):
        # join string arguments with ', '
        supported_backends = ('postgres', 'sqlite',)
        rtype = 'String'

        def st_description(self, funcnode, mainindex, tr):
            return ', '.join(sorted(term.get_description(mainindex, tr)
                                    for term in iter_funcnode_variables(funcnode)))

    register_function(COMMA_JOIN)  # XXX do not expose?


    class CONCAT_STRINGS(COMMA_JOIN):
        aggregat = True

    register_function(CONCAT_STRINGS)  # XXX bw compat


    class GROUP_CONCAT(CONCAT_STRINGS):
        supported_backends = ('mysql', 'postgres', 'sqlite',)

    register_function(GROUP_CONCAT)


    class LIMIT_SIZE(FunctionDescr):
        supported_backends = ('postgres', 'sqlite',)
        minargs = maxargs = 3
        rtype = 'String'

        def st_description(self, funcnode, mainindex, tr):
            return funcnode.children[0].get_description(mainindex, tr)

    register_function(LIMIT_SIZE)


    class TEXT_LIMIT_SIZE(LIMIT_SIZE):
        supported_backends = ('mysql', 'postgres', 'sqlite',)
        minargs = maxargs = 2

    register_function(TEXT_LIMIT_SIZE)


    class FTIRANK(FunctionDescr):
        """return ranking of a variable that must be used as some has_text
        relation subject in the query's restriction. Usually used to sort result
        of full-text search by ranking.
        """
        supported_backends = ('postgres',)
        rtype = 'Float'

        def st_check_backend(self, backend, funcnode):
            """overriden so that on backend not supporting fti ranking, the
            function is removed when in an orderby clause, or replaced by a 1.0
            constant.
            """
            if not self.supports(backend):
                parent = funcnode.parent
                # look for an enclosing SortTerm
                while parent is not None and not isinstance(parent, SortTerm):
                    parent = parent.parent
                if isinstance(parent, SortTerm):
                    parent.parent.remove(parent)
                else:
                    funcnode.parent.replace(funcnode, Constant(1.0, 'Float'))
                    parent = funcnode
                # clean up dangling variable references of the removed node
                for vref in parent.iget_nodes(VariableRef):
                    vref.unregister_reference()

    register_function(FTIRANK)


    class FSPATH(FunctionDescr):
        """return path of some bytes attribute stored using the Bytes
        File-System Storage (bfss)
        """
        rtype = 'Bytes'  # XXX return a String? potential pb with fs encoding

        def update_cb_stack(self, stack):
            assert len(stack) == 1
            stack[0] = self.source_execute

        def as_sql(self, backend, args):
            raise NotImplementedError(
                'This callback is only available for BytesFileSystemStorage '
                'managed attribute. Is FSPATH() argument BFSS managed?')

        def source_execute(self, source, session, value):
            fpath = source.binary_to_str(value)
            try:
                return Binary(fpath)
            except OSError as ex:
                source.critical("can't open %s: %s", fpath, ex)
                return None

    register_function(FSPATH)
diff -r 1400aee10df4 -r faf279e33298 cubicweb/cwctl.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/cwctl.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,1081 @@
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see .
+"""the cubicweb-ctl tool, based on logilab.common.clcommands to
+provide a pluggable commands system.
+"""
+from __future__ import print_function
+
+__docformat__ = "restructuredtext en"
+
+# *ctl module should limit the number of import to be imported as quickly as
+# possible (for cubicweb-ctl reactivity, necessary for instance for usable bash
+# completion). So import locally in command helpers.
+import sys
+from warnings import warn, filterwarnings
+from os import remove, listdir, system, pathsep
+from os.path import exists, join, isfile, isdir, dirname, abspath
+
+try:
+ from os import kill, getpgid
+except ImportError:
+ def kill(*args):
+ """win32 kill implementation"""
+ def getpgid():
+ """win32 getpgid implementation"""
+
+from six.moves.urllib.parse import urlparse
+
+from logilab.common.clcommands import CommandLine
+from logilab.common.shellutils import ASK
+from logilab.common.configuration import merge_options
+from logilab.common.deprecation import deprecated
+
+from cubicweb import ConfigurationError, ExecutionError, BadCommandUsage
+from cubicweb.cwconfig import CubicWebConfiguration as cwcfg, CWDEV, CONFIGURATIONS
+from cubicweb.toolsutils import Command, rm, create_dir, underline_title
+from cubicweb.__pkginfo__ import version
+
+# don't check duplicated commands, it occurs when reloading site_cubicweb
+CWCTL = CommandLine('cubicweb-ctl', 'The CubicWeb swiss-knife.',
+ version=version, check_duplicated_command=False)
+
def wait_process_end(pid, maxtry=10, waittime=1):
    """Poll until process `pid` is gone, raising ExecutionError after
    `maxtry` attempts spaced `waittime` seconds apart.

    Liveness is probed by sending SIGUSR1; once the signal can no longer
    be delivered, the process is considered dead.
    """
    import signal
    from time import sleep
    for _attempt in range(maxtry):
        try:
            kill(pid, signal.SIGUSR1)
        except (OSError, AttributeError):  # XXX win32
            break
        sleep(waittime)
    else:
        raise ExecutionError("can't kill process %s" % pid)
+
def list_instances(regdir):
    """Return the sorted names of the instance sub-directories of
    `regdir`, or an empty list when `regdir` is not a directory."""
    if not isdir(regdir):
        return []
    entries = (name for name in listdir(regdir) if isdir(join(regdir, name)))
    return sorted(entries)
+
def detect_available_modes(templdir):
    """Guess which modes the cube at `templdir` supports.

    A schema (module or directory) enables 'repository'; data or views
    enable 'web ui'.
    """
    modes = []
    if any(exists(join(templdir, fname)) for fname in ('schema', 'schema.py')):
        modes.append('repository')
    if any(exists(join(templdir, fname))
           for fname in ('data', 'views', 'views.py')):
        modes.append('web ui')
    return modes
+
+
class InstanceCommand(Command):
    """base class for command taking 0 to n instance id as arguments
    (0 meaning all registered instances)
    """
    arguments = '[...]'
    options = (
        ("force",
         {'short': 'f', 'action' : 'store_true',
          'default': False,
          'help': 'force command without asking confirmation',
          }
         ),
        )
    # verb used in error messages ("instance X not <actionverb>")
    actionverb = None

    @deprecated('[3.22] startorder is not used any more')
    def ordered_instances(self):
        """return list of known instances
        """
        regdir = cwcfg.instances_dir()
        return list_instances(regdir)

    def run(self, args):
        """Run the ``<name>_instance`` method on each argument (a list of
        instance identifiers).
        """
        if not args:
            # no explicit target: run on every registered instance and,
            # unless --force was given, ask confirmation for each one
            args = list_instances(cwcfg.instances_dir())
            try:
                askconfirm = not self.config.force
            except AttributeError:
                # no force option
                askconfirm = False
        else:
            askconfirm = False
        self.run_args(args, askconfirm)

    def run_args(self, args, askconfirm):
        """Apply the command to each appid, exiting the process with the
        worst status collected (2 on interruption)."""
        status = 0
        for appid in args:
            if askconfirm:
                print('*'*72)
                if not ASK.confirm('%s instance %r ?' % (self.name, appid)):
                    continue
            try:
                status = max(status, self.run_arg(appid))
            except (KeyboardInterrupt, SystemExit):
                sys.stderr.write('%s aborted\n' % self.name)
                return 2  # specific error code
        sys.exit(status)

    def run_arg(self, appid):
        """Run the ``<name>_instance`` method for one instance, mapping
        expected errors to status 4 and unexpected ones to status 8."""
        cmdmeth = getattr(self, '%s_instance' % self.name)
        try:
            status = cmdmeth(appid)
        except (ExecutionError, ConfigurationError) as ex:
            sys.stderr.write('instance %s not %s: %s\n' % (
                appid, self.actionverb, ex))
            status = 4
        except Exception as ex:
            import traceback
            traceback.print_exc()
            sys.stderr.write('instance %s not %s: %s\n' % (
                appid, self.actionverb, ex))
            status = 8
        return status
+
class InstanceCommandFork(InstanceCommand):
    """Same as `InstanceCommand`, but command is forked in a new environment
    for each argument
    """

    def run_args(self, args, askconfirm):
        if len(args) > 1:
            # rebuild the command line without the instance ids, so each
            # instance can be handled by a fresh sub-process
            forkcmd = ' '.join(w for w in sys.argv if not w in args)
        else:
            forkcmd = None
        for appid in args:
            if askconfirm:
                print('*'*72)
                if not ASK.confirm('%s instance %r ?' % (self.name, appid)):
                    continue
            if forkcmd:
                status = system('%s %s' % (forkcmd, appid))
                if status:
                    print('%s exited with status %s' % (forkcmd, status))
            else:
                # single instance: no need to fork
                self.run_arg(appid)
+
+
+# base commands ###############################################################
+
class ListCommand(Command):
    """List configurations, cubes and instances.

    List available configurations, installed cubes, and registered instances.

    If given, the optional argument allows to restrict listing only a category of items.
    """
    name = 'list'
    arguments = '[all|cubes|configurations|instances]'
    options = (
        ('verbose',
         {'short': 'v', 'action' : 'store_true',
          'help': "display more information."}),
        )

    def run(self, args):
        """run the command with its specific arguments"""
        if not args:
            mode = 'all'
        elif len(args) == 1:
            mode = args[0]
        else:
            raise BadCommandUsage('Too many arguments')

        from cubicweb.migration import ConfigurationProblem

        if mode == 'all':
            print('CubicWeb %s (%s mode)' % (cwcfg.cubicweb_version(), cwcfg.mode))
            print()

        if mode in ('all', 'config', 'configurations'):
            print('Available configurations:')
            for config in CONFIGURATIONS:
                print('*', config.name)
                for line in config.__doc__.splitlines():
                    line = line.strip()
                    if not line:
                        continue
                    print(' ', line)
            print()

        if mode in ('all', 'cubes'):
            # version-consistency checker fed while walking the cubes
            cfgpb = ConfigurationProblem(cwcfg)
            try:
                cubesdir = pathsep.join(cwcfg.cubes_search_path())
                namesize = max(len(x) for x in cwcfg.available_cubes())
            except ConfigurationError as ex:
                print('No cubes available:', ex)
            except ValueError:
                # max() on an empty sequence: no cube installed
                print('No cubes available in %s' % cubesdir)
            else:
                print('Available cubes (%s):' % cubesdir)
                for cube in cwcfg.available_cubes():
                    try:
                        tinfo = cwcfg.cube_pkginfo(cube)
                        tversion = tinfo.version
                        cfgpb.add_cube(cube, tversion)
                    except (ConfigurationError, AttributeError) as ex:
                        tinfo = None
                        tversion = '[missing cube information: %s]' % ex
                    print('* %s %s' % (cube.ljust(namesize), tversion))
                    if self.config.verbose:
                        if tinfo:
                            descr = getattr(tinfo, 'description', '')
                            if not descr:
                                descr = tinfo.__doc__
                            if descr:
                                print(' '+ ' \n'.join(descr.splitlines()))
                        modes = detect_available_modes(cwcfg.cube_dir(cube))
                        print(' available modes: %s' % ', '.join(modes))
            print()

        if mode in ('all', 'instances'):
            try:
                regdir = cwcfg.instances_dir()
            except ConfigurationError as ex:
                print('No instance available:', ex)
                print()
                return
            instances = list_instances(regdir)
            if instances:
                print('Available instances (%s):' % regdir)
                for appid in instances:
                    modes = cwcfg.possible_configurations(appid)
                    if not modes:
                        print('* %s (BROKEN instance, no configuration found)' % appid)
                        continue
                    print('* %s (%s)' % (appid, ', '.join(modes)))
                    try:
                        # instantiated only to detect broken configurations
                        config = cwcfg.config_for(appid, modes[0])
                    except Exception as exc:
                        print(' (BROKEN instance, %s)' % exc)
                        continue
            else:
                print('No instance available in %s' % regdir)
            print()

        if mode == 'all':
            # configuration management problem solving
            cfgpb.solve()
            if cfgpb.warnings:
                print('Warnings:\n', '\n'.join('* '+txt for txt in cfgpb.warnings))
            if cfgpb.errors:
                print('Errors:')
                for op, cube, version, src in cfgpb.errors:
                    if op == 'add':
                        print('* cube', cube, end=' ')
                        if version:
                            print(' version', version, end=' ')
                        print('is not installed, but required by %s' % src)
                    else:
                        print('* cube %s version %s is installed, but version %s is required by %s' % (
                            cube, cfgpb.cubes[cube], version, src))
+
+def check_options_consistency(config):
+ if config.automatic and config.config_level > 0:
+ raise BadCommandUsage('--automatic and --config-level should not be '
+ 'used together')
+
+class CreateInstanceCommand(Command):
+ """Create an instance from a cube. This is a unified
+ command which can handle web / server / all-in-one installation
+ according to available parts of the software library and of the
+ desired cube.
+
+
+ the name of cube to use (list available cube names using
+ the "list" command). You can use several cubes by separating
+ them using comma (e.g. 'jpl,email')
+
+ an identifier for the instance to create
+ """
+ name = 'create'
+ arguments = ''
+ min_args = max_args = 2
+ options = (
+ ('automatic',
+ {'short': 'a', 'action' : 'store_true',
+ 'default': False,
+ 'help': 'automatic mode: never ask and use default answer to every '
+ 'question. this may require that your login match a database super '
+ 'user (allowed to create database & all).',
+ }),
+ ('config-level',
+ {'short': 'l', 'type' : 'int', 'metavar': '',
+ 'default': 0,
+ 'help': 'configuration level (0..2): 0 will ask for essential '
+ 'configuration parameters only while 2 will ask for all parameters',
+ }),
+ ('config',
+ {'short': 'c', 'type' : 'choice', 'metavar': '',
+ 'choices': ('all-in-one', 'repository'),
+ 'default': 'all-in-one',
+ 'help': 'installation type, telling which part of an instance '
+ 'should be installed. You can list available configurations using the'
+ ' "list" command. Default to "all-in-one", e.g. an installation '
+ 'embedding both the RQL repository and the web server.',
+ }),
+ ('no-db-create',
+ {'short': 'S',
+ 'action': 'store_true',
+ 'default': False,
+ 'help': 'stop after creation and do not continue with db-create',
+ }),
+ )
+
+ def run(self, args):
+ """run the command with its specific arguments"""
+ from logilab.common.textutils import splitstrip
+ check_options_consistency(self.config)
+ configname = self.config.config
+ cubes, appid = args
+ cubes = splitstrip(cubes)
+ # get the configuration and helper
+ config = cwcfg.config_for(appid, configname, creating=True)
+ cubes = config.expand_cubes(cubes)
+ config.init_cubes(cubes)
+ helper = self.config_helper(config)
+ # check the cube exists
+ try:
+ templdirs = [cwcfg.cube_dir(cube)
+ for cube in cubes]
+ except ConfigurationError as ex:
+ print(ex)
+ print('\navailable cubes:', end=' ')
+ print(', '.join(cwcfg.available_cubes()))
+ return
+ # create the registry directory for this instance
+ print('\n'+underline_title('Creating the instance %s' % appid))
+ create_dir(config.apphome)
+ # cubicweb-ctl configuration
+ if not self.config.automatic:
+ print('\n'+underline_title('Configuring the instance (%s.conf)'
+ % configname))
+ config.input_config('main', self.config.config_level)
+ # configuration'specific stuff
+ print()
+ helper.bootstrap(cubes, self.config.automatic, self.config.config_level)
+ # input for cubes specific options
+ if not self.config.automatic:
+ sections = set(sect.lower() for sect, opt, odict in config.all_options()
+ if 'type' in odict
+ and odict.get('level', 0) <= self.config.config_level)
+ for section in sections:
+ if section not in ('main', 'email', 'web'):
+ print('\n' + underline_title('%s options' % section))
+ config.input_config(section, self.config.config_level)
+ # write down configuration
+ config.save()
+ self._handle_win32(config, appid)
+ print('-> generated config %s' % config.main_config_file())
+ # handle i18n files structure
+ # in the first cube given
+ from cubicweb import i18n
+ langs = [lang for lang, _ in i18n.available_catalogs(join(templdirs[0], 'i18n'))]
+ errors = config.i18ncompile(langs)
+ if errors:
+ print('\n'.join(errors))
+ if self.config.automatic \
+ or not ASK.confirm('error while compiling message catalogs, '
+ 'continue anyway ?'):
+ print('creation not completed')
+ return
+ # create the additional data directory for this instance
+ if config.appdatahome != config.apphome: # true in dev mode
+ create_dir(config.appdatahome)
+ create_dir(join(config.appdatahome, 'backup'))
+ if config['uid']:
+ from logilab.common.shellutils import chown
+ # this directory should be owned by the uid of the server process
+ print('set %s as owner of the data directory' % config['uid'])
+ chown(config.appdatahome, config['uid'])
+ print('\n-> creation done for %s\n' % repr(config.apphome)[1:-1])
+ if not self.config.no_db_create:
+ helper.postcreate(self.config.automatic, self.config.config_level)
+
+ def _handle_win32(self, config, appid):
+ if sys.platform != 'win32':
+ return
+ service_template = """
+import sys
+import win32serviceutil
+sys.path.insert(0, r"%(CWPATH)s")
+
+from cubicweb.etwist.service import CWService
+
+classdict = {'_svc_name_': 'cubicweb-%(APPID)s',
+ '_svc_display_name_': 'CubicWeb ' + '%(CNAME)s',
+ 'instance': '%(APPID)s'}
+%(CNAME)sService = type('%(CNAME)sService', (CWService,), classdict)
+
+if __name__ == '__main__':
+ win32serviceutil.HandleCommandLine(%(CNAME)sService)
+"""
+ open(join(config.apphome, 'win32svc.py'), 'wb').write(
+ service_template % {'APPID': appid,
+ 'CNAME': appid.capitalize(),
+ 'CWPATH': abspath(join(dirname(__file__), '..'))})
+
+
+class DeleteInstanceCommand(Command):
+ """Delete an instance. Will remove instance's files and
+ unregister it.
+ """
+ name = 'delete'
+ arguments = ''
+ min_args = max_args = 1
+ options = ()
+
+ def run(self, args):
+ """run the command with its specific arguments"""
+ appid = args[0]
+ configs = [cwcfg.config_for(appid, configname)
+ for configname in cwcfg.possible_configurations(appid)]
+ if not configs:
+ raise ExecutionError('unable to guess configuration for %s' % appid)
+ for config in configs:
+ helper = self.config_helper(config, required=False)
+ if helper:
+ helper.cleanup()
+ # remove home
+ rm(config.apphome)
+ # remove instance data directory
+ try:
+ rm(config.appdatahome)
+ except OSError as ex:
+ import errno
+ if ex.errno != errno.ENOENT:
+ raise
+ confignames = ', '.join([config.name for config in configs])
+ print('-> instance %s (%s) deleted.' % (appid, confignames))
+
+
+# instance commands ########################################################
+
+class StartInstanceCommand(InstanceCommandFork):
+ """Start the given instances. If no instance is given, start them all.
+
+ ...
+ identifiers of the instances to start. If no instance is
+ given, start them all.
+ """
+ name = 'start'
+ actionverb = 'started'
+ options = (
+ ("debug",
+ {'short': 'D', 'action' : 'store_true',
+ 'help': 'start server in debug mode.'}),
+ ("force",
+ {'short': 'f', 'action' : 'store_true',
+ 'default': False,
+ 'help': 'start the instance even if it seems to be already \
+running.'}),
+ ('profile',
+ {'short': 'P', 'type' : 'string', 'metavar': '',
+ 'default': None,
+ 'help': 'profile code and use the specified file to store stats',
+ }),
+ ('loglevel',
+ {'short': 'l', 'type' : 'choice', 'metavar': '',
+ 'default': None, 'choices': ('debug', 'info', 'warning', 'error'),
+ 'help': 'debug if -D is set, error otherwise',
+ }),
+ ('param',
+ {'short': 'p', 'type' : 'named', 'metavar' : 'key1:value1,key2:value2',
+ 'default': {},
+ 'help': 'override configuration file option with .',
+ }),
+ )
+
+ def start_instance(self, appid):
+ """start the instance's server"""
+ try:
+ import twisted # noqa
+ except ImportError:
+ msg = (
+ "Twisted is required by the 'start' command\n"
+ "Either install it, or use one of the alternative commands:\n"
+ "- '{ctl} wsgi {appid}'\n"
+ "- '{ctl} pyramid {appid}' (requires the pyramid cube)\n")
+ raise ExecutionError(msg.format(ctl='cubicweb-ctl', appid=appid))
+ config = cwcfg.config_for(appid, debugmode=self['debug'])
+ # override config file values with cmdline options
+ config.cmdline_options = self.config.param
+ init_cmdline_log_threshold(config, self['loglevel'])
+ if self['profile']:
+ config.global_set_option('profile', self.config.profile)
+ helper = self.config_helper(config, cmdname='start')
+ pidf = config['pid-file']
+ if exists(pidf) and not self['force']:
+ msg = "%s seems to be running. Remove %s by hand if necessary or use \
+the --force option."
+ raise ExecutionError(msg % (appid, pidf))
+ if helper.start_server(config) == 1:
+ print('instance %s started' % appid)
+
+
+def init_cmdline_log_threshold(config, loglevel):
+ if loglevel is not None:
+ config.global_set_option('log-threshold', loglevel.upper())
+ config.init_log(config['log-threshold'], force=True)
+
+
+class StopInstanceCommand(InstanceCommand):
+ """Stop the given instances.
+
+ ...
+ identifiers of the instances to stop. If no instance is
+ given, stop them all.
+ """
+ name = 'stop'
+ actionverb = 'stopped'
+
+ def stop_instance(self, appid):
+ """stop the instance's server"""
+ config = cwcfg.config_for(appid)
+ helper = self.config_helper(config, cmdname='stop')
+ helper.poststop() # do this anyway
+ pidf = config['pid-file']
+ if not exists(pidf):
+ sys.stderr.write("%s doesn't exist.\n" % pidf)
+ return
+ import signal
+ pid = int(open(pidf).read().strip())
+ try:
+ kill(pid, signal.SIGTERM)
+ except Exception:
+ sys.stderr.write("process %s seems already dead.\n" % pid)
+ else:
+ try:
+ wait_process_end(pid)
+ except ExecutionError as ex:
+ sys.stderr.write('%s\ntrying SIGKILL\n' % ex)
+ try:
+ kill(pid, signal.SIGKILL)
+ except Exception:
+ # probably dead now
+ pass
+ wait_process_end(pid)
+ try:
+ remove(pidf)
+ except OSError:
+ # already removed by twistd
+ pass
+ print('instance %s stopped' % appid)
+
+
+class RestartInstanceCommand(StartInstanceCommand):
+ """Restart the given instances.
+
+ ...
+ identifiers of the instances to restart. If no instance is
+ given, restart them all.
+ """
+ name = 'restart'
+ actionverb = 'restarted'
+
+ def restart_instance(self, appid):
+ StopInstanceCommand(self.logger).stop_instance(appid)
+ self.start_instance(appid)
+
+
+class ReloadConfigurationCommand(RestartInstanceCommand):
+ """Reload the given instances. This command is equivalent to a
+ restart for now.
+
+ ...
+ identifiers of the instances to reload. If no instance is
+ given, reload them all.
+ """
+ name = 'reload'
+
+ def reload_instance(self, appid):
+ self.restart_instance(appid)
+
+
+class StatusCommand(InstanceCommand):
+ """Display status information about the given instances.
+
+ ...
+ identifiers of the instances to status. If no instance is
+ given, get status information about all registered instances.
+ """
+ name = 'status'
+ options = ()
+
+ @staticmethod
+ def status_instance(appid):
+ """print running status information for an instance"""
+ status = 0
+ for mode in cwcfg.possible_configurations(appid):
+ config = cwcfg.config_for(appid, mode)
+ print('[%s-%s]' % (appid, mode), end=' ')
+ try:
+ pidf = config['pid-file']
+ except KeyError:
+ print('buggy instance, pid file not specified')
+ continue
+ if not exists(pidf):
+ print("doesn't seem to be running")
+ status = 1
+ continue
+ pid = int(open(pidf).read().strip())
+ # trick to guess whether or not the process is running
+ try:
+ getpgid(pid)
+ except OSError:
+ print("should be running with pid %s but the process can not be found" % pid)
+ status = 1
+ continue
+ print("running with pid %s" % (pid))
+ return status
+
+class UpgradeInstanceCommand(InstanceCommandFork):
+ """Upgrade an instance after cubicweb and/or component(s) upgrade.
+
+ For repository update, you will be prompted for a login / password to use
+ to connect to the system database. For some upgrades, the given user
+ should have create or alter table permissions.
+
+ ...
+ identifiers of the instances to upgrade. If no instance is
+ given, upgrade them all.
+ """
+ name = 'upgrade'
+ actionverb = 'upgraded'
+ options = InstanceCommand.options + (
+ ('force-cube-version',
+ {'short': 't', 'type' : 'named', 'metavar': 'cube1:X.Y.Z,cube2:X.Y.Z',
+ 'default': None,
+ 'help': 'force migration from the indicated version for the specified cube(s).'}),
+
+ ('force-cubicweb-version',
+ {'short': 'e', 'type' : 'string', 'metavar': 'X.Y.Z',
+ 'default': None,
+ 'help': 'force migration from the indicated cubicweb version.'}),
+
+ ('fs-only',
+ {'short': 's', 'action' : 'store_true',
+ 'default': False,
+ 'help': 'only upgrade files on the file system, not the database.'}),
+
+ ('nostartstop',
+ {'short': 'n', 'action' : 'store_true',
+ 'default': False,
+ 'help': 'don\'t try to stop instance before migration and to restart it after.'}),
+
+ ('verbosity',
+ {'short': 'v', 'type' : 'int', 'metavar': '<0..2>',
+ 'default': 1,
+ 'help': "0: no confirmation, 1: only main commands confirmed, 2 ask \
+for everything."}),
+
+ ('backup-db',
+ {'short': 'b', 'type' : 'yn', 'metavar': '',
+ 'default': None,
+ 'help': "Backup the instance database before upgrade.\n"\
+ "If the option is ommitted, confirmation will be ask.",
+ }),
+
+ ('ext-sources',
+ {'short': 'E', 'type' : 'csv', 'metavar': '',
+ 'default': None,
+ 'help': "For multisources instances, specify to which sources the \
+repository should connect to for upgrading. When unspecified or 'migration' is \
+given, appropriate sources for migration will be automatically selected \
+(recommended). If 'all' is given, will connect to all defined sources.",
+ }),
+ )
+
+ def upgrade_instance(self, appid):
+ print('\n' + underline_title('Upgrading the instance %s' % appid))
+ from logilab.common.changelog import Version
+ config = cwcfg.config_for(appid)
+ instance_running = exists(config['pid-file'])
+ config.repairing = True # notice we're not starting the server
+ config.verbosity = self.config.verbosity
+ set_sources_mode = getattr(config, 'set_sources_mode', None)
+ if set_sources_mode is not None:
+ set_sources_mode(self.config.ext_sources or ('migration',))
+ # get instance and installed versions for the server and the componants
+ mih = config.migration_handler()
+ repo = mih.repo
+ vcconf = repo.get_versions()
+ helper = self.config_helper(config, required=False)
+ if self.config.force_cube_version:
+ for cube, version in self.config.force_cube_version.items():
+ vcconf[cube] = Version(version)
+ toupgrade = []
+ for cube in config.cubes():
+ installedversion = config.cube_version(cube)
+ try:
+ applversion = vcconf[cube]
+ except KeyError:
+ config.error('no version information for %s' % cube)
+ continue
+ if installedversion > applversion:
+ toupgrade.append( (cube, applversion, installedversion) )
+ cubicwebversion = config.cubicweb_version()
+ if self.config.force_cubicweb_version:
+ applcubicwebversion = Version(self.config.force_cubicweb_version)
+ vcconf['cubicweb'] = applcubicwebversion
+ else:
+ applcubicwebversion = vcconf.get('cubicweb')
+ if cubicwebversion > applcubicwebversion:
+ toupgrade.append(('cubicweb', applcubicwebversion, cubicwebversion))
+ # only stop once we're sure we have something to do
+ if instance_running and not (CWDEV or self.config.nostartstop):
+ StopInstanceCommand(self.logger).stop_instance(appid)
+ # run cubicweb/componants migration scripts
+ if self.config.fs_only or toupgrade:
+ for cube, fromversion, toversion in toupgrade:
+ print('-> migration needed from %s to %s for %s' % (fromversion, toversion, cube))
+ with mih.cnx:
+ with mih.cnx.security_enabled(False, False):
+ mih.migrate(vcconf, reversed(toupgrade), self.config)
+ else:
+ print('-> no data migration needed for instance %s.' % appid)
+ # rewrite main configuration file
+ mih.rewrite_configuration()
+ mih.shutdown()
+ # handle i18n upgrade
+ if not self.i18nupgrade(config):
+ return
+ print()
+ if helper:
+ helper.postupgrade(repo)
+ print('-> instance migrated.')
+ if instance_running and not (CWDEV or self.config.nostartstop):
+ # restart instance through fork to get a proper environment, avoid
+ # uicfg pb (and probably gettext catalogs, to check...)
+ forkcmd = '%s start %s' % (sys.argv[0], appid)
+ status = system(forkcmd)
+ if status:
+ print('%s exited with status %s' % (forkcmd, status))
+ print()
+
+ def i18nupgrade(self, config):
+ # handle i18n upgrade:
+ # * install new languages
+ # * recompile catalogs
+ # XXX search available language in the first cube given
+ from cubicweb import i18n
+ templdir = cwcfg.cube_dir(config.cubes()[0])
+ langs = [lang for lang, _ in i18n.available_catalogs(join(templdir, 'i18n'))]
+ errors = config.i18ncompile(langs)
+ if errors:
+ print('\n'.join(errors))
+ if not ASK.confirm('Error while compiling message catalogs, '
+ 'continue anyway?'):
+ print('-> migration not completed.')
+ return False
+ return True
+
+
+class ListVersionsInstanceCommand(InstanceCommand):
+ """List versions used by an instance.
+
+ ...
+ identifiers of the instances to list versions for.
+ """
+ name = 'versions'
+
+ def versions_instance(self, appid):
+ config = cwcfg.config_for(appid)
+ # should not raise error if db versions don't match fs versions
+ config.repairing = True
+ # no need to load all appobjects and schema
+ config.quick_start = True
+ if hasattr(config, 'set_sources_mode'):
+ config.set_sources_mode(('migration',))
+ vcconf = config.repository().get_versions()
+ for key in sorted(vcconf):
+ print(key+': %s.%s.%s' % vcconf[key])
+
+class ShellCommand(Command):
+ """Run an interactive migration shell on an instance. This is a python shell
+ with enhanced migration commands predefined in the namespace. An additional
+ argument may be given corresponding to a file containing commands to execute
+ in batch mode.
+
+ By default it will connect to a local instance using an in memory
+ connection, unless a URL to a running instance is specified.
+
+ Arguments after bare "--" string will not be processed by the shell command
+ You can use it to pass extra arguments to your script and expect for
+ them in '__args__' afterwards.
+
+
+ the identifier of the instance to connect.
+ """
+ name = 'shell'
+ arguments = ' [batch command file(s)] [-- ", re.M|re.I|re.S)
+def _remove_script_tags(data):
+ """Remove the script (usually javascript) tags to help the lxml
+ XMLParser / HTMLParser do their job. Without that, they choke on
+ tags embedded in JS strings.
+ """
+ # Notice we may want to use lxml cleaner, but it's far too intrusive:
+ #
+ # cleaner = Cleaner(scripts=True,
+ # javascript=False,
+ # comments=False,
+ # style=False,
+ # links=False,
+ # meta=False,
+ # page_structure=False,
+ # processing_instructions=False,
+ # embedded=False,
+ # frames=False,
+ # forms=False,
+ # annoying_tags=False,
+ # remove_tags=(),
+ # remove_unknown_tags=False,
+ # safe_attrs_only=False,
+ # add_nofollow=False)
+ # >>> cleaner.clean_html('')
+ # ''
+ # >>> cleaner.clean_html('')
+ # ''
+ # >>> cleaner.clean_html('')
+ # ''
+ # >>> cleaner.clean_html(' ')
+ # ' '
+ # >>> cleaner.clean_html(' ')
+ # ' '
+ #
+ # using that, we'll miss most actual validation error we want to
+ # catch. For now, use dumb regexp
+ return _REM_SCRIPT_RGX.sub(b'', data)
+
+
+class Validator(object):
+ """ base validator API """
+ parser = None
+
+ def parse_string(self, source):
+ etree = self._parse(self.preprocess_data(source))
+ return PageInfo(source, etree)
+
+ def preprocess_data(self, data):
+ return data
+
+ def _parse(self, pdata):
+ try:
+ return etree.fromstring(pdata, self.parser)
+ except etree.XMLSyntaxError as exc:
+ new_exc = AssertionError(u'invalid document: %s' % exc)
+ new_exc.position = exc.position
+ raise new_exc
+
+
+class DTDValidator(Validator):
+ def __init__(self):
+ Validator.__init__(self)
+ # XXX understand what's happening under windows
+ self.parser = etree.XMLParser(dtd_validation=sys.platform != 'win32')
+
+ def preprocess_data(self, data):
+ """used to fix potential blockquote mess generated by docutils"""
+ if STRICT_DOCTYPE not in data:
+ return data
+ # parse using transitional DTD
+ data = data.replace(STRICT_DOCTYPE, TRANSITIONAL_DOCTYPE)
+ tree = self._parse(data)
+ namespace = tree.nsmap.get(None)
+ # this is the list of authorized child tags for
nodes
+ expected = 'p h1 h2 h3 h4 h5 h6 div ul ol dl pre hr blockquote address ' \
+ 'fieldset table form noscript ins del script'.split()
+ if namespace:
+ blockquotes = tree.findall('.//{%s}blockquote' % namespace)
+ expected = ['{%s}%s' % (namespace, tag) for tag in expected]
+ else:
+ blockquotes = tree.findall('.//blockquote')
+ # quick and dirty approach: remove all blockquotes
+ for blockquote in blockquotes:
+ parent = blockquote.getparent()
+ parent.remove(blockquote)
+ data = etree.tostring(tree)
+ return '%s\n%s' % (
+ STRICT_DOCTYPE, data)
+
+
+class XMLValidator(Validator):
+ """XML validator, checks that XML is well-formed and used XMLNS are defined"""
+
+ def __init__(self):
+ Validator.__init__(self)
+ self.parser = etree.XMLParser()
+
+SaxOnlyValidator = class_renamed('SaxOnlyValidator',
+ XMLValidator,
+ '[3.17] you should use the '
+ 'XMLValidator class instead')
+
+
+class XMLSyntaxValidator(Validator):
+ """XML syntax validator, check XML is well-formed"""
+
+ class MySaxErrorHandler(sax.ErrorHandler):
+ """override default handler to avoid choking because of unknown entity"""
+ def fatalError(self, exception):
+ # XXX check entity in htmlentitydefs
+ if not str(exception).endswith('undefined entity'):
+ raise exception
+ _parser = sax.make_parser()
+ _parser.setContentHandler(sax.handler.ContentHandler())
+ _parser.setErrorHandler(MySaxErrorHandler())
+
+ def __init__(self):
+ super(XMLSyntaxValidator, self).__init__()
+ # XMLParser() wants xml namespaces defined
+ # XMLParser(recover=True) will accept almost anything
+ #
+ # -> use the later but preprocess will check xml well-formness using a
+ # dumb SAX parser
+ self.parser = etree.XMLParser(recover=True)
+
+ def preprocess_data(self, data):
+ return _remove_script_tags(data)
+
+ def _parse(self, data):
+ inpsrc = sax.InputSource()
+ inpsrc.setByteStream(BytesIO(data))
+ try:
+ self._parser.parse(inpsrc)
+ except sax.SAXParseException as exc:
+ new_exc = AssertionError(u'invalid document: %s' % exc)
+ new_exc.position = (exc._linenum, exc._colnum)
+ raise new_exc
+ return super(XMLSyntaxValidator, self)._parse(data)
+
+
+class HTMLValidator(Validator):
+
+ def __init__(self):
+ Validator.__init__(self)
+ self.parser = etree.HTMLParser(recover=False)
+
+ def preprocess_data(self, data):
+ return _remove_script_tags(data)
+
+
+class PageInfo(object):
+ """holds various informations on the view's output"""
+ def __init__(self, source, root):
+ self.source = source
+ self.etree = root
+ self.raw_text = u''.join(root.xpath('//text()'))
+ self.namespace = self.etree.nsmap
+ self.default_ns = self.namespace.get(None)
+ self.a_tags = self.find_tag('a')
+ self.h1_tags = self.find_tag('h1')
+ self.h2_tags = self.find_tag('h2')
+ self.h3_tags = self.find_tag('h3')
+ self.h4_tags = self.find_tag('h4')
+ self.input_tags = self.find_tag('input')
+ self.title_tags = [self.h1_tags, self.h2_tags, self.h3_tags, self.h4_tags]
+
+ def _iterstr(self, tag):
+ if self.default_ns is None:
+ return ".//%s" % tag
+ else:
+ return ".//{%s}%s" % (self.default_ns, tag)
+
+ def matching_nodes(self, tag, **attrs):
+ for elt in self.etree.iterfind(self._iterstr(tag)):
+ eltattrs = elt.attrib
+ for attr, value in attrs.items():
+ try:
+ if eltattrs[attr] != value:
+ break
+ except KeyError:
+ break
+ else: # all attributes match
+ yield elt
+
+ def has_tag(self, tag, nboccurs=1, **attrs):
+ """returns True if tag with given attributes appears in the page
+ `nbtimes` (any if None)
+ """
+ for elt in self.matching_nodes(tag, **attrs):
+ if nboccurs is None: # no need to check number of occurences
+ return True
+ if not nboccurs: # too much occurences
+ return False
+ nboccurs -= 1
+ if nboccurs == 0: # correct number of occurences
+ return True
+ return False # no matching tag/attrs
+
+ def find_tag(self, tag, gettext=True):
+ """return a list which contains text of all "tag" elements """
+ iterstr = self._iterstr(tag)
+ if not gettext or tag in ('a', 'input'):
+ return [(elt.text, elt.attrib)
+ for elt in self.etree.iterfind(iterstr)]
+ return [u''.join(elt.xpath('.//text()'))
+ for elt in self.etree.iterfind(iterstr)]
+
+ def appears(self, text):
+ """returns True if appears in the page"""
+ return text in self.raw_text
+
+ def __contains__(self, text):
+ return text in self.source
+
+ def has_title(self, text, level=None):
+ """returns True if text
+
+ :param level: the title's level (1 for h1, 2 for h2, etc.)
+ """
+ if level is None:
+ for hlist in self.title_tags:
+ if text in hlist:
+ return True
+ return False
+ else:
+ hlist = self.title_tags[level - 1]
+ return text in hlist
+
+ def has_title_regexp(self, pattern, level=None):
+ """returns True if pattern"""
+ sre = re.compile(pattern)
+ if level is None:
+ for hlist in self.title_tags:
+ for title in hlist:
+ if sre.match(title):
+ return True
+ return False
+ else:
+ hlist = self.title_tags[level - 1]
+ for title in hlist:
+ if sre.match(title):
+ return True
+ return False
+
+ def has_link(self, text, url=None):
+ """returns True if text was found in the page"""
+ for link_text, attrs in self.a_tags:
+ if text == link_text:
+ if url is None:
+ return True
+ try:
+ href = attrs['href']
+ if href == url:
+ return True
+ except KeyError:
+ continue
+ return False
+
+ def has_link_regexp(self, pattern, url=None):
+ """returns True if pattern was found in the page"""
+ sre = re.compile(pattern)
+ for link_text, attrs in self.a_tags:
+ if sre.match(link_text):
+ if url is None:
+ return True
+ try:
+ href = attrs['href']
+ if href == url:
+ return True
+ except KeyError:
+ continue
+ return False
+
+VALMAP = {None: None,
+ 'dtd': DTDValidator,
+ 'xml': XMLValidator,
+ 'html': HTMLValidator,
+ }
diff -r 1400aee10df4 -r faf279e33298 cubicweb/devtools/httptest.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/devtools/httptest.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,226 @@
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see .
+"""this module contains base classes and utilities for integration with running
+http server
+"""
+from __future__ import print_function
+
+__docformat__ = "restructuredtext en"
+
+import random
+import threading
+import socket
+
+from six import PY3
+from six.moves import range, http_client
+from six.moves.urllib.parse import urlparse
+
+
+from cubicweb.devtools.testlib import CubicWebTC
+from cubicweb.devtools import ApptestConfiguration
+
+
+def get_available_port(ports_scan):
+ """return the first available port from the given ports range
+
+ Try to connect port by looking for refused connection (111) or transport
+ endpoint already connected (106) errors
+
+ Raise a RuntimeError if no port can be found
+
+ :type ports_range: list
+ :param ports_range: range of ports to test
+ :rtype: int
+
+ .. see:: :func:`test.test_support.bind_port`
+ """
+ ports_scan = list(ports_scan)
+ random.shuffle(ports_scan) # lower the chance of race condition
+ for port in ports_scan:
+ try:
+ s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ sock = s.connect(("localhost", port))
+ except socket.error as err:
+ if err.args[0] in (111, 106):
+ return port
+ finally:
+ s.close()
+ raise RuntimeError('get_available_port([ports_range]) cannot find an available port')
+
+
+class _CubicWebServerTC(CubicWebTC):
+ """Class for running a Twisted-based test web server.
+ """
+ ports_range = range(7000, 8000)
+
+ def start_server(self):
+ raise NotImplementedError
+
+ def stop_server(self, timeout=15):
+ """Stop the webserver, waiting for the thread to return"""
+ raise NotImplementedError
+
+ def web_login(self, user=None, passwd=None):
+ """Log the current http session for the provided credential
+
+ If no user is provided, admin connection are used.
+ """
+ if user is None:
+ user = self.admlogin
+ passwd = self.admpassword
+ if passwd is None:
+ passwd = user
+ response = self.web_get("login?__login=%s&__password=%s" %
+ (user, passwd))
+ assert response.status == http_client.SEE_OTHER, response.status
+ self._ident_cookie = response.getheader('Set-Cookie')
+ assert self._ident_cookie
+ return True
+
+ def web_logout(self, user='admin', pwd=None):
+ """Log out current http user"""
+ if self._ident_cookie is not None:
+ response = self.web_get('logout')
+ self._ident_cookie = None
+
+ def web_request(self, path='', method='GET', body=None, headers=None):
+ """Return an http_client.HTTPResponse object for the specified path
+
+ Use available credential if available.
+ """
+ if headers is None:
+ headers = {}
+ if self._ident_cookie is not None:
+ assert 'Cookie' not in headers
+ headers['Cookie'] = self._ident_cookie
+ self._web_test_cnx.request(method, '/' + path, headers=headers, body=body)
+ response = self._web_test_cnx.getresponse()
+ response.body = response.read() # to chain request
+ response.read = lambda : response.body
+ return response
+
+ def web_get(self, path='', body=None, headers=None):
+ return self.web_request(path=path, body=body, headers=headers)
+
+ def setUp(self):
+ super(_CubicWebServerTC, self).setUp()
+ port = self.config['port'] or get_available_port(self.ports_range)
+ self.config.global_set_option('port', port) # force rewrite here
+ self.config.global_set_option('base-url', 'http://127.0.0.1:%d/' % port)
+ # call load_configuration again to let the config reset its datadir_url
+ self.config.load_configuration()
+ self.start_server()
+
+ def tearDown(self):
+ self.stop_server()
+ super(_CubicWebServerTC, self).tearDown()
+
+
+class CubicWebServerTC(_CubicWebServerTC):
+ def start_server(self):
+ if PY3:
+ self.skipTest('not using twisted on python3')
+ from twisted.internet import reactor
+ from cubicweb.etwist.server import run
+ # use a semaphore to avoid starting test while the http server isn't
+ # fully initilialized
+ semaphore = threading.Semaphore(0)
+ def safe_run(*args, **kwargs):
+ try:
+ run(*args, **kwargs)
+ finally:
+ semaphore.release()
+
+ reactor.addSystemEventTrigger('after', 'startup', semaphore.release)
+ t = threading.Thread(target=safe_run, name='cubicweb_test_web_server',
+ args=(self.config, True), kwargs={'repo': self.repo})
+ self.web_thread = t
+ t.start()
+ semaphore.acquire()
+ if not self.web_thread.isAlive():
+ # XXX race condition with actual thread death
+ raise RuntimeError('Could not start the web server')
+ #pre init utils connection
+ parseurl = urlparse(self.config['base-url'])
+ assert parseurl.port == self.config['port'], (self.config['base-url'], self.config['port'])
+ self._web_test_cnx = http_client.HTTPConnection(parseurl.hostname,
+ parseurl.port)
+ self._ident_cookie = None
+
+ def stop_server(self, timeout=15):
+ """Stop the webserver, waiting for the thread to return"""
+ from twisted.internet import reactor
+ if self._web_test_cnx is None:
+ self.web_logout()
+ self._web_test_cnx.close()
+ try:
+ reactor.stop()
+ self.web_thread.join(timeout)
+ assert not self.web_thread.isAlive()
+
+ finally:
+ reactor.__init__()
+
+
+class CubicWebWsgiTC(CubicWebServerTC):
+ def start_server(self):
+ from cubicweb.wsgi.handler import CubicWebWSGIApplication
+ from wsgiref import simple_server
+ from six.moves import queue
+
+ config = self.config
+ port = config['port'] or 8080
+ interface = config['interface']
+ handler_cls = simple_server.WSGIRequestHandler
+ app = CubicWebWSGIApplication(config)
+ start_flag = queue.Queue()
+
+ def run(config, *args, **kwargs):
+ try:
+ self.httpd = simple_server.WSGIServer((interface, port), handler_cls)
+ self.httpd.set_app(app)
+ except Exception as exc:
+ start_flag.put(False)
+ start_flag.put(exc)
+ raise
+ else:
+ start_flag.put(True)
+ try:
+ self.httpd.serve_forever()
+ finally:
+ self.httpd.server_close()
+ t = threading.Thread(target=run, name='cubicweb_test_web_server',
+ args=(self.config, True), kwargs={'repo': self.repo})
+ self.web_thread = t
+ t.start()
+ flag = start_flag.get()
+ if not flag:
+ t.join()
+ self.fail(start_flag.get())
+ parseurl = urlparse(self.config['base-url'])
+ assert parseurl.port == self.config['port'], (self.config['base-url'], self.config['port'])
+ self._web_test_cnx = http_client.HTTPConnection(parseurl.hostname,
+ parseurl.port)
+ self._ident_cookie = None
+
+ def stop_server(self, timeout=15):
+ if self._web_test_cnx is None:
+ self.web_logout()
+ self._web_test_cnx.close()
+ self.httpd.shutdown()
+ self.web_thread.join(timeout)
diff -r 1400aee10df4 -r faf279e33298 cubicweb/devtools/instrument.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/devtools/instrument.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,225 @@
+# copyright 2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr -- mailto:contact@logilab.fr
+#
+# This program is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program. If not, see <http://www.gnu.org/licenses/>.
+"""Instrumentation utilities"""
+from __future__ import print_function
+
+import os
+
+try:
+ import pygraphviz
+except ImportError:
+ pygraphviz = None
+
+from cubicweb.cwvreg import CWRegistryStore
+from cubicweb.devtools.devctl import DevConfiguration
+
+
# palette cycled through by get_color; order matters for stable assignment
ALL_COLORS = [
    "00FF00", "0000FF", "FFFF00", "FF00FF", "00FFFF", "000000",
    "800000", "008000", "000080", "808000", "800080", "008080", "808080",
    "C00000", "00C000", "0000C0", "C0C000", "C000C0", "00C0C0", "C0C0C0",
    "400000", "004000", "000040", "404000", "400040", "004040", "404040",
    "200000", "002000", "000020", "202000", "200020", "002020", "202020",
    "600000", "006000", "000060", "606000", "600060", "006060", "606060",
    "A00000", "00A000", "0000A0", "A0A000", "A000A0", "00A0A0", "A0A0A0",
    "E00000", "00E000", "0000E0", "E0E000", "E000E0", "00E0E0", "E0E0E0",
    ]
# cache of key -> '#RRGGBB' assignments, filled on demand
_COLORS = {}


def get_color(key):
    """Return a stable '#RRGGBB' color string for `key`.

    Each new key is assigned the next color of ALL_COLORS (wrapping around);
    subsequent calls with the same key return the same color.
    """
    if key not in _COLORS:
        _COLORS[key] = '#' + ALL_COLORS[len(_COLORS) % len(ALL_COLORS)]
    return _COLORS[key]
+
def warn(msg, *args):
    """Print a warning message to stdout, %-interpolating `args` into `msg`."""
    print('WARNING: ' + (msg % args))
+
def info(msg):
    """Print an informational message to stdout."""
    line = 'INFO: ' + msg
    print(line)
+
+
class PropagationAnalyzer(object):
    """Abstract propagation analyzer, providing utility function to extract
    entities involved in propagation from a schema, as well as propagation
    rules from hooks (provided they use intrumentalized sets, see
    :class:`CubeTracerSet`).

    Concrete classes should at least define `prop_rel` class attribute and
    implements the `is_root` method.

    See `localperms` or `nosylist` cubes for example usage (`ccplugin` module).
    """
    prop_rel = None  # name of the propagation relation

    def init(self, cube):
        """Initialize analyze for the given cube, returning the (already
        loaded) vregistry and a set of entities which we're interested in.
        """
        config = DevConfiguration(cube)
        schema = config.load_schema()
        vreg = CWRegistryStore(config)
        vreg.set_schema(schema)  # set_schema triggers objects registrations
        eschemas = set(eschema for eschema in schema.entities()
                       if self.should_include(eschema))
        return vreg, eschemas

    def is_root(self, eschema):
        """Return `True` if given entity schema is a root of the graph"""
        raise NotImplementedError()

    def should_include(self, eschema):
        """Return `True` if given entity schema should be included by the
        graph: either it holds the propagation relation or it is a root.
        """
        if self.prop_rel in eschema.subjrels or self.is_root(eschema):
            return True
        return False

    def prop_edges(self, s_rels, o_rels, eschemas):
        """Return a set of edges where propagation has been detected.

        Each edge is defined by a 4-uple (from node, to node, rtype, package)
        where `rtype` is the relation type linking the two nodes and
        `package` is the cube adding the rule to the propagation control set
        (see :class:`CubeTracerSet`).
        """
        # next(iter(...)): iterators have no .next() method on python 3
        schema = next(iter(eschemas)).schema
        prop_edges = set()
        for rtype in s_rels:
            found = False
            for subj, obj in schema.rschema(rtype).rdefs:
                if subj in eschemas and obj in eschemas:
                    found = True
                    prop_edges.add((subj, obj, rtype, s_rels.value_cube[rtype]))
            if not found:
                warn('no rdef match for %s', rtype)
        for rtype in o_rels:
            found = False
            for subj, obj in schema.rschema(rtype).rdefs:
                if subj in eschemas and obj in eschemas:
                    found = True
                    # object relations propagate from object to subject
                    prop_edges.add((obj, subj, rtype, o_rels.value_cube[rtype]))
            if not found:
                warn('no rdef match for %s', rtype)
        return prop_edges

    def detect_problems(self, eschemas, edges):
        """Given the set of analyzed entity schemas and edges between them,
        return a set of entity schemas where a problem has been detected.
        """
        problematic = set()
        for eschema in eschemas:
            if self.has_problem(eschema, edges):
                problematic.add(eschema)
        not_problematic = set(eschemas).difference(problematic)
        if not_problematic:
            info('nothing problematic in: %s' %
                 ', '.join(e.type for e in not_problematic))
        return problematic

    def has_problem(self, eschema, edges):
        """Return `True` if the given schema is considered problematic,
        considering base propagation rules.
        """
        root = self.is_root(eschema)
        has_prop_rel = self.prop_rel in eschema.subjrels
        # root but no propagation relation
        if root and not has_prop_rel:
            warn('%s is root but miss %s', eschema, self.prop_rel)
            return True
        # propagated but without propagation relation / not propagated but
        # with propagation relation
        if not has_prop_rel and \
           any(edge for edge in edges if edge[1] == eschema):
            warn("%s miss %s but is reached by propagation",
                 eschema, self.prop_rel)
            return True
        elif has_prop_rel and not root:
            rdef = eschema.rdef(self.prop_rel, takefirst=True)
            edges = [edge for edge in edges if edge[1] == eschema]
            if not edges:
                warn("%s has %s but isn't reached by "
                     "propagation", eschema, self.prop_rel)
                return True
            # propagation relation / propagation rule not added by
            # the same cube
            elif not any(edge for edge in edges if edge[-1] == rdef.package):
                warn('%s has %s relation / propagation rule'
                     ' not added by the same cube (%s / %s)', eschema,
                     self.prop_rel, rdef.package, edges[0][-1])
                return True
        return False

    def init_graph(self, eschemas, edges, problematic):
        """Initialize and return graph, adding given nodes (entity schemas)
        and edges between them.

        Require pygraphviz installed.
        """
        if pygraphviz is None:
            raise RuntimeError('pygraphviz is not installed')
        graph = pygraphviz.AGraph(strict=False, directed=True)
        for eschema in eschemas:
            if eschema in problematic:
                # highlight problematic nodes in red
                params = {'color': '#ff0000', 'fontcolor': '#ff0000'}
            else:
                params = {}
            graph.add_node(eschema.type, **params)
        for subj, obj, rtype, package in edges:
            # edges are colored per contributing cube (see legend)
            graph.add_edge(str(subj), str(obj), label=rtype,
                           color=get_color(package))
        return graph

    def add_colors_legend(self, graph):
        """Add a legend of used colors to the graph."""
        for package, color in sorted(_COLORS.items()):
            graph.add_node(package, color=color, fontcolor=color, shape='record')
+
+
class CubeTracerSet(object):
    """Set-like wrapper recording which cube was being loaded whenever a
    value was added.

    The value -> cube-name mapping accumulates in the `value_cube`
    attribute dictionary.

    See `localperms` or `nosylist` cubes for example usage (`hooks` module).
    """

    def __init__(self, vreg, wrapped):
        self.vreg = vreg
        self.wrapped = wrapped
        self.value_cube = {}

    def add(self, value):
        """Add `value` to the wrapped set, remembering the loading cube."""
        self.wrapped.add(value)
        cube = self.vreg.currently_loading_cube
        try:
            previous = self.value_cube[value]
        except KeyError:
            self.value_cube[value] = cube
        else:
            # the same value registered twice usually means duplicated rules
            warn('%s is propagated by cube %s and cube %s',
                 value, previous, cube)

    def __iter__(self):
        return iter(self.wrapped)

    def __ior__(self, other):
        for value in other:
            self.add(value)
        return self

    def __ror__(self, other):
        other |= self.wrapped
        return other
diff -r 1400aee10df4 -r faf279e33298 cubicweb/devtools/qunit.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/devtools/qunit.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,293 @@
+# copyright 2010-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+from __future__ import absolute_import
+
+import os, os.path as osp
+import errno
+from tempfile import mkdtemp
+from subprocess import Popen, PIPE, STDOUT
+
+from six.moves.queue import Queue, Empty
+
+# imported by default to simplify further import statements
+from logilab.common.testlib import unittest_main, with_tempdir, Tags
+import webtest.http
+
+import cubicweb
+from cubicweb.view import View
+from cubicweb.web.controller import Controller
+from cubicweb.web.views.staticcontrollers import StaticFileController, STATIC_CONTROLLERS
+from cubicweb.devtools import webtest as cwwebtest
+
+
class FirefoxHelper(object):
    """Drive a Firefox instance used to run the QUnit test page.

    On POSIX systems the browser is wrapped in xvfb-run so it renders to a
    virtual display instead of requiring a real X server.
    """

    def __init__(self, url=None):
        # `url` is accepted but unused here; the URL is given to start()
        self._process = None
        self._profile_dir = mkdtemp(prefix='cwtest-ffxprof-')
        self.firefox_cmd = ['firefox', '-no-remote']
        if os.name == 'posix':
            self.firefox_cmd = [osp.join(osp.dirname(__file__), 'data', 'xvfb-run.sh'),
                                '-a', '-s', '-noreset -screen 0 800x600x24'] + self.firefox_cmd

    def test(self):
        """Return a (available, message) pair telling whether firefox can
        actually be executed on this machine.
        """
        try:
            proc = Popen(['firefox', '--help'], stdout=PIPE, stderr=STDOUT)
            stdout, _ = proc.communicate()
            return proc.returncode == 0, stdout
        except OSError as exc:
            if exc.errno == errno.ENOENT:
                # firefox executable not found on PATH
                msg = '[%s] %s' % (errno.errorcode[exc.errno], exc.strerror)
                return False, msg
            raise

    def start(self, url):
        """Start firefox on `url`, stopping any previously started instance.

        Browser output is discarded (redirected to os.devnull).
        """
        self.stop()
        cmd = self.firefox_cmd + ['-silent', '--profile', self._profile_dir,
                                  '-url', url]
        with open(os.devnull, 'w') as fnull:
            self._process = Popen(cmd, stdout=fnull, stderr=fnull)

    def stop(self):
        """Terminate the browser process if one is running."""
        if self._process is not None:
            # NOTE(review): asserts the process has not already exited on its
            # own -- confirm this cannot race with a browser crash
            assert self._process.returncode is None, self._process.returncode
            self._process.terminate()
            self._process.wait()
            self._process = None

    def __del__(self):
        # best-effort cleanup if stop() was never called explicitly
        self.stop()
+
+
class QUnitTestCase(cwwebtest.CubicWebTestTC):
    """Run the javascript test files listed in `all_js_tests` through QUnit
    in a real browser; results are posted back by the page and collected
    through `test_queue`.
    """

    tags = cwwebtest.CubicWebTestTC.tags | Tags(('qunit',))

    # entries are (testfile,) or (testfile, (dep_a, dep_b))
    all_js_tests = ()

    def setUp(self):
        super(QUnitTestCase, self).setUp()
        self.test_queue = Queue()

        # controller subclass bound to this test case so that callbacks from
        # the browser can feed results back through our queue
        class MyQUnitResultController(QUnitResultController):
            tc = self
            test_queue = self.test_queue

        self._qunit_controller = MyQUnitResultController
        self.webapp.app.appli.vreg.register(MyQUnitResultController)
        self.webapp.app.appli.vreg.register(QUnitView)
        self.webapp.app.appli.vreg.register(CWDevtoolsStaticController)
        self.server = webtest.http.StopableWSGIServer.create(self.webapp.app)
        # the browser must reach the temporary server, not the configured URL
        self.config.global_set_option('base-url', self.server.application_url)

    def tearDown(self):
        self.server.shutdown()
        self.webapp.app.appli.vreg.unregister(self._qunit_controller)
        self.webapp.app.appli.vreg.unregister(QUnitView)
        self.webapp.app.appli.vreg.unregister(CWDevtoolsStaticController)
        super(QUnitTestCase, self).tearDown()

    def test_javascripts(self):
        for args in self.all_js_tests:
            self.assertIn(len(args), (1, 2))
            test_file = args[0]
            if len(args) > 1:
                depends = args[1]
            else:
                depends = ()
            # _test_qunit yields one (name, callable, args) triple per
            # browser-side test result
            for name, func, args in self._test_qunit(test_file, depends):
                with self.subTest(name=name):
                    func(*args)

    @with_tempdir
    def _test_qunit(self, test_file, depends=(), timeout=10):
        """Run `test_file` under QUnit, yielding (name, callable, args)
        triples; `timeout` is the inactivity limit in seconds between two
        received results.
        """
        QUnitView.test_file = test_file
        QUnitView.depends = depends

        # drain results possibly left over from a previous run
        while not self.test_queue.empty():
            self.test_queue.get(False)

        browser = FirefoxHelper()
        isavailable, reason = browser.test()
        if not isavailable:
            self.fail('firefox not available or not working properly (%s)' % reason)
        browser.start(self.config['base-url'] + "?vid=qunit")
        test_count = 0
        error = False

        def runtime_error(*data):
            raise RuntimeError(*data)

        # a (None, None, None) triple signals that the qunit run is done
        while not error:
            try:
                result, test_name, msg = self.test_queue.get(timeout=timeout)
                test_name = '%s (%s)' % (test_name, test_file)
                if result is None:
                    break
                test_count += 1
                if result:
                    yield test_name, lambda *args: 1, ()
                else:
                    yield test_name, self.fail, (msg, )
            except Empty:
                error = True
                msg = '%s inactivity timeout (%is). %i test results received'
                yield test_file, runtime_error, (msg % (test_file, timeout, test_count), )
        browser.stop()
        if test_count <= 0 and not error:
            yield test_name, runtime_error, ('No test yielded by qunit for %s' % test_file, )
+
class QUnitResultController(Controller):
    """Controller receiving QUnit result callbacks POSTed by the browser."""

    __regid__ = 'qunit_result'

    # Class variables to circumvent the instantiation of a new Controller for
    # each request.
    _log_stack = []  # store QUnit log messages
    _current_module_name = ''  # store the current QUnit module name

    def publish(self, rset=None):
        """Dispatch to handle_<event> according to the posted 'event' field."""
        event = self._cw.form['event']
        getattr(self, 'handle_%s' % event)()
        return b''

    def handle_module_start(self):
        self.__class__._current_module_name = self._cw.form.get('name', '')

    def handle_test_done(self):
        # push one (success, name, aggregated log) entry per finished test
        name = '%s // %s' % (self._current_module_name, self._cw.form.get('name', ''))
        failures = int(self._cw.form.get('failures', 0))
        total = int(self._cw.form.get('total', 0))

        self._log_stack.append('%i/%i assertions failed' % (failures, total))
        msg = '\n'.join(self._log_stack)

        if failures:
            self.tc.test_queue.put((False, name, msg))
        else:
            self.tc.test_queue.put((True, name, msg))
        # reset the shared log for the next test
        self._log_stack[:] = []

    def handle_done(self):
        # (None, None, None) tells the consumer the whole run is over
        self.tc.test_queue.put((None, None, None))

    def handle_log(self):
        # accumulate a single assertion's outcome into the shared log stack
        result = self._cw.form['result']
        message = self._cw.form.get('message', '')
        actual = self._cw.form.get('actual')
        expected = self._cw.form.get('expected')
        source = self._cw.form.get('source')
        log = '%s: %s' % (result, message)
        if result == 'false' and actual is not None and expected is not None:
            log += ' (got: %s, expected: %s)' % (actual, expected)
        if source is not None:
            log += '\n' + source
        self._log_stack.append(log)
+
+
class QUnitView(View):
    """View rendering the QUnit HTML page for the configured test file.

    NOTE(review): the HTML/script markup inside the string literals below
    appears to have been stripped in this copy of the file (the format
    placeholders consumed by the '%' operators are gone); restore the markup
    from upstream before relying on this view.
    """
    __regid__ = 'qunit'

    templatable = False

    # set by QUnitTestCase._test_qunit before the page is requested
    depends = None
    test_file = None

    def call(self, **kwargs):
        w = self.w
        req = self._cw
        w(u'''







''')
        w(u'')
        w(u'')
        w(u'')

        for dep in self.depends:
            w(u' \n' % dep)

        w(u' ')
        w(u' ' % self.test_file)
        w(u'''




''')
+
+
class CWDevtoolsStaticController(StaticFileController):
    """Serve files from the devtools `data` directory under the
    '/devtools/...' URL prefix.
    """

    __regid__ = 'devtools'

    def publish(self, rset=None):
        # drop the '<regid>/' prefix to get the path relative to the data dir
        prefix_length = len(self.__regid__) + 1
        relpath = self.relpath[prefix_length:]
        staticdir = osp.join(osp.dirname(__file__), 'data')
        return self.static_file(osp.join(staticdir, relpath))


STATIC_CONTROLLERS.append(CWDevtoolsStaticController)
+
+
if __name__ == '__main__':
    # allow running this test-support module directly
    unittest_main()
diff -r 1400aee10df4 -r faf279e33298 cubicweb/devtools/realdbtest.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/devtools/realdbtest.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,59 @@
+# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+from cubicweb import toolsutils
+from cubicweb.devtools import DEFAULT_SOURCES, BaseApptestConfiguration
+
class RealDatabaseConfiguration(BaseApptestConfiguration):
    """Test configuration running against a pre-existing (real) database:
    the repository is not initialized from scratch.
    """
    init_repository = False
    sourcesdef = DEFAULT_SOURCES.copy()

    def sources(self):
        """
        By default, we run tests with the sqlite DB backend.
        One may use its own configuration by just creating a
        'sources' file in the test directory from which tests are
        launched.
        """
        self._sources = self.sourcesdef
        return self._sources
+
+
def buildconfig(dbuser, dbpassword, dbname, adminuser, adminpassword, dbhost=None):
    """Convenience function building a real-db configuration class.

    Returns a RealDatabaseConfiguration subclass whose `sourcesdef` points
    at the given postgres database and admin account.
    """
    system_source = {
        'adapter': 'native',
        'db-encoding': 'UTF-8',  # 'ISO-8859-1' for legacy databases
        'db-user': dbuser,
        'db-password': dbpassword,
        'db-name': dbname,
        'db-driver': 'postgres',
        'db-host': dbhost,
    }
    admin_source = {
        'login': adminuser,
        'password': adminpassword,
    }
    sourcesdef = {'system': system_source, 'admin': admin_source}
    return type('MyRealDBConfig', (RealDatabaseConfiguration,),
                {'sourcesdef': sourcesdef})
+
+
def loadconfig(filename):
    """Convenience function building a real-db configuration class from a
    sources-style configuration file.
    """
    sourcesdef = toolsutils.read_config(filename)
    return type('MyRealDBConfig', (RealDatabaseConfiguration,),
                {'sourcesdef': sourcesdef})
diff -r 1400aee10df4 -r faf279e33298 cubicweb/devtools/repotest.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/devtools/repotest.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,353 @@
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""some utilities to ease repository testing
+
+This module contains functions to initialize a new repository.
+"""
+from __future__ import print_function
+
+__docformat__ = "restructuredtext en"
+
+from pprint import pprint
+
+from logilab.common.decorators import cachedproperty
+from logilab.common.testlib import SkipTest
+
+from cubicweb.devtools.testlib import RepoAccess
+
def tuplify(mylist):
    """Return a list with every item of `mylist` converted to a tuple."""
    return list(map(tuple, mylist))
+
def snippet_key(a):
    """Sort key for (varmap, snippets) pairs: the sorted varmap items
    followed by the snippets' expressions.
    """
    varmap, snippets = a[0], a[1]
    # varmap may be a dict or a key/value tuple; dict() normalizes both
    return (sorted(dict(varmap).items()), [e.expression for e in snippets])
+
def check_plan(self, rql, expected, kwargs=None):
    """Build an execution plan for `rql` and compare its steps to `expected`.

    Meant to be called with a test case instance providing `session`,
    `_prepare_plan` and `planner` attributes; `expected` is a sequence of
    step test-representations, in order.
    """
    with self.session.new_cnx() as cnx:
        plan = self._prepare_plan(cnx, rql, kwargs)
        self.planner.build_plan(plan)
        try:
            self.assertEqual(len(plan.steps), len(expected),
                             'expected %s steps, got %s' % (len(expected), len(plan.steps)))
            # step order is important
            for i, step in enumerate(plan.steps):
                compare_steps(self, step.test_repr(), expected[i])
        except AssertionError:
            # dump the actual steps to ease debugging, then re-raise
            pprint([step.test_repr() for step in plan.steps])
            raise
+
def compare_steps(self, step, expected):
    """Recursively compare a plan step's test representation with its
    expected form.

    A step representation looks like (type, [queries], characteristics...,
    children); children are compared recursively, order-insensitively for
    union steps.
    """
    try:
        self.assertEqual(step[0], expected[0], 'expected step type %s, got %s' % (expected[0], step[0]))
        if len(step) > 2 and isinstance(step[1], list) and isinstance(expected[1], list):
            queries, equeries = step[1], expected[1]
            self.assertEqual(len(queries), len(equeries),
                             'expected %s queries, got %s' % (len(equeries), len(queries)))
            for i, (rql, sol) in enumerate(queries):
                self.assertEqual(rql, equeries[i][0])
                # solutions are compared in a canonical (sorted) order
                self.assertEqual(sorted(sorted(x.items()) for x in sol),
                                 sorted(sorted(x.items()) for x in equeries[i][1]))
            idx = 2
        else:
            idx = 1
        self.assertEqual(step[idx:-1], expected[idx:-1],
                         'expected step characteristic \n%s\n, got\n%s' % (expected[1:-1], step[1:-1]))
        self.assertEqual(len(step[-1]), len(expected[-1]),
                         'got %s child steps, expected %s' % (len(step[-1]), len(expected[-1])))
    except AssertionError:
        # show the failing step (children excluded) before re-raising
        print('error on step ', end=' ')
        pprint(step[:-1])
        raise
    children = step[-1]
    if step[0] in ('UnionFetchStep', 'UnionStep'):
        # children order is not significant for union steps: sort both sides
        children = sorted(children)
        expectedchildren = sorted(expected[-1])
    else:
        expectedchildren = expected[-1]
    for i, substep in enumerate(children):
        compare_steps(self, substep, expectedchildren[i])
+
+
class DumbOrderedDict(list):
    """List of (key, value) pairs exposing a minimal ordered-mapping API.

    Iteration yields keys in insertion order; lookup is linear. Useful in
    tests where a deterministic iteration order is required.
    """

    def __iter__(self):
        return self.iterkeys()

    def __contains__(self, key):
        return key in self.iterkeys()

    def __getitem__(self, key):
        for candidate, value in list.__iter__(self):
            if candidate == key:
                return value
        raise KeyError(key)

    def iterkeys(self):
        return (key for key, _value in list.__iter__(self))

    def iteritems(self):
        return (pair for pair in list.__iter__(self))

    def items(self):
        return [pair for pair in list.__iter__(self)]
+
+
def schema_eids_idx(schema):
    """Return a dictionary mapping schema types to their eids so we can
    reread it from the fs instead of the db (too costly) between tests.

    Relation definitions are keyed by their (subject, rtype, object) triple.
    """
    index = {}
    for eschema in schema.entities():
        index[eschema] = eschema.eid
    for rschema in schema.relations():
        index[rschema] = rschema.eid
        for rdef in rschema.rdefs.values():
            index[(rdef.subject, rdef.rtype, rdef.object)] = rdef.eid
    return index
+
def restore_schema_eids_idx(schema, schema_eids):
    """Rebuild the schema eid index from a mapping produced by
    `schema_eids_idx`.
    """
    for eschema in schema.entities():
        eschema.eid = schema_eids[eschema]
        schema._eid_index[eschema.eid] = eschema
    for rschema in schema.relations():
        rschema.eid = schema_eids[rschema]
        schema._eid_index[rschema.eid] = rschema
        for rdef in rschema.rdefs.values():
            rdef.eid = schema_eids[(rdef.subject, rdef.rtype, rdef.object)]
            schema._eid_index[rdef.eid] = rdef
+
+
+from logilab.common.testlib import TestCase, mock_object
+from logilab.database import get_db_helper
+
+from rql import RQLHelper
+
+from cubicweb.devtools.testlib import BaseTestCase
+from cubicweb.devtools.fake import FakeRepo, FakeConfig, FakeSession, FakeRequest
+from cubicweb.server import set_debug, debugged
+from cubicweb.server.querier import QuerierHelper
+from cubicweb.server.session import Session
+from cubicweb.server.sources.rql2sql import SQLGenerator, remove_unused_solutions
+
class RQLGeneratorTC(BaseTestCase):
    """Base test case for RQL to SQL generation.

    Concrete classes must set `schema`; setting `backend` to a database
    backend name additionally provides a SQLGenerator as `self.o`.
    """
    schema = backend = None  # set this in concrete class

    @classmethod
    def setUpClass(cls):
        if cls.backend is not None:
            try:
                cls.dbhelper = get_db_helper(cls.backend)
            except ImportError as ex:
                # backend driver not installed: skip the whole test class
                raise SkipTest(str(ex))

    def setUp(self):
        self.repo = FakeRepo(self.schema, config=FakeConfig(apphome=self.datadir))
        self.repo.system_source = mock_object(dbdriver=self.backend)
        self.rqlhelper = RQLHelper(self.schema,
                                   special_relations={'eid': 'uid',
                                                      'has_text': 'fti'},
                                   backend=self.backend)
        self.qhelper = QuerierHelper(self.repo, self.schema)
        # monkey-patch permission checking and principal selection so plans
        # are deterministic; restored in tearDown
        ExecutionPlan._check_permissions = _dummy_check_permissions
        rqlannotation._select_principal = _select_principal
        if self.backend is not None:
            self.o = SQLGenerator(self.schema, self.dbhelper)

    def tearDown(self):
        ExecutionPlan._check_permissions = _orig_check_permissions
        rqlannotation._select_principal = _orig_select_principal

    def set_debug(self, debug):
        set_debug(debug)

    def debugged(self, debug):
        return debugged(debug)

    def _prepare(self, rql):
        """Parse, solve, simplify and preprocess `rql`, returning the
        prepared union tree.
        """
        union = self.rqlhelper.parse(rql)
        self.rqlhelper.compute_solutions(union)
        self.rqlhelper.simplify(union)
        plan = self.qhelper.plan_factory(union, {}, FakeSession(self.repo))
        plan.preprocess(union)
        for select in union.children:
            # deterministic solution order for comparisons
            select.solutions.sort(key=lambda x: list(x.items()))
        return union
+
+
class BaseQuerierTC(TestCase):
    """Base test case for querier tests running against a repository.

    Concrete classes must set the `repo` class attribute.
    """
    repo = None  # set this in concrete class

    @cachedproperty
    def session(self):
        return self._access._session

    def setUp(self):
        self.o = self.repo.querier
        self._access = RepoAccess(self.repo, 'admin', FakeRequest)
        self.ueid = self.session.user.eid
        assert self.ueid != -1
        self.repo._type_source_cache = {}  # clear cache
        # remember the highest eid so cleanup() only deletes test entities
        self.maxeid = self.get_max_eid()
        do_monkey_patch()
        self._dumb_sessions = []

    def get_max_eid(self):
        with self.session.new_cnx() as cnx:
            return cnx.execute('Any MAX(X)')[0][0]

    def cleanup(self):
        # delete every entity created since setUp
        with self.session.new_cnx() as cnx:
            cnx.execute('DELETE Any X WHERE X eid > %s' % self.maxeid)
            cnx.commit()

    def tearDown(self):
        undo_monkey_patch()
        self.cleanup()
        assert self.session.user.eid != -1

    def set_debug(self, debug):
        set_debug(debug)

    def debugged(self, debug):
        return debugged(debug)

    def _rqlhelper(self):
        rqlhelper = self.repo.vreg.rqlhelper
        # reset uid_func so it don't try to get type from eids
        rqlhelper._analyser.uid_func = None
        rqlhelper._analyser.uid_func_mapping = {}
        return rqlhelper

    def _prepare_plan(self, cnx, rql, kwargs=None, simplify=True):
        """Parse and solve `rql`, returning an execution plan."""
        rqlhelper = self._rqlhelper()
        rqlst = rqlhelper.parse(rql)
        rqlhelper.compute_solutions(rqlst, kwargs=kwargs)
        if simplify:
            rqlhelper.simplify(rqlst)
        for select in rqlst.children:
            # deterministic solution order for comparisons
            select.solutions.sort(key=lambda x: list(x.items()))
        return self.o.plan_factory(rqlst, kwargs, cnx)

    def _prepare(self, cnx, rql, kwargs=None):
        """Return the preprocessed select tree for `rql` (not simplified)."""
        plan = self._prepare_plan(cnx, rql, kwargs, simplify=False)
        plan.preprocess(plan.rqlst)
        rqlst = plan.rqlst.children[0]
        rqlst.solutions = remove_unused_solutions(rqlst, rqlst.solutions, self.repo.schema)[0]
        return rqlst

    def user_groups_session(self, *groups):
        """lightweight session using the current user with hi-jacked groups"""
        # use self.session.user.eid to get correct owned_by relation, unless explicit eid
        with self.session.new_cnx() as cnx:
            u = self.repo._build_user(cnx, self.session.user.eid)
            u._groups = set(groups)
        s = Session(u, self.repo)
        return s

    def qexecute(self, rql, args=None, build_descr=True):
        """Execute `rql` in a fresh connection, committing write queries."""
        with self.session.new_cnx() as cnx:
            try:
                return self.o.execute(cnx, rql, args, build_descr)
            finally:
                # write queries need an explicit commit to be visible
                if rql.startswith(('INSERT', 'DELETE', 'SET')):
                    cnx.commit()
+
+
class BasePlannerTC(BaseQuerierTC):
    """Base test case for execution planner tests."""

    def setup(self):
        # NOTE(review): named `setup`, not `setUp` -- presumably called
        # explicitly by concrete classes; confirm before renaming.
        # XXX source_defs
        self.o = self.repo.querier
        # next(iter(...)) works on python 3 as well, where dict views are
        # not indexable (the original used `.values()[0]`)
        self.session = next(iter(self.repo._sessions.values()))
        self.schema = self.o.schema
        self.system = self.repo.system_source
        do_monkey_patch()
        self.repo.vreg.rqlhelper.backend = 'postgres'  # so FTIRANK is considered

    def tearDown(self):
        undo_monkey_patch()

    def _prepare_plan(self, cnx, rql, kwargs=None):
        """Parse, solve and annotate `rql`, returning an execution plan."""
        rqlst = self.o.parse(rql, annotate=True)
        self.o.solutions(cnx, rqlst, kwargs)
        if rqlst.TYPE == 'select':
            self.repo.vreg.rqlhelper.annotate(rqlst)
            for select in rqlst.children:
                # deterministic solution order for comparisons
                select.solutions.sort(key=lambda x: list(x.items()))
        else:
            rqlst.solutions.sort(key=lambda x: list(x.items()))
        return self.o.plan_factory(rqlst, kwargs, cnx)
+
+
# monkey patch some methods to get predictable results #######################

from cubicweb import rqlrewrite
# keep references to the original implementations so undo_monkey_patch can
# restore them
_orig_iter_relations = rqlrewrite.iter_relations
_orig_insert_snippets = rqlrewrite.RQLRewriter.insert_snippets
_orig_build_variantes = rqlrewrite.RQLRewriter.build_variantes


def _insert_snippets(self, snippets, varexistsmap=None):
    # insert snippets in a deterministic (sorted) order
    _orig_insert_snippets(self, sorted(snippets, key=snippet_key), varexistsmap)


def _build_variantes(self, newsolutions):
    # sort variantes (and the items inside each of them) so that iteration
    # order is reproducible across runs
    variantes = _orig_build_variantes(self, newsolutions)
    sortedvariantes = []
    for variante in variantes:
        orderedkeys = sorted((k[1], k[2], v) for k, v in variante.items())
        variante = DumbOrderedDict(sorted(variante.items(),
                                          key=lambda a: (a[0][1], a[0][2], a[1])))
        sortedvariantes.append((orderedkeys, variante))
    return [v for ok, v in sorted(sortedvariantes)]
+
from cubicweb.server.querier import ExecutionPlan
# original implementation, restored by undo_monkey_patch
_orig_check_permissions = ExecutionPlan._check_permissions


def _check_permissions(*args, **kwargs):
    # same as the original, but with deterministically ordered results
    res, restricted = _orig_check_permissions(*args, **kwargs)
    res = DumbOrderedDict(sorted(res.items(), key=lambda x: [list(y.items()) for y in x[1]]))
    return res, restricted


def _dummy_check_permissions(self, rqlst):
    # bypass permission checking entirely: every solution is allowed
    return {(): rqlst.solutions}, set()
+
from cubicweb.server import rqlannotation
# original implementation, restored by RQLGeneratorTC.tearDown
_orig_select_principal = rqlannotation._select_principal


def _select_principal(scope, relations):
    # force a deterministic principal selection by sorting the candidate
    # relations before delegating to the original implementation
    def sort_key(something):
        try:
            return something.r_type
        except AttributeError:
            # (relation, index) pairs instead of bare relations
            return (something[0].r_type, something[1])
    return _orig_select_principal(scope, relations,
                                  _sort=lambda rels: sorted(rels, key=sort_key))


def _ordered_iter_relations(stinfo):
    # iterate relations in a stable (r_type) order
    return sorted(_orig_iter_relations(stinfo), key=lambda x: x.r_type)
+
def do_monkey_patch():
    """Install the deterministic variants of the patched methods."""
    rqlrewrite.iter_relations = _ordered_iter_relations
    rqlrewrite.RQLRewriter.insert_snippets = _insert_snippets
    rqlrewrite.RQLRewriter.build_variantes = _build_variantes
    ExecutionPlan._check_permissions = _check_permissions
    ExecutionPlan.tablesinorder = None


def undo_monkey_patch():
    """Restore the original implementations.

    NOTE(review): the `ExecutionPlan.tablesinorder` attribute set by
    do_monkey_patch is left in place -- confirm this is harmless.
    """
    rqlrewrite.iter_relations = _orig_iter_relations
    rqlrewrite.RQLRewriter.insert_snippets = _orig_insert_snippets
    rqlrewrite.RQLRewriter.build_variantes = _orig_build_variantes
    ExecutionPlan._check_permissions = _orig_check_permissions
diff -r 1400aee10df4 -r faf279e33298 cubicweb/devtools/stresstester.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/devtools/stresstester.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,192 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see .
+""" Usage: %s [OPTIONS]
+
+Stress test a CubicWeb repository
+
+OPTIONS:
+ -h / --help
+ Display this help message and exit.
+
+ -u / --user
+ Connect as instead of being prompted to give it.
+ -p / --password
+ Automatically give for authentication instead of being prompted
+ to give it.
+
+ -n / --nb-times
+ Repeat queries times.
+ -t / --nb-threads
+ Execute queries in parallel threads.
+ -P / --profile
+ dumps profile results (hotshot) in
+ -o / --report-output
+ Write profiler report into rather than on stdout
+
+Copyright (c) 2003-2011 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
+http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+from __future__ import print_function
+
+import os
+import sys
+import threading
+import getopt
+import traceback
+from getpass import getpass
+from os.path import basename
+from time import clock
+
+from logilab.common.fileutils import lines
+from logilab.common.ureports import Table, TextWriter
+from cubicweb.server.repository import Repository
+
+TB_LOCK = threading.Lock()
+
+class QueryExecutor:
+ def __init__(self, session, times, queries, reporter = None):
+ self._session = session
+ self._times = times
+ self._queries = queries
+ self._reporter = reporter
+
+ def run(self):
+ with self._session.new_cnx() as cnx:
+ times = self._times
+ while times:
+ for index, query in enumerate(self._queries):
+ start = clock()
+ try:
+ cnx.execute(query)
+ except Exception:
+ TB_LOCK.acquire()
+ traceback.print_exc()
+ TB_LOCK.release()
+ return
+ if self._reporter is not None:
+ self._reporter.add_proftime(clock() - start, index)
+ times -= 1
+
+def usage(status=0):
+ """print usage string and exit"""
+ print(__doc__ % basename(sys.argv[0]))
+ sys.exit(status)
+
+
+class ProfileReporter:
+ """a profile reporter gathers all profile informations from several
+ threads and can write a report that summarizes all profile informations
+ """
+ profiler_lock = threading.Lock()
+
+ def __init__(self, queries):
+ self._queries = tuple(queries)
+ self._profile_results = [(0., 0)] * len(self._queries)
+ # self._table_report = Table(3, rheaders = True)
+ len_max = max([len(query) for query in self._queries]) + 5
+ self._query_fmt = '%%%ds' % len_max
+
+ def add_proftime(self, elapsed_time, query_index):
+ """add a new time measure for query"""
+ ProfileReporter.profiler_lock.acquire()
+ cumul_time, times = self._profile_results[query_index]
+ cumul_time += elapsed_time
+ times += 1.
+ self._profile_results[query_index] = (cumul_time, times)
+ ProfileReporter.profiler_lock.release()
+
+ def dump_report(self, output = sys.stdout):
+ """dump report in 'output'"""
+ table_elems = ['RQL Query', 'Times', 'Avg Time']
+ total_time = 0.
+ for query, (cumul_time, times) in zip(self._queries, self._profile_results):
+ avg_time = cumul_time / float(times)
+ table_elems += [str(query), '%f' % times, '%f' % avg_time ]
+ total_time += cumul_time
+ table_elems.append('Total time :')
+ table_elems.append(str(total_time))
+ table_elems.append(' ')
+ table_layout = Table(3, rheaders = True, children = table_elems)
+ TextWriter().format(table_layout, output)
+ # output.write('\n'.join(tmp_output))
+
+
+def run(args):
+ """run the command line tool"""
+ try:
+ opts, args = getopt.getopt(args, 'hn:t:u:p:P:o:', ['help', 'user=', 'password=',
+ 'nb-times=', 'nb-threads=',
+ 'profile', 'report-output=',])
+ except Exception as ex:
+ print(ex)
+ usage(1)
+ repeat = 100
+ threads = 1
+ user = os.environ.get('USER', os.environ.get('LOGNAME'))
+ password = None
+ report_output = sys.stdout
+ prof_file = None
+ for opt, val in opts:
+ if opt in ('-h', '--help'):
+ usage()
+ if opt in ('-u', '--user'):
+ user = val
+ elif opt in ('-p', '--password'):
+ password = val
+ elif opt in ('-n', '--nb-times'):
+ repeat = int(val)
+ elif opt in ('-t', '--nb-threads'):
+ threads = int(val)
+ elif opt in ('-P', '--profile'):
+ prof_file = val
+ elif opt in ('-o', '--report-output'):
+ report_output = open(val, 'w')
+ if len(args) != 2:
+ usage(1)
+ queries = [query for query in lines(args[1]) if not query.startswith('#')]
+ if user is None:
+ user = raw_input('login: ')
+ if password is None:
+ password = getpass('password: ')
+ from cubicweb.cwconfig import instance_configuration
+ config = instance_configuration(args[0])
+ # get local access to the repository
+ print("Creating repo", prof_file)
+ repo = Repository(config, prof_file)
+ session = repo.new_session(user, password=password)
+ reporter = ProfileReporter(queries)
+ if threads > 1:
+ executors = []
+ while threads:
+ qe = QueryExecutor(session, repeat, queries, reporter = reporter)
+ executors.append(qe)
+ thread = threading.Thread(target=qe.run)
+ qe.thread = thread
+ thread.start()
+ threads -= 1
+ for qe in executors:
+ qe.thread.join()
+## for qe in executors:
+## print qe.thread, repeat - qe._times, 'times'
+ else:
+ QueryExecutor(session, repeat, queries, reporter = reporter).run()
+ reporter.dump_report(report_output)
+
+
+if __name__ == '__main__':
+ run(sys.argv[1:])
diff -r 1400aee10df4 -r faf279e33298 cubicweb/devtools/test/data/cubes/__init__.py
diff -r 1400aee10df4 -r faf279e33298 cubicweb/devtools/test/data/cubes/i18ntestcube
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/devtools/test/data/cubes/i18ntestcube Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,1 @@
+../libpython/cubicweb_i18ntestcube/
\ No newline at end of file
diff -r 1400aee10df4 -r faf279e33298 cubicweb/devtools/test/data/firstnames.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/devtools/test/data/firstnames.txt Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,1599 @@
+ash
+pasqualino
+asl
+benjy
+wolodymyr
+dionysos
+launce
+khaleel
+sondra
+maaike
+lavinia
+giosu
+daisy
+xiang
+belgin
+edda
+olympia
+treasa
+katya
+misi
+ville
+mahon
+yngve
+moritz
+elder
+gawel
+horsa
+blossom
+deanne
+imelda
+deanna
+cairbre
+eddy
+horst
+gaenor
+breanne
+hewie
+breanna
+jarvis
+jamin
+loise
+jamil
+fingall
+giselle
+jamie
+shinju
+gisella
+akilina
+jordan
+gertie
+cardea
+eiran
+valdemar
+sebestyen
+galia
+bride
+greg
+fausta
+eniola
+rudo
+pratibha
+kisha
+mickey
+charlotte
+karp
+charlotta
+nunzia
+nunzio
+patrice
+kara
+hallam
+collyn
+kari
+karl
+dusan
+lia
+cherokee
+lim
+lin
+yvain
+madlyn
+liv
+lir
+lis
+tullio
+norma
+liz
+lettice
+kae
+kaj
+kai
+tatyanna
+kam
+freddie
+elton
+meinir
+blaise
+kat
+japeth
+alpha
+kay
+mack
+jayna
+jayne
+hormazed
+lupita
+humbert
+vitya
+neoptolemus
+richardine
+hallvard
+diogo
+larkin
+ravi
+louiza
+hermogenes
+alanis
+yadira
+leandra
+milburga
+leandro
+sorin
+randi
+kaleb
+rogerio
+sanna
+kalea
+justice
+kaleo
+dijana
+shprintza
+randy
+colby
+otthild
+mariamne
+patrycja
+darwin
+christal
+khalida
+kaley
+allegria
+vidya
+renaud
+sisel
+suibhne
+lonny
+julienne
+calliope
+rocco
+alexander
+aristide
+edwige
+xzavier
+rajesh
+egil
+gell
+mahavir
+charline
+sigi
+theophania
+maurice
+afon
+konnor
+kiran
+angie
+jalila
+tolly
+havva
+metody
+engel
+philander
+lancelot
+nathalie
+leilah
+dane
+elm
+chatzkel
+keaton
+ashlie
+kudret
+rava
+danette
+eachann
+wilburn
+jeff
+kazimiera
+rukmini
+lauryn
+femie
+mahvash
+berkant
+alesha
+daedalus
+aphra
+karla
+tetty
+agostinho
+bolivar
+savitri
+karly
+forbes
+vencesl
+bahija
+walter
+imam
+iman
+krzys
+imad
+elsa
+neville
+tracie
+else
+anthony
+shevon
+katherine
+marylou
+wojtek
+oddmund
+tristand
+areli
+valkyrie
+garfield
+wyatt
+luanne
+ossia
+luanna
+luciana
+guido
+luciano
+shachar
+astraea
+paco
+leland
+avra
+amenhotep
+kekoa
+gorden
+sameera
+boutros
+ruaidhr
+friedemann
+darrell
+hideaki
+petar
+donatien
+fannie
+eliana
+iason
+fedora
+grant
+shay
+estee
+marcelle
+marcella
+lothair
+shae
+ester
+marcello
+estev
+cassian
+allyson
+dima
+goodwin
+cezar
+blair
+monique
+elwin
+ihsan
+olufunmilayo
+arturo
+nanaia
+greetje
+clovia
+beowulf
+vassily
+madail
+emmeline
+guendolen
+nandag
+eilish
+sakari
+elisheva
+crispin
+aksel
+alvin
+cernunnos
+feardorcha
+heshel
+afra
+iqbal
+pryce
+siddhartha
+mikkel
+alvis
+myrtie
+khajag
+yesenia
+nikki
+grigory
+grigore
+maeve
+rebeca
+diederick
+maeva
+grigori
+cheryl
+rahim
+marco
+marci
+stein
+trista
+olufemi
+emmanuelle
+nadezhda
+wahid
+marcy
+vanda
+lavra
+alida
+amara
+hipolito
+valent
+renatus
+moira
+donny
+lucretia
+donna
+vesta
+cadoc
+reetta
+erma
+markku
+rosamond
+gracia
+tuyet
+sieffre
+gracie
+kodey
+debra
+photine
+jacek
+yanick
+isiah
+khordad
+rui
+stef
+rub
+foma
+sten
+kassy
+rue
+nelly
+merrick
+ayn
+macy
+vincente
+anargyros
+rut
+lenox
+jenessa
+faith
+barnaby
+manny
+jyotsana
+hasan
+iakopa
+edvard
+narcisa
+loredana
+ida
+torborg
+rollo
+stamatios
+pero
+natalya
+maudie
+carlton
+paulina
+aliyah
+lanty
+tadg
+deiniol
+dwayne
+alison
+fabius
+rbj
+latasha
+maarit
+roxanna
+katinka
+publius
+augustijn
+ferdy
+khadiga
+akosua
+rees
+quetzalcoatl
+kristian
+larry
+reed
+krystal
+micheil
+paolo
+chelsey
+ute
+paola
+hamilcar
+malin
+deangelo
+munir
+velma
+malik
+utz
+malie
+govad
+chelsea
+malia
+willem
+seetha
+andrina
+rupert
+myrrine
+theodoros
+tito
+ivonne
+nan
+beryl
+nat
+tawnie
+korn
+marzena
+tinek
+hermine
+kora
+frances
+william
+tianna
+evan
+kory
+merletta
+kort
+nevan
+naheed
+heath
+tyreek
+shona
+amyas
+urjasz
+katy
+gu
+gr
+hilde
+mehmud
+gy
+hilda
+psyche
+olive
+nuno
+vinnie
+ga
+kato
+kata
+jeunesse
+kate
+chandrakant
+caoilainn
+arik
+rhonda
+leocadio
+euan
+aric
+leocadia
+aria
+bronwen
+marcellin
+vladislav
+ferapont
+nichole
+kizzy
+duilio
+jafet
+maas
+tue
+felicity
+mansoor
+rfhlaith
+brigitta
+fishke
+akua
+izabela
+olaf
+vittore
+michael
+skar
+ryan
+gretta
+alvena
+olav
+brigitte
+euterpe
+barbara
+aiolos
+carter
+khalifa
+tziporah
+honora
+feich
+marilena
+onesime
+theo
+gunvor
+sa'id
+katlyn
+nicholas
+preeti
+etzel
+ekewaka
+vinal
+jubal
+ramsey
+rowley
+jocelin
+alfsigr
+kalliope
+micah
+frantisek
+holger
+alysha
+chant
+derry
+corin
+janus
+morcant
+chang
+corie
+gena
+randa
+joost
+vasile
+clark
+clare
+wim
+wil
+clara
+danika
+jory
+eleonoora
+ayelet
+caligula
+zakiah
+kilie
+meliora
+ottavio
+idoya
+ninette
+hudson
+deon
+gawdat
+frida
+jonathan
+reynold
+laocadia
+cerise
+cosmo
+hezekiah
+winston
+isak
+allyn
+noelene
+trajan
+vijaya
+cosma
+tresha
+astrithr
+priya
+astrophel
+pocahontas
+eliphalet
+stafford
+salah
+salal
+pauliina
+lazer
+feidhlim
+jackalyn
+kenny
+alayna
+wilfried
+wasim
+blaine
+femke
+jehu
+kenna
+lenore
+nkechi
+letizia
+kian
+kayleigh
+spartacus
+manuela
+leyton
+lesley
+georg
+ferdinand
+cuauhtemoc
+aeron
+lavrenti
+nyx
+ronald
+yoshiko
+gundula
+eluf
+toma
+riccardo
+ruadh
+matylda
+winter
+mayson
+llew
+clytia
+jamila
+fariha
+aegle
+octavio
+steafan
+jacqui
+mikelo
+dovid
+modestus
+blake
+jeanna
+alessa
+conway
+brook
+sunday
+kizzie
+hande
+catherine
+eckhard
+rr
+gwyneth
+aukusti
+placid
+rufino
+kyleigh
+helah
+benoite
+eluned
+sanaz
+cnaeus
+ettie
+benaiah
+brendan
+wenonah
+nye
+candela
+dragan
+sanda
+naveen
+margar
+naveed
+austen
+sandu
+britta
+brodie
+morton
+kamilla
+sandy
+guilherme
+dorothea
+calix
+braxton
+wigburg
+tryphena
+ricky
+may
+sylwia
+libor
+marek
+ece
+trinity
+katsuro
+tercero
+'ismat
+mared
+jill
+amato
+achim
+princess
+jaquelyn
+eustathios
+tapio
+aglea
+kees
+evstathios
+edwyna
+austin
+cristian
+jouko
+nikandros
+leonora
+kaitlynn
+christoph
+mai
+parthalan
+tancredo
+rosaleen
+lynnette
+yasamin
+encarnacion
+gerolt
+ionut
+harmon
+ailbhe
+islwyn
+muirenn
+nyah
+mariana
+viktor
+greta
+kreszentia
+grete
+hormazd
+foka
+poseidon
+kazimir
+ultan
+ben
+sudhir
+bea
+bee
+saburo
+elnora
+ber
+michelyne
+clytemnestra
+yardena
+gavrel
+michelangelo
+wystan
+odhiambo
+miquel
+bertha
+su
+berthe
+alisia
+kelley
+leonhard
+rodger
+ewald
+oluwaseyi
+celandine
+kunegunda
+luisa
+khayyam
+iisakki
+luise
+ligia
+zaina
+tatiana
+siarl
+jorge
+bronislaw
+bronislav
+montana
+edric
+miloslava
+achilles
+donaldina
+wilfredo
+laurens
+haifa
+stelian
+glenice
+calvino
+rodica
+hulda
+indy
+uri
+laurena
+tzeitel
+laurene
+urs
+danita
+platon
+parker
+chadwick
+lorne
+narinder
+theodoric
+florentina
+ambrosine
+nikephoros
+kapel
+aeolus
+cenek
+hadi
+perle
+alyona
+cyril
+perla
+cicely
+darby
+madhav
+hector
+ethan
+aretha
+ilker
+avdotya
+boris
+sassa
+misty
+bonaventure
+kiefer
+emmet
+arkadios
+farrah
+tivoli
+pietari
+mohammed
+shoshana
+felipe
+felipa
+maurene
+tancred
+raymonde
+sho
+faron
+arundhati
+esteri
+silvanus
+nuha
+aloisia
+baris
+tammie
+fabricio
+lux
+luz
+driskoll
+tyra
+luc
+marsha
+luk
+aron
+joye
+ken
+gethsemane
+kelan
+yuko
+merry
+proserpine
+precious
+suibne
+mindy
+vitus
+olga
+jia
+kalysta
+angharad
+ciera
+careen
+inglebert
+apphia
+muadhnait
+christen
+rebekah
+dominique
+gita
+tori
+harmonie
+anatolius
+harmonia
+denise
+johann
+johano
+denisa
+viktoria
+padmini
+johana
+christer
+barakat
+willy
+sari
+fitzroy
+yaw
+sara
+yan
+quim
+quin
+yaa
+katelin
+pontus
+raelene
+alexus
+gwandoya
+venceslav
+ott
+artemidoros
+zaynab
+folant
+salman
+ealdgy
+randal
+macey
+heriberto
+kimball
+ekin
+dema
+evelyn
+demi
+pip
+simona
+daniil
+emmerson
+kausalya
+kortney
+gavriil
+yered
+parth
+fido
+solange
+oona
+anka
+renie
+anke
+habakkuk
+linwood
+teofilo
+grazyna
+enitan
+bhaskar
+finnian
+perseus
+mordechai
+fyodor
+ashley
+philo
+i
+hecate
+phile
+theodor
+kiaran
+ashlee
+dollie
+savannah
+upton
+sofia
+noak
+sofie
+laurel
+lauren
+dubaku
+zacharjasz
+patricio
+trudi
+sophus
+vida
+patricia
+trudy
+tapani
+mavreena
+jesper
+sandrine
+sonia
+livio
+mikolaj
+laurine
+livia
+finnegan
+oprah
+waheed
+lavonne
+perdita
+liviu
+imen
+attila
+lincoln
+fernanda
+evrard
+fernande
+jaana
+artair
+fernando
+candy
+cande
+kazimierz
+kaija
+shamgar
+laxmi
+martie
+page
+candi
+brody
+piaras
+shea
+herbie
+shem
+kristaps
+sher
+cleveland
+carreen
+margaid
+phinehas
+justina
+wendi
+linus
+wenda
+matrona
+christiane
+wendy
+kerensa
+roch
+fergal
+fanny
+kamila
+oswin
+camilo
+everette
+katashi
+myron
+ridley
+shavonne
+blythe
+nader
+marlowe
+miha
+carolyn
+glenn
+gadar
+rainard
+sybella
+raquel
+rozabela
+serhat
+bashemath
+jing
+gobnet
+yentl
+sylvana
+dolores
+sanjit
+tamsin
+sanjiv
+innes
+daniela
+daniele
+margr
+keysha
+rogelio
+ean
+hj
+philipp
+valerian
+marge
+gail
+margh
+gaia
+engelbert
+kathie
+artemisia
+margo
+stefan
+pansy
+swanhilda
+swanhilde
+alessio
+beata
+beate
+babur
+beatrice
+eris
+erin
+maura
+camryn
+conan
+erik
+krysia
+nigelia
+mauri
+averill
+draco
+eric
+sophronius
+mauro
+diego
+simcha
+malachy
+barth
+maoilios
+germaine
+malachi
+katariina
+lianne
+ferdinando
+donagh
+kelemen
+taletta
+star
+gilah
+faustus
+lfwine
+rayna
+gotthard
+sa'd
+stan
+klemen
+pranay
+howie
+dewey
+tiarnan
+katherina
+uzma
+jabril
+hakan
+martin
+elsie
+cleve
+imani
+moshe
+padma
+inmaculada
+augustine
+trenton
+ghislain
+aiden
+alfhild
+ireneus
+gottschalk
+andra
+jahzeel
+andro
+fredrik
+wynter
+kohar
+tobin
+giustino
+buddy
+marcos
+mieszko
+giustina
+khalil
+aur
+helladius
+riccarda
+elettra
+glykeria
+yeva
+trahaearn
+ulisse
+wilfred
+sorrel
+saara
+ekwueme
+sarita
+finella
+waldo
+herbert
+elissa
+bevan
+lavern
+till
+ruxandra
+lavender
+ghalib
+eldon
+masterman
+tameka
+mihajlo
+mahin
+neo
+asim
+jordon
+pace
+ned
+giampiero
+asia
+nea
+haze
+bearach
+cheng
+pieter
+yonah
+chikako
+maverick
+fonsie
+ozzy
+meg
+mitxel
+filbert
+mel
+neves
+henrik
+mei
+hilaire
+drew
+deemer
+liborio
+dubhghlas
+bogdan
+dipak
+rapha
+golda
+maighread
+masha
+pranciskis
+mitchell
+titilayo
+aydin
+ippolit
+toiba
+omar
+cindy
+alexandrina
+lyubov
+hiltraud
+joshua
+moray
+baptiste
+bahiyya
+marquita
+benedicta
+reagan
+latifah
+scevola
+ardashir
+pakpao
+topaz
+janine
+omolara
+janina
+morag
+euripides
+lennart
+orb
+helmuth
+armo
+diederik
+lennard
+raeburn
+oscar
+odell
+ualan
+noemi
+melba
+berlin
+lazarus
+merla
+meera
+anastas
+rhamantus
+yussel
+meshullam
+esdras
+kumar
+flora
+norwood
+rio
+apollinaris
+oleg
+rim
+nadzeija
+akio
+akim
+efisio
+jayda
+olek
+rowanne
+honey
+karola
+chetana
+candelas
+friede
+phaedrus
+frieda
+joann
+braidy
+hitomi
+kieron
+dakarai
+teofil
+dervila
+ria
+pietrina
+becky
+alechjo
+santos
+egon
+olwin
+ove
+balthazar
+reeta
+becka
+tillo
+royce
+peninnah
+earnestine
+janis
+jakab
+janie
+rosalba
+hosanna
+aharon
+fife
+zacharias
+fifi
+aleesha
+murray
+helena
+helene
+rashmi
+afia
+oswald
+zachariah
+shawnee
+pius
+zdenek
+kichiro
+melchiorre
+erland
+yaroslava
+anushka
+cree
+iser
+rachel
+anik
+fabiola
+ania
+aneurin
+hernando
+ernesto
+ernesta
+astor
+manasseh
+naphtali
+shai
+lorena
+lazar
+luce
+lorenz
+luca
+briana
+rosemary
+dawid
+nava
+payton
+linos
+aida
+gunne
+milan
+tuomas
+sahar
+doug
+mikala
+dawn
+vincenza
+saturninus
+channah
+mandy
+reuven
+cormag
+cormac
+mandi
+sachie
+ladonna
+phuong
+tasha
+ramon
+hashim
+fachtna
+euphemia
+tisha
+jozafat
+horatius
+imke
+venus
+rodolf
+binyamin
+cosmin
+oluwafunmilayo
+nekane
+loup
+kohinoor
+teuvo
+xue
+innokenti
+vincenzo
+kiley
+isa
+hannibal
+vijay
+kornelia
+afanasy
+vittorio
+tuor
+adalia
+damayanti
+afanasi
+grady
+evangelos
+ermete
+brock
+bonita
+arisha
+pelagia
+solvej
+parthenope
+peggie
+kierra
+jozefa
+garry
+giuditta
+ladislas
+jozefo
+swietoslaw
+yildiz
+nasira
+eshe
+helen
+gretchen
+shekhar
+daren
+lenuta
+dymphna
+daina
+matteo
+berjouhi
+jerusha
+solomon
+gernot
+murtagh
+meaveen
+godwin
+ladislao
+minh
+hachiro
+farquhar
+ichabod
+mina
+caleb
+veera
+ginger
+ming
+jaynie
+sharyn
+seonag
+ferdie
+ilana
+gabriela
+gabriele
+lloren
+hooda
+mabelle
+timeus
+teagan
+gorka
+ulrich
+philadelphia
+razvan
+lamprecht
+marit
+kean
+marin
+mario
+rhonwen
+vilko
+konstantin
+tyr
+maria
+fastred
+kazuki
+krister
+don
+dom
+iekika
+ruben
+m
+calanthe
+luchjo
+vicki
+sheryl
+afanen
+kirabo
+dov
+kristel
+dot
+kristen
+pavao
+donelle
+antti
+donella
+katerina
+liza
+wladyslaw
+gerlach
+hrodohaidis
+samnang
+ashok
+raelyn
+tipene
+kallias
+kun
+gebhard
+folke
+katica
+lennie
+rupinder
+maryann
+adolphus
+lachtna
+petri
+monica
+kyriakos
+brannon
+deforest
+shankara
+hourig
+haniyya
+christopher
+griogair
+saturn
+tola
+earl
+decebal
+bas
+petra
+adelia
+cleto
+bao
+bal
+bai
+julien
+clarette
+dimitar
+fioralba
+tommie
+domhnall
+ragnhei
+gunnar
+ailill
+juliet
+pete
+vasya
+peta
+duff
+imaculada
+peti
+manola
+kolab
+petr
+neriah
+manolo
+edoardo
+onora
+elisud
+graciano
+fayza
+as'ad
+romola
+vernon
+pluto
+genevra
+yahweh
+mukesh
+fiacre
+sudarshana
+shahriar
+athanasius
+una
+casimir
+derval
+ernst
+sherilyn
+taranis
+enzo
+bedelia
+winnie
+kalyan
+jinan
+plamen
+quinn
+monat
+alcaeus
+mathieu
+aindri
+raffaella
+armin
+lovell
+cyrus
+chelo
+sidonius
+basia
+tina
+basil
+basim
+fuad
+riley
+tracee
+chun
+talia
diff -r 1400aee10df4 -r faf279e33298 cubicweb/devtools/test/data/libpython/cubicweb_i18ntestcube/__init__.py
diff -r 1400aee10df4 -r faf279e33298 cubicweb/devtools/test/data/libpython/cubicweb_i18ntestcube/__pkginfo__.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/devtools/test/data/libpython/cubicweb_i18ntestcube/__pkginfo__.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,18 @@
+# pylint: disable=W0622
+"""cubicweb i18n test cube application packaging information"""
+
+modname = 'i18ntestcube'
+distname = 'cubicweb-i18ntestcube'
+
+numversion = (0, 1, 0)
+version = '.'.join(str(num) for num in numversion)
+
+license = 'LGPL'
+author = 'LOGILAB S.A. (Paris, FRANCE)'
+author_email = 'contact@logilab.fr'
+description = 'forum'
+web = 'http://www.cubicweb.org/project/%s' % distname
+
+__depends__ = {'cubicweb': '>= 3.16.4',
+ }
+__recommends__ = {}
diff -r 1400aee10df4 -r faf279e33298 cubicweb/devtools/test/data/libpython/cubicweb_i18ntestcube/i18n/en.po.ref
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/devtools/test/data/libpython/cubicweb_i18ntestcube/i18n/en.po.ref Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,182 @@
+msgid ""
+msgstr ""
+"Project-Id-Version: cubicweb 3.16.5\n"
+"PO-Revision-Date: 2008-03-28 18:14+0100\n"
+"Last-Translator: Logilab Team \n"
+"Language-Team: fr \n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: cubicweb-devtools\n"
+"Plural-Forms: nplurals=2; plural=(n > 1);\n"
+
+# schema pot file, generated on 2013-07-12 16:18:12
+#
+# singular and plural forms for each entity type
+# subject and object forms for each relation type
+# (no object form for final or symmetric relation types)
+msgid "Forum"
+msgstr ""
+
+msgid "Forum_plural"
+msgstr ""
+
+msgid "This Forum"
+msgstr ""
+
+msgid "This Forum:"
+msgstr ""
+
+msgid "New Forum"
+msgstr ""
+
+msgctxt "inlined:Forum.in_forum.object"
+msgid "add a ForumThread"
+msgstr ""
+
+msgctxt "inlined:Forum.in_forum.object"
+msgid "ForumThread"
+msgstr ""
+
+msgid "add ForumThread in_forum Forum object"
+msgstr ""
+
+msgid "add a Forum"
+msgstr ""
+
+msgid "add a ForumThread"
+msgstr ""
+
+msgid "creating ForumThread (ForumThread in_forum Forum %(linkto)s)"
+msgstr ""
+
+msgid "ForumThread"
+msgstr ""
+
+msgid "ForumThread_plural"
+msgstr ""
+
+msgid "This ForumThread"
+msgstr ""
+
+msgid "This ForumThread:"
+msgstr ""
+
+msgid "New ForumThread"
+msgstr ""
+
+msgid "content"
+msgstr ""
+
+msgctxt "ForumThread"
+msgid "content"
+msgstr ""
+
+msgid "content_format"
+msgstr ""
+
+msgctxt "ForumThread"
+msgid "content_format"
+msgstr ""
+
+msgctxt "Forum"
+msgid "description"
+msgstr ""
+
+msgctxt "Forum"
+msgid "description_format"
+msgstr ""
+
+msgid "in_forum"
+msgstr ""
+
+msgctxt "ForumThread"
+msgid "in_forum"
+msgstr ""
+
+msgctxt "Forum"
+msgid "in_forum_object"
+msgstr ""
+
+msgid "in_forum_object"
+msgstr ""
+
+msgid "interested_in"
+msgstr ""
+
+msgctxt "CWUser"
+msgid "interested_in"
+msgstr ""
+
+msgctxt "ForumThread"
+msgid "interested_in_object"
+msgstr ""
+
+msgctxt "Forum"
+msgid "interested_in_object"
+msgstr ""
+
+msgid "interested_in_object"
+msgstr ""
+
+msgid "nosy_list"
+msgstr ""
+
+msgctxt "ForumThread"
+msgid "nosy_list"
+msgstr ""
+
+msgctxt "Forum"
+msgid "nosy_list"
+msgstr ""
+
+msgctxt "CWUser"
+msgid "nosy_list_object"
+msgstr ""
+
+msgid "nosy_list_object"
+msgstr ""
+
+msgctxt "ForumThread"
+msgid "title"
+msgstr ""
+
+msgid "topic"
+msgstr ""
+
+msgctxt "Forum"
+msgid "topic"
+msgstr ""
+
+msgid "Topic"
+msgstr ""
+
+msgid "Description"
+msgstr ""
+
+msgid "Number of threads"
+msgstr ""
+
+msgid "Last activity"
+msgstr ""
+
+msgid ""
+"a long\n"
+"tranlated line\n"
+"hop."
+msgstr ""
+
+msgid "Subject"
+msgstr ""
+
+msgid "Created"
+msgstr ""
+
+msgid "Answers"
+msgstr ""
+
+msgid "Last answered"
+msgstr ""
+
+msgid "This forum does not have any thread yet."
+msgstr ""
diff -r 1400aee10df4 -r faf279e33298 cubicweb/devtools/test/data/libpython/cubicweb_i18ntestcube/schema.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/devtools/test/data/libpython/cubicweb_i18ntestcube/schema.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,45 @@
+# -*- coding: utf-8 -*-
+# copyright 2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr -- mailto:contact@logilab.fr
+#
+# This program is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program. If not, see .
+
+"""cubicweb-forum schema"""
+
+from yams.buildobjs import (String, RichString, EntityType,
+ RelationDefinition, SubjectRelation)
+from yams.reader import context
+
+class Forum(EntityType):
+ topic = String(maxsize=50, required=True, unique=True)
+ description = RichString()
+
+class ForumThread(EntityType):
+ __permissions__ = {
+ 'read': ('managers', 'users'),
+ 'add': ('managers', 'users'),
+ 'update': ('managers', 'owners'),
+ 'delete': ('managers', 'owners')
+ }
+ title = String(required=True, fulltextindexed=True, maxsize=256)
+ content = RichString(required=True, fulltextindexed=True)
+ in_forum = SubjectRelation('Forum', cardinality='1*', inlined=True,
+ composite='object')
+class interested_in(RelationDefinition):
+ subject = 'CWUser'
+ object = ('ForumThread', 'Forum')
+
+class nosy_list(RelationDefinition):
+ subject = ('Forum', 'ForumThread')
+ object = 'CWUser'
diff -r 1400aee10df4 -r faf279e33298 cubicweb/devtools/test/data/libpython/cubicweb_i18ntestcube/views.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/devtools/test/data/libpython/cubicweb_i18ntestcube/views.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,61 @@
+# copyright 2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr -- mailto:contact@logilab.fr
+#
+# This program is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program. If not, see .
+
+"""cubicweb-forum views/forms/actions/components for web ui"""
+
+from cubicweb import view
+from cubicweb.predicates import is_instance
+from cubicweb.web.views import primary, baseviews, uicfg
+from cubicweb.web.views.uicfg import autoform_section as afs
+
+class MyAFS(uicfg.AutoformSectionRelationTags):
+ __select__ = is_instance('ForumThread')
+
+_myafs = MyAFS()
+
+_myafs.tag_object_of(('*', 'in_forum', 'Forum'), 'main', 'inlined')
+
+afs.tag_object_of(('*', 'in_forum', 'Forum'), 'main', 'inlined')
+
+
+class ForumSameETypeListView(baseviews.SameETypeListView):
+ __select__ = baseviews.SameETypeListView.__select__ & is_instance('Forum')
+
+ def call(self, **kwargs):
+ _ = self._cw._
+ _('Topic'), _('Description')
+ _('Number of threads'), _('Last activity')
+ _('''a long
+tranlated line
+hop.''')
+
+
+class ForumLastActivity(view.EntityView):
+ __regid__ = 'forum_last_activity'
+ __select__ = view.EntityView.__select__ & is_instance('Forum')
+
+
+class ForumPrimaryView(primary.PrimaryView):
+ __select__ = primary.PrimaryView.__select__ & is_instance('Forum')
+
+ def render_entity_attributes(self, entity):
+ _ = self._cw._
+ _('Subject'), _('Created'), _('Answers'),
+ _('Last answered')
+ _('This forum does not have any thread yet.')
+
+class ForumThreadPrimaryView(primary.PrimaryView):
+ __select__ = primary.PrimaryView.__select__ & is_instance('ForumThread')
diff -r 1400aee10df4 -r faf279e33298 cubicweb/devtools/test/data/schema.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/devtools/test/data/schema.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,40 @@
+# copyright 2003-2016 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see .
+"""schema for cubicweb.devtools tests"""
+from yams.buildobjs import EntityType, SubjectRelation, String, RichString, Int, Date
+
+
+class Person(EntityType):
+ """a physical person"""
+ surname = String(required=True, fulltextindexed=True, indexed=True,
+ maxsize=64)
+ firstname = String(fulltextindexed=True, maxsize=64)
+ civility = String(required=True, internationalizable=True,
+ vocabulary=('Mr', 'Ms', 'Mrs'),
+ default='Mr')
+ description = RichString(fulltextindexed=True)
+ birthday = Date()
+
+
+class Bug(EntityType):
+ title = String(maxsize=64, required=True, fulltextindexed=True)
+ severity = String(vocabulary=('important', 'normal', 'minor'),
+ default='normal')
+ cost = Int()
+ description = String(maxsize=4096, fulltextindexed=True)
+ identical_to = SubjectRelation('Bug', symmetric=True)
diff -r 1400aee10df4 -r faf279e33298 cubicweb/devtools/test/data/static/js_examples/dep_1.js
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/devtools/test/data/static/js_examples/dep_1.js Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,1 @@
+a = 4;
diff -r 1400aee10df4 -r faf279e33298 cubicweb/devtools/test/data/static/js_examples/deps_2.js
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/devtools/test/data/static/js_examples/deps_2.js Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,1 @@
+b = a +2;
diff -r 1400aee10df4 -r faf279e33298 cubicweb/devtools/test/data/static/js_examples/test_simple_failure.js
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/devtools/test/data/static/js_examples/test_simple_failure.js Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,18 @@
+$(document).ready(function() {
+
+ QUnit.module("air");
+
+ QUnit.test("test 1", function (assert) {
+ assert.equal(2, 4);
+ });
+
+ QUnit.test("test 2", function (assert) {
+ assert.equal('', '45');
+ assert.equal('1024', '32');
+ });
+
+ QUnit.module("able");
+ QUnit.test("test 3", function (assert) {
+ assert.deepEqual(1, 1);
+ });
+});
diff -r 1400aee10df4 -r faf279e33298 cubicweb/devtools/test/data/static/js_examples/test_simple_success.js
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/devtools/test/data/static/js_examples/test_simple_success.js Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,17 @@
+$(document).ready(function() {
+
+ QUnit.module("air");
+
+ QUnit.test("test 1", function (assert) {
+ assert.equal(2, 2);
+ });
+
+ QUnit.test("test 2", function (assert) {
+ assert.equal('45', '45');
+ });
+
+ QUnit.module("able");
+ QUnit.test("test 3", function (assert) {
+ assert.deepEqual(1, 1);
+ });
+});
diff -r 1400aee10df4 -r faf279e33298 cubicweb/devtools/test/data/static/js_examples/test_with_dep.js
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/devtools/test/data/static/js_examples/test_with_dep.js Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,9 @@
+$(document).ready(function() {
+
+ QUnit.module("air");
+
+ QUnit.test("test 1", function (assert) {
+ assert.equal(a, 4);
+ });
+
+});
diff -r 1400aee10df4 -r faf279e33298 cubicweb/devtools/test/data/static/js_examples/test_with_ordered_deps.js
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/devtools/test/data/static/js_examples/test_with_ordered_deps.js Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,9 @@
+$(document).ready(function() {
+
+ QUnit.module("air");
+
+ QUnit.test("test 1", function (assert) {
+ assert.equal(b, 6);
+ });
+
+});
diff -r 1400aee10df4 -r faf279e33298 cubicweb/devtools/test/data/static/js_examples/utils.js
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/devtools/test/data/static/js_examples/utils.js Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,29 @@
+function datetuple(d) {
+ return [d.getFullYear(), d.getMonth()+1, d.getDate(),
+ d.getHours(), d.getMinutes()];
+}
+
+function pprint(obj) {
+ print('{');
+ for(k in obj) {
+ print(' ' + k + ' = ' + obj[k]);
+ }
+ print('}');
+}
+
+function arrayrepr(array) {
+ return '[' + array.join(', ') + ']';
+}
+
+function assertArrayEquals(array1, array2) {
+ if (array1.length != array2.length) {
+ throw new crosscheck.AssertionFailure(array1.join(', ') + ' != ' + array2.join(', '));
+ }
+    for (var i=0; i<array1.length; i++) {
+        if (array1[i] != array2[i]) {
+            throw new crosscheck.AssertionFailure(arrayrepr(array1) + ' != ' +
+                                                  arrayrepr(array2));
+        }
+    }
+    return true;
+}
diff -r 1400aee10df4 -r faf279e33298 cubicweb/devtools/test/data/views.py
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/devtools/test/data/views.py	Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,46 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+"""only for unit tests !"""
+
+from cubicweb.view import EntityView
+from cubicweb.predicates import is_instance
+
+HTML_PAGE = u"""<html>
+ <body>
+  <h1>Hello World !</h1>
+ </body>
+</html>
+
+"""
+
+class SimpleView(EntityView):
+ __regid__ = 'simple'
+ __select__ = is_instance('Bug',)
+
+ def call(self, **kwargs):
+ self.cell_call(0, 0)
+
+ def cell_call(self, row, col):
+ self.w(HTML_PAGE)
+
+class RaisingView(EntityView):
+ __regid__ = 'raising'
+ __select__ = is_instance('Bug',)
+
+ def cell_call(self, row, col):
+ raise ValueError()
diff -r 1400aee10df4 -r faf279e33298 cubicweb/devtools/test/unittest_dbfill.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/devtools/test/unittest_dbfill.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,121 @@
+# -*- coding: iso-8859-1 -*-
+# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""unit tests for database value generator"""
+
+import os.path as osp
+import re
+import datetime
+import io
+
+from six.moves import range
+
+from logilab.common.testlib import TestCase, unittest_main
+
+from cubicweb.devtools.fill import ValueGenerator, make_tel
+from cubicweb.devtools import ApptestConfiguration
+
+DATADIR = osp.join(osp.abspath(osp.dirname(__file__)), 'data')
+ISODATE_SRE = re.compile('(?P<year>\d{4})-(?P<month>\d{2})-(?P<day>\d{2})$')
+
+
+class MyValueGenerator(ValueGenerator):
+
+ def generate_Bug_severity(self, entity, index):
+ return u'dangerous'
+
+ def generate_Any_description(self, entity, index, format=None):
+ return u'yo'
+
+
+class ValueGeneratorTC(TestCase):
+ """test case for ValueGenerator"""
+
+ def _choice_func(self, etype, attrname):
+ try:
+ return getattr(self, '_available_%s_%s' % (etype, attrname))(etype, attrname)
+ except AttributeError:
+ return None
+
+ def _available_Person_firstname(self, etype, attrname):
+ return [f.strip() for f in io.open(osp.join(DATADIR, 'firstnames.txt'), encoding='latin1')]
+
+ def setUp(self):
+ config = ApptestConfiguration('data', __file__)
+ config.bootstrap_cubes()
+ schema = config.load_schema()
+ e_schema = schema.eschema('Person')
+ self.person_valgen = ValueGenerator(e_schema, self._choice_func)
+ e_schema = schema.eschema('Bug')
+ self.bug_valgen = MyValueGenerator(e_schema)
+ self.config = config
+
+ def test_string(self):
+ """test string generation"""
+ surname = self.person_valgen.generate_attribute_value({}, 'surname', 12)
+ self.assertEqual(surname, u'&surname12')
+
+ def test_domain_value(self):
+ """test value generation from a given domain value"""
+ firstname = self.person_valgen.generate_attribute_value({}, 'firstname', 12)
+ possible_choices = self._choice_func('Person', 'firstname')
+ self.assertTrue(firstname in possible_choices,
+ '%s not in %s' % (firstname, possible_choices))
+
+ def test_choice(self):
+ """test choice generation"""
+ # Test for random index
+ for index in range(5):
+ sx_value = self.person_valgen.generate_attribute_value({}, 'civility', index)
+ self.assertTrue(sx_value in ('Mr', 'Mrs', 'Ms'))
+
+ def test_integer(self):
+ """test integer generation"""
+ # Test for random index
+ for index in range(5):
+ cost_value = self.bug_valgen.generate_attribute_value({}, 'cost', index)
+ self.assertIn(cost_value, list(range(index+1)))
+
+ def test_date(self):
+ """test date generation"""
+ # Test for random index
+ for index in range(10):
+ date_value = self.person_valgen.generate_attribute_value({}, 'birthday', index)
+ self.assertTrue(isinstance(date_value, datetime.date))
+
+ def test_phone(self):
+ """tests make_tel utility"""
+ self.assertEqual(make_tel(22030405), '22 03 04 05')
+
+ def test_customized_generation(self):
+ self.assertEqual(self.bug_valgen.generate_attribute_value({}, 'severity', 12),
+ u'dangerous')
+ self.assertEqual(self.bug_valgen.generate_attribute_value({}, 'description', 12),
+ u'yo')
+ self.assertEqual(self.person_valgen.generate_attribute_value({}, 'description', 12),
+ u'yo')
+
+
+class ConstraintInsertionTC(TestCase):
+
+ def test_writeme(self):
+ self.skipTest('Test automatic insertion / Schema Constraints')
+
+
+if __name__ == '__main__':
+ unittest_main()
diff -r 1400aee10df4 -r faf279e33298 cubicweb/devtools/test/unittest_devctl.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/devtools/test/unittest_devctl.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,129 @@
+# copyright 2003-2016 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""unit tests for cubicweb-ctl commands from devtools"""
+
+import os
+import os.path as osp
+import sys
+import tempfile
+import shutil
+from subprocess import Popen, PIPE, STDOUT, check_output
+from unittest import TestCase
+
+
+def newcube(directory, name):
+ cmd = ['cubicweb-ctl', 'newcube', '--directory', directory, name]
+ proc = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=STDOUT)
+ stdout, _ = proc.communicate(b'short_desc\n')
+ return proc.returncode, stdout
+
+
+def to_unicode(msg):
+ return msg.decode(sys.getdefaultencoding(), errors='replace')
+
+
+class DevCtlTC(TestCase):
+ """Test case for devtools commands"""
+
+ if not hasattr(TestCase, 'assertItemsEqual'):
+ assertItemsEqual = TestCase.assertCountEqual
+
+ def test_newcube(self):
+ expected_project_content = ['setup.py', 'test', 'MANIFEST.in',
+ 'cubicweb_foo',
+ 'cubicweb-foo.spec', 'debian', 'README',
+ 'tox.ini']
+ expected_package_content = ['i18n', 'hooks.py', 'views.py',
+ 'migration', 'entities.py', 'schema.py',
+ '__init__.py', 'data', '__pkginfo__.py']
+ tmpdir = tempfile.mkdtemp(prefix="temp-cwctl-newcube")
+ try:
+ retcode, stdout = newcube(tmpdir, 'foo')
+ self.assertEqual(retcode, 0, msg=to_unicode(stdout))
+ project_dir = osp.join(tmpdir, 'cubicweb-foo')
+ project_content = os.listdir(project_dir)
+ package_dir = osp.join(project_dir, 'cubicweb_foo')
+ package_content = os.listdir(package_dir)
+ self.assertItemsEqual(project_content, expected_project_content)
+ self.assertItemsEqual(package_content, expected_package_content)
+ finally:
+ shutil.rmtree(tmpdir, ignore_errors=True)
+
+ def test_flake8(self):
+ """Ensure newcube built from skeleton is flake8-compliant"""
+ tmpdir = tempfile.mkdtemp(prefix="temp-cwctl-newcube-flake8")
+ try:
+ newcube(tmpdir, 'foo')
+ cmd = [sys.executable, '-m', 'flake8',
+ osp.join(tmpdir, 'cubicweb-foo', 'cubicweb_foo')]
+ proc = Popen(cmd, stdout=PIPE, stderr=STDOUT)
+ retcode = proc.wait()
+ finally:
+ shutil.rmtree(tmpdir, ignore_errors=True)
+ self.assertEqual(retcode, 0,
+ msg=to_unicode(proc.stdout.read()))
+
+ def test_newcube_sdist(self):
+ """Ensure sdist can be built from a new cube"""
+ tmpdir = tempfile.mkdtemp(prefix="temp-cwctl-newcube-sdist")
+ try:
+ newcube(tmpdir, 'foo')
+ projectdir = osp.join(tmpdir, 'cubicweb-foo')
+ cmd = [sys.executable, 'setup.py', 'sdist']
+ proc = Popen(cmd, stdout=PIPE, stderr=STDOUT, cwd=projectdir)
+ retcode = proc.wait()
+ stdout = to_unicode(proc.stdout.read())
+ self.assertEqual(retcode, 0, stdout)
+ distfpath = osp.join(projectdir, 'dist', 'cubicweb-foo-0.1.0.tar.gz')
+ self.assertTrue(osp.isfile(distfpath))
+ finally:
+ shutil.rmtree(tmpdir, ignore_errors=True)
+
+ def test_newcube_install(self):
+ """Ensure a new cube can be installed"""
+ tmpdir = tempfile.mkdtemp(prefix="temp-cwctl-newcube-install")
+ try:
+ newcube(tmpdir, 'foo')
+ projectdir = osp.join(tmpdir, 'cubicweb-foo')
+ env = os.environ.copy()
+ env['HOME'] = tmpdir
+ cmd = [sys.executable, 'setup.py', 'install', '--user']
+ proc = Popen(cmd, stdout=PIPE, stderr=STDOUT,
+ cwd=projectdir, env=env)
+ retcode = proc.wait()
+ stdout = to_unicode(proc.stdout.read())
+ self.assertEqual(retcode, 0, stdout)
+ targetdir = check_output([sys.executable, '-m', 'site', '--user-site'],
+ env=env, cwd=projectdir).strip()
+ target_egg = 'cubicweb_foo-0.1.0-py{0}.egg'.format(sys.version[:3]).encode()
+ self.assertTrue(osp.isdir(osp.join(targetdir, target_egg)),
+ 'target directory content: %s' % os.listdir(targetdir))
+ pkgdir = osp.join(targetdir, target_egg, b'cubicweb_foo')
+ self.assertTrue(osp.isdir(pkgdir),
+ os.listdir(osp.join(targetdir, target_egg)))
+ pkgcontent = [f for f in os.listdir(pkgdir) if f.endswith(b'.py')]
+ self.assertItemsEqual(pkgcontent,
+ [b'schema.py', b'entities.py', b'hooks.py', b'__init__.py',
+ b'__pkginfo__.py', b'views.py'])
+ finally:
+ shutil.rmtree(tmpdir, ignore_errors=True)
+
+
+if __name__ == '__main__':
+ from unittest import main
+ main()
diff -r 1400aee10df4 -r faf279e33298 cubicweb/devtools/test/unittest_fill.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/devtools/test/unittest_fill.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,70 @@
+# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""unit tests for cubicweb.devtools.fill module
+
+"""
+import re
+
+from logilab.common.testlib import TestCase, unittest_main
+
+from cubicweb.devtools.fill import ValueGenerator, _ValueGenerator
+
+ISODATE_SRE = re.compile('(?P<year>\d{4})-(?P<month>\d{2})-(?P<day>\d{2})$')
+
+
+class AutoExtendableTC(TestCase):
+
+ def setUp(self):
+ self.attrvalues = dir(_ValueGenerator)
+
+ def tearDown(self):
+ attrvalues = set(dir(_ValueGenerator))
+ for attrname in attrvalues - set(self.attrvalues):
+ delattr(_ValueGenerator, attrname)
+
+
+ def test_autoextend(self):
+ self.assertNotIn('generate_server', dir(ValueGenerator))
+ class MyValueGenerator(ValueGenerator):
+ def generate_server(self, index):
+ return attrname
+ self.assertIn('generate_server', dir(ValueGenerator))
+
+
+ def test_bad_signature_detection(self):
+ self.assertNotIn('generate_server', dir(ValueGenerator))
+ try:
+ class MyValueGenerator(ValueGenerator):
+ def generate_server(self):
+ pass
+ except TypeError:
+ self.assertNotIn('generate_server', dir(ValueGenerator))
+ else:
+ self.fail('TypeError not raised')
+
+
+ def test_signature_extension(self):
+ self.assertNotIn('generate_server', dir(ValueGenerator))
+ class MyValueGenerator(ValueGenerator):
+ def generate_server(self, index, foo):
+ pass
+ self.assertIn('generate_server', dir(ValueGenerator))
+
+
+if __name__ == '__main__':
+ unittest_main()
diff -r 1400aee10df4 -r faf279e33298 cubicweb/devtools/test/unittest_httptest.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/devtools/test/unittest_httptest.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,108 @@
+# copyright 2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""unittest for cubicweb.devtools.httptest module"""
+
+from six.moves import http_client
+
+from logilab.common.testlib import Tags
+from cubicweb.devtools.httptest import CubicWebServerTC, CubicWebWsgiTC
+
+
+class TwistedCWAnonTC(CubicWebServerTC):
+
+ def test_response(self):
+ try:
+ response = self.web_get()
+ except http_client.NotConnected as ex:
+ self.fail("Can't connection to test server: %s" % ex)
+
+ def test_response_anon(self):
+ response = self.web_get()
+ self.assertEqual(response.status, http_client.OK)
+
+ def test_base_url(self):
+ if self.config['base-url'] not in self.web_get().read().decode('ascii'):
+ self.fail('no mention of base url in retrieved page')
+
+
+class TwistedCWIdentTC(CubicWebServerTC):
+ test_db_id = 'httptest-cwident'
+ anonymous_allowed = False
+ tags = CubicWebServerTC.tags | Tags(('auth',))
+
+ def test_response_denied(self):
+ response = self.web_get()
+ self.assertEqual(response.status, http_client.FORBIDDEN)
+
+ def test_login(self):
+ response = self.web_get()
+ if response.status != http_client.FORBIDDEN:
+ self.skipTest('Already authenticated, "test_response_denied" must have failed')
+ # login
+ self.web_login(self.admlogin, self.admpassword)
+ response = self.web_get()
+ self.assertEqual(response.status, http_client.OK, response.body)
+ # logout
+ self.web_logout()
+ response = self.web_get()
+ self.assertEqual(response.status, http_client.FORBIDDEN, response.body)
+
+
+class WsgiCWAnonTC(CubicWebWsgiTC):
+
+ def test_response(self):
+ try:
+ response = self.web_get()
+ except http_client.NotConnected as ex:
+ self.fail("Can't connection to test server: %s" % ex)
+
+ def test_response_anon(self):
+ response = self.web_get()
+ self.assertEqual(response.status, http_client.OK)
+
+ def test_base_url(self):
+ if self.config['base-url'] not in self.web_get().read().decode('ascii'):
+ self.fail('no mention of base url in retrieved page')
+
+
+class WsgiCWIdentTC(CubicWebWsgiTC):
+ test_db_id = 'httptest-cwident'
+ anonymous_allowed = False
+ tags = CubicWebServerTC.tags | Tags(('auth',))
+
+ def test_response_denied(self):
+ response = self.web_get()
+ self.assertEqual(response.status, http_client.FORBIDDEN)
+
+ def test_login(self):
+ response = self.web_get()
+ if response.status != http_client.FORBIDDEN:
+ self.skipTest('Already authenticated, "test_response_denied" must have failed')
+ # login
+ self.web_login(self.admlogin, self.admpassword)
+ response = self.web_get()
+ self.assertEqual(response.status, http_client.OK, response.body)
+ # logout
+ self.web_logout()
+ response = self.web_get()
+ self.assertEqual(response.status, http_client.FORBIDDEN, response.body)
+
+
+if __name__ == '__main__':
+ from logilab.common.testlib import unittest_main
+ unittest_main()
diff -r 1400aee10df4 -r faf279e33298 cubicweb/devtools/test/unittest_i18n.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/devtools/test/unittest_i18n.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,91 @@
+# -*- coding: iso-8859-1 -*-
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""unit tests for i18n messages generator"""
+
+import os
+import os.path as osp
+import sys
+from subprocess import PIPE, Popen, STDOUT
+
+from unittest import TestCase, main
+
+
+DATADIR = osp.join(osp.abspath(osp.dirname(__file__)), 'data')
+
+
+def load_po(fname):
+ """load a po file and return a set of encountered (msgid, msgctx)"""
+ msgs = set()
+ msgid = msgctxt = None
+ with open(fname) as fobj:
+ for line in fobj:
+ if line.strip() in ('', '#'):
+ continue
+ if line.startswith('msgstr'):
+ assert not (msgid, msgctxt) in msgs
+ msgs.add((msgid, msgctxt))
+ msgid = msgctxt = None
+ elif line.startswith('msgid'):
+ msgid = line.split(' ', 1)[1][1:-1]
+ elif line.startswith('msgctx'):
+ msgctxt = line.split(' ', 1)[1][1: -1]
+ elif msgid is not None:
+ msgid += line[1:-1]
+ elif msgctxt is not None:
+ msgctxt += line[1:-1]
+ return msgs
+
+
+class cubePotGeneratorTC(TestCase):
+ """test case for i18n pot file generator"""
+
+ def test_i18ncube(self):
+ env = os.environ.copy()
+ if 'PYTHONPATH' in env:
+ env['PYTHONPATH'] += os.pathsep
+ else:
+ env['PYTHONPATH'] = ''
+ env['PYTHONPATH'] += osp.join(DATADIR, 'libpython')
+ cubedir = osp.join(DATADIR, 'libpython', 'cubicweb_i18ntestcube')
+ self._check(cubedir, env)
+
+ def test_i18ncube_legacy_layout(self):
+ env = os.environ.copy()
+ env['CW_CUBES_PATH'] = osp.join(DATADIR, 'cubes')
+ if 'PYTHONPATH' in env:
+ env['PYTHONPATH'] += os.pathsep
+ else:
+ env['PYTHONPATH'] = ''
+ env['PYTHONPATH'] += DATADIR
+ cubedir = osp.join(DATADIR, 'cubes', 'i18ntestcube')
+ self._check(cubedir, env)
+
+ def _check(self, cubedir, env):
+ cmd = [sys.executable, '-m', 'cubicweb', 'i18ncube', 'i18ntestcube']
+ proc = Popen(cmd, env=env, stdout=PIPE, stderr=STDOUT)
+ stdout, _ = proc.communicate()
+ msg = stdout.decode(sys.getdefaultencoding(), errors='replace')
+ self.assertEqual(proc.returncode, 0, msg=msg)
+ msgs = load_po(osp.join(cubedir, 'i18n', 'en.po.ref'))
+ newmsgs = load_po(osp.join(cubedir, 'i18n', 'en.po'))
+ self.assertEqual(msgs, newmsgs)
+
+
+if __name__ == '__main__':
+ main()
diff -r 1400aee10df4 -r faf279e33298 cubicweb/devtools/test/unittest_qunit.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/devtools/test/unittest_qunit.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,27 @@
+from cubicweb.devtools import qunit
+
+
+def js(name):
+ return '/static/js_examples/' + name
+
+class QUnitTestCaseTC(qunit.QUnitTestCase):
+
+ all_js_tests = (
+ (js('test_simple_success.js'),),
+ (js('test_with_dep.js'), (js('dep_1.js'),)),
+ (js('test_with_ordered_deps.js'), (js('dep_1.js'), js('deps_2.js'),)),
+ )
+
+
+ def test_simple_failure(self):
+ js_tests = list(self._test_qunit(js('test_simple_failure.js')))
+ self.assertEqual(len(js_tests), 3)
+ test_1, test_2, test_3 = js_tests
+ self.assertRaises(self.failureException, test_1[1], *test_1[2:])
+ self.assertRaises(self.failureException, test_2[1], *test_2[2:])
+ test_3[1](*test_3[2:])
+
+
+if __name__ == '__main__':
+ from unittest import main
+ main()
diff -r 1400aee10df4 -r faf279e33298 cubicweb/devtools/test/unittest_testlib.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/devtools/test/unittest_testlib.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,298 @@
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""unittests for cw.devtools.testlib module"""
+
+from io import BytesIO, StringIO
+from unittest import TextTestRunner
+
+from six import PY2
+
+from logilab.common.testlib import TestSuite, TestCase, unittest_main
+from logilab.common.registry import yes
+
+from cubicweb.devtools import htmlparser
+from cubicweb.devtools.testlib import CubicWebTC
+from cubicweb.pytestconf import clean_repo_test_cls
+
+class FakeFormTC(TestCase):
+ def test_fake_form(self):
+ class entity:
+ cw_etype = 'Entity'
+ eid = 0
+ sio = BytesIO(b'hop\n')
+ form = CubicWebTC.fake_form('import',
+ {'file': ('filename.txt', sio),
+ 'encoding': u'utf-8',
+ }, [(entity, {'field': 'value'})])
+ self.assertEqual(form, {'__form_id': 'import',
+ '__maineid': 0,
+ '__type:0': 'Entity',
+ '_cw_entity_fields:0': '__type,field',
+ '_cw_fields': 'encoding,file',
+ 'eid': [0],
+ 'encoding': u'utf-8',
+ 'field:0': 'value',
+ 'file': ('filename.txt', sio)})
+
+class WebTestTC(TestCase):
+
+ def setUp(self):
+ output = BytesIO() if PY2 else StringIO()
+ self.runner = TextTestRunner(stream=output)
+
+ def test_error_raised(self):
+ class MyWebTest(CubicWebTC):
+
+ def test_error_view(self):
+ with self.admin_access.web_request() as req:
+ req.create_entity('Bug', title=u"bt")
+ self.view('raising', req.execute('Bug B'), template=None, req=req)
+
+ def test_correct_view(self):
+ with self.admin_access.web_request() as req:
+ self.view('primary', req.execute('CWUser U'), template=None, req=req)
+
+ tests = [MyWebTest('test_error_view'), MyWebTest('test_correct_view')]
+ result = self.runner.run(TestSuite(tests))
+ self.assertEqual(result.testsRun, 2)
+ self.assertEqual(len(result.errors), 0)
+ self.assertEqual(len(result.failures), 1)
+ clean_repo_test_cls(MyWebTest)
+
+
+class RepoInstancesConsistencyTC(CubicWebTC):
+ test_db_id = 'RepoInstancesConsistencyTC'
+
+ def pre_setup_database(self, cnx, config):
+ self.assertIs(cnx.repo, config.repository())
+
+ def test_pre_setup(self):
+ pass
+
+
+HTML_PAGE = u"""<html>
+ <head>
+  <title>need a title</title>
+ </head>
+ <body>
+  <h1>Hello World !</h1>
+ </body>
+</html>
+"""
+
+HTML_PAGE2 = u"""<html>
+ <head>
+  <title>need a title</title>
+ </head>
+ <body>
+  <h1>Test</h1>
+  <h1>Hello world !</h1>
+  <h2>h2 title</h2>
+  <h3>h3 title</h3>
+  <h2>antoher h2 title</h2>
+  <h4>h4 title</h4>
+  <p>Logilab introduces CW !</p>
+  <a href="http://www.logilab.org">Logilab</a>
+ </body>
+</html>
+"""
+
+HTML_PAGE_ERROR = u"""<html>
+ <head>
+  <title>need a title</title>
+ </head>
+ <body>
+  <h1>unclosed title
+ </body>
+</html>
+"""
+
+
+class HTMLPageInfoTC(TestCase):
+ """test cases for PageInfo"""
+
+ def setUp(self):
+ parser = htmlparser.HTMLValidator()
+ # disable cleanup that would remove doctype
+ parser.preprocess_data = lambda data: data
+ self.page_info = parser.parse_string(HTML_PAGE2)
+
+ def test_source1(self):
+ """make sure source is stored correctly"""
+ self.assertEqual(self.page_info.source, HTML_PAGE2)
+
+ def test_source2(self):
+ """make sure source is stored correctly - raise exception"""
+ parser = htmlparser.DTDValidator()
+ self.assertRaises(AssertionError, parser.parse_string, HTML_PAGE_ERROR)
+
+ def test_has_title_no_level(self):
+ """tests h? tags information"""
+ self.assertEqual(self.page_info.has_title('Test'), True)
+ self.assertEqual(self.page_info.has_title('Test '), False)
+ self.assertEqual(self.page_info.has_title('Tes'), False)
+ self.assertEqual(self.page_info.has_title('Hello world !'), True)
+
+ def test_has_title_level(self):
+ """tests h? tags information"""
+ self.assertEqual(self.page_info.has_title('Test', level = 1), True)
+ self.assertEqual(self.page_info.has_title('Test', level = 2), False)
+ self.assertEqual(self.page_info.has_title('Test', level = 3), False)
+ self.assertEqual(self.page_info.has_title('Test', level = 4), False)
+ self.assertRaises(IndexError, self.page_info.has_title, 'Test', level = 5)
+
+ def test_has_title_regexp_no_level(self):
+ """tests has_title_regexp() with no particular level specified"""
+ self.assertEqual(self.page_info.has_title_regexp('h[23] title'), True)
+
+ def test_has_title_regexp_level(self):
+ """tests has_title_regexp() with a particular level specified"""
+ self.assertEqual(self.page_info.has_title_regexp('h[23] title', 2), True)
+ self.assertEqual(self.page_info.has_title_regexp('h[23] title', 3), True)
+ self.assertEqual(self.page_info.has_title_regexp('h[23] title', 4), False)
+
+ def test_appears(self):
+ """tests PageInfo.appears()"""
+ self.assertEqual(self.page_info.appears('CW'), True)
+ self.assertEqual(self.page_info.appears('Logilab'), True)
+ self.assertEqual(self.page_info.appears('Logilab introduces'), True)
+ self.assertEqual(self.page_info.appears('H2 title'), False)
+
+ def test_has_link(self):
+ """tests has_link()"""
+ self.assertEqual(self.page_info.has_link('Logilab'), True)
+ self.assertEqual(self.page_info.has_link('logilab'), False)
+ self.assertEqual(self.page_info.has_link('Logilab', 'http://www.logilab.org'), True)
+ self.assertEqual(self.page_info.has_link('Logilab', 'http://www.google.com'), False)
+
+ def test_has_link_regexp(self):
+ """test has_link_regexp()"""
+ self.assertEqual(self.page_info.has_link_regexp('L[oi]gilab'), True)
+ self.assertEqual(self.page_info.has_link_regexp('L[ai]gilab'), False)
+
+
+class CWUtilitiesTC(CubicWebTC):
+
+ def test_temporary_permissions_eschema(self):
+ eschema = self.schema['CWUser']
+ with self.temporary_permissions(CWUser={'read': ()}):
+ self.assertEqual(eschema.permissions['read'], ())
+ self.assertTrue(eschema.permissions['add'])
+ self.assertTrue(eschema.permissions['read'], ())
+
+ def test_temporary_permissions_rdef(self):
+ rdef = self.schema['CWUser'].rdef('in_group')
+ with self.temporary_permissions((rdef, {'read': ()})):
+ self.assertEqual(rdef.permissions['read'], ())
+ self.assertTrue(rdef.permissions['add'])
+ self.assertTrue(rdef.permissions['read'], ())
+
+ def test_temporary_permissions_rdef_with_exception(self):
+ rdef = self.schema['CWUser'].rdef('in_group')
+ try:
+ with self.temporary_permissions((rdef, {'read': ()})):
+ self.assertEqual(rdef.permissions['read'], ())
+ self.assertTrue(rdef.permissions['add'])
+ raise ValueError('goto')
+ except ValueError:
+ self.assertTrue(rdef.permissions['read'], ())
+ else:
+ self.fail('exception was caught unexpectedly')
+
+ def test_temporary_appobjects_registered(self):
+
+ class AnAppobject(object):
+ __registries__ = ('hip',)
+ __regid__ = 'hop'
+ __select__ = yes()
+ registered = None
+
+ @classmethod
+ def __registered__(cls, reg):
+ cls.registered = reg
+
+ with self.temporary_appobjects(AnAppobject):
+ self.assertEqual(self.vreg['hip'], AnAppobject.registered)
+ self.assertIn(AnAppobject, self.vreg['hip']['hop'])
+ self.assertNotIn(AnAppobject, self.vreg['hip']['hop'])
+
+ def test_login(self):
+ """Calling login should not break hook control"""
+ with self.admin_access.repo_cnx() as cnx:
+ self.hook_executed = False
+ self.create_user(cnx, 'babar')
+ cnx.commit()
+
+ from cubicweb.server import hook
+ from cubicweb.predicates import is_instance
+
+ class MyHook(hook.Hook):
+ __regid__ = 'whatever'
+ __select__ = hook.Hook.__select__ & is_instance('CWProperty')
+ category = 'test-hook'
+ events = ('after_add_entity',)
+ test = self
+
+ def __call__(self):
+ self.test.hook_executed = True
+
+ with self.new_access('babar').repo_cnx() as cnx:
+ with self.temporary_appobjects(MyHook):
+ with cnx.allow_all_hooks_but('test-hook'):
+ prop = cnx.create_entity('CWProperty', pkey=u'ui.language', value=u'en')
+ cnx.commit()
+ self.assertFalse(self.hook_executed)
+
+
+class RepoAccessTC(CubicWebTC):
+
+ def test_repo_connection(self):
+ acc = self.new_access('admin')
+ with acc.repo_cnx() as cnx:
+ rset = cnx.execute('Any X WHERE X is CWUser')
+ self.assertTrue(rset)
+
+ def test_client_connection(self):
+ acc = self.new_access('admin')
+ with acc.client_cnx() as cnx:
+ rset = cnx.execute('Any X WHERE X is CWUser')
+ self.assertTrue(rset)
+
+ def test_web_request(self):
+ acc = self.new_access('admin')
+ with acc.web_request(elephant='babar') as req:
+ rset = req.execute('Any X WHERE X is CWUser')
+ self.assertTrue(rset)
+ self.assertEqual('babar', req.form['elephant'])
+
+ def test_close(self):
+ acc = self.new_access('admin')
+ acc.close()
+
+ def test_admin_access(self):
+ with self.admin_access.client_cnx() as cnx:
+ self.assertEqual('admin', cnx.user.login)
+
+
+if __name__ == '__main__':
+ unittest_main()
diff -r 1400aee10df4 -r faf279e33298 cubicweb/devtools/test/unittest_webtest.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/devtools/test/unittest_webtest.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,41 @@
+from six.moves import http_client
+
+from logilab.common.testlib import Tags
+from cubicweb.devtools.webtest import CubicWebTestTC
+
+
+class CWTTC(CubicWebTestTC):
+ def test_response(self):
+ response = self.webapp.get('/')
+ self.assertEqual(200, response.status_int)
+
+ def test_base_url(self):
+ if self.config['base-url'] not in self.webapp.get('/').text:
+ self.fail('no mention of base url in retrieved page')
+
+
+class CWTIdentTC(CubicWebTestTC):
+ test_db_id = 'webtest-ident'
+ anonymous_allowed = False
+ tags = CubicWebTestTC.tags | Tags(('auth',))
+
+ def test_reponse_denied(self):
+ res = self.webapp.get('/', expect_errors=True)
+ self.assertEqual(http_client.FORBIDDEN, res.status_int)
+
+ def test_login(self):
+ res = self.webapp.get('/', expect_errors=True)
+ self.assertEqual(http_client.FORBIDDEN, res.status_int)
+
+ self.login(self.admlogin, self.admpassword)
+ res = self.webapp.get('/')
+ self.assertEqual(http_client.OK, res.status_int)
+
+ self.logout()
+ res = self.webapp.get('/', expect_errors=True)
+ self.assertEqual(http_client.FORBIDDEN, res.status_int)
+
+
+if __name__ == '__main__':
+ import unittest
+ unittest.main()
diff -r 1400aee10df4 -r faf279e33298 cubicweb/devtools/testlib.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/devtools/testlib.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,1356 @@
+# copyright 2003-2016 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""this module contains base classes and utilities for cubicweb tests"""
+from __future__ import print_function
+
+import sys
+import re
+import warnings
+from os.path import dirname, join, abspath
+from math import log
+from contextlib import contextmanager
+from inspect import isgeneratorfunction
+from itertools import chain
+
+from six import text_type, string_types
+from six.moves import range
+from six.moves.urllib.parse import urlparse, parse_qs, unquote as urlunquote
+
+import yams.schema
+
+from logilab.common.testlib import Tags, nocoverage
+from logilab.common.debugger import Debugger
+from logilab.common.umessage import message_from_string
+from logilab.common.decorators import cached, classproperty, clear_cache, iclassmethod
+from logilab.common.deprecation import deprecated, class_deprecated
+from logilab.common.shellutils import getlogin
+
+from cubicweb import (ValidationError, NoSelectableObject, AuthenticationError,
+ BadConnectionId)
+from cubicweb import cwconfig, devtools, web, server, repoapi
+from cubicweb.utils import json
+from cubicweb.sobjects import notification
+from cubicweb.web import Redirect, application, eid_param
+from cubicweb.server.hook import SendMailOp
+from cubicweb.server.session import Session
+from cubicweb.devtools import SYSTEM_ENTITIES, SYSTEM_RELATIONS, VIEW_VALIDATORS
+from cubicweb.devtools import fake, htmlparser, DEFAULT_EMPTY_DB_ID
+
+
+if sys.version_info[:2] < (3, 4):
+ from unittest2 import TestCase
+ if not hasattr(TestCase, 'subTest'):
+ raise ImportError('no subTest support in available unittest2')
+else:
+ from unittest import TestCase
+
+# in python 2.7, DeprecationWarning are not shown anymore by default
+warnings.filterwarnings('default', category=DeprecationWarning)
+
+
+# provide a data directory for the test class ##################################
+
+class BaseTestCase(TestCase):
+    """TestCase subclass adding convenient access to the test's standard
+    ``data`` directory, located next to the test module."""
+
+    @classproperty
+    @cached
+    def datadir(cls): # pylint: disable=E0213
+        """helper attribute holding the standard test's data directory
+        """
+        mod = sys.modules[cls.__module__]
+        return join(dirname(abspath(mod.__file__)), 'data')
+    # cache it (use a class method to cache on class since TestCase is
+    # instantiated for each test run)
+
+    @classmethod
+    def datapath(cls, *fname):
+        """joins the object's datadir and `fname`"""
+        return join(cls.datadir, *fname)
+
+
+if hasattr(BaseTestCase, 'assertItemsEqual'):
+ BaseTestCase.assertCountEqual = BaseTestCase.assertItemsEqual
+
+
+# low-level utilities ##########################################################
+
+class CubicWebDebugger(Debugger):
+    """special debugger class providing a 'view' function which saves some
+    html into a temporary file and open a web browser to examinate it.
+    """
+    def do_view(self, arg):
+        # debugger command: dump the value of expression `arg` (expected to
+        # be HTML text) to a fixed temp file and open it in a browser
+        import webbrowser
+        data = self._getval(arg)
+        with open('/tmp/toto.html', 'w') as toto:
+            toto.write(data)
+        webbrowser.open('file:///tmp/toto.html')
+
+
+def line_context_filter(line_no, center, before=3, after=None):
+    """Return True if `line_no` lies within the context window
+    [center - before, center + after].
+
+    if after is None: after = before
+    """
+    if after is None:
+        after = before
+    return center - before <= line_no <= center + after
+
+
+def unprotected_entities(schema, strict=False):
+    """Return the set of non-final entity types of `schema`, excluding yams
+    base types and, unless `strict`, "system" entities (eg CWGroup, CWUser...).
+    """
+    if strict:
+        protected_entities = yams.schema.BASE_TYPES
+    else:
+        protected_entities = yams.schema.BASE_TYPES.union(SYSTEM_ENTITIES)
+    return set(schema.entities()) - protected_entities
+
+
+class JsonValidator(object):
+    """Content validator for 'application/json' responses: parsing succeeds
+    iff the payload is valid JSON (same interface as htmlparser validators)."""
+    def parse_string(self, data):
+        return json.loads(data.decode('ascii'))
+
+
+@contextmanager
+def real_error_handling(app):
+    """By default, CubicWebTC `app` attribute (ie the publisher) is monkey
+    patched so that unexpected error are raised rather than going through the
+    `error_handler` method.
+
+    By using this context manager you disable this monkey-patching temporarily.
+    Hence when publishing a request no error will be raised, you'll get
+    req.status_out set to an HTTP error status code and the generated page will
+    usually hold a traceback as HTML.
+
+    >>> with real_error_handling(app):
+    >>> page = app.handle_request(req)
+    """
+    # remove the monkey patched error handler
+    fake_error_handler = app.error_handler
+    del app.error_handler
+    try:
+        # return the app
+        yield app
+    finally:
+        # restore the patched handler even if the managed block raised, so a
+        # failing request does not leave `app` with real error handling on
+        app.error_handler = fake_error_handler
+
+
+# email handling, to test emails sent by an application ########################
+
+MAILBOX = []
+
+
+class Email(object):
+    """you'll get instances of Email into MAILBOX during tests that trigger
+    some notification.
+
+    * `msg` is the original message object
+
+    * `recipients` is a list of email address which are the recipients of this
+      message
+    """
+    def __init__(self, fromaddr, recipients, msg):
+        self.fromaddr = fromaddr
+        self.recipients = recipients
+        self.msg = msg
+
+    @property
+    def message(self):
+        # parse the raw message text into a umessage object on demand
+        return message_from_string(self.msg)
+
+    @property
+    def subject(self):
+        return self.message.get('Subject')
+
+    @property
+    def content(self):
+        return self.message.get_payload(decode=True)
+
+    def __repr__(self):
+        # the previous format string ('') had no conversion specifiers, so
+        # '%' with two arguments raised TypeError whenever repr() was called;
+        # use a descriptive representation instead
+        return '<Email to %s with subject %s>' % (','.join(self.recipients),
+                                                  self.message.get('Subject'))
+
+
+# the trick to get email into MAILBOX instead of actually sent: monkey patch
+# cwconfig.SMTP object
+class MockSMTP:
+    """Minimal stand-in for smtplib.SMTP: instead of sending, append each
+    message to the module-level MAILBOX so tests can inspect it."""
+
+    def __init__(self, server, port):
+        # signature kept compatible with smtplib.SMTP; connection ignored
+        pass
+
+    def close(self):
+        pass
+
+    def sendmail(self, fromaddr, recipients, msg):
+        MAILBOX.append(Email(fromaddr, recipients, msg))
+
+cwconfig.SMTP = MockSMTP
+
+
+# Repoaccess utility ###############################################3###########
+
+class RepoAccess(object):
+    """An helper to easily create object to access the repo as a specific user
+
+    Each RepoAccess have it own session.
+
+    A repo access can create three type of object:
+
+    .. automethod:: cubicweb.testlib.RepoAccess.cnx
+    .. automethod:: cubicweb.testlib.RepoAccess.web_request
+
+    The RepoAccess need to be closed to destroy the associated Session.
+    TestCase usually take care of this aspect for the user.
+
+    .. automethod:: cubicweb.testlib.RepoAccess.close
+    """
+
+    def __init__(self, repo, login, requestcls):
+        self._repo = repo
+        self._login = login
+        self.requestcls = requestcls
+        # the session opened here backs every cnx / web_request created later
+        self._session = self._unsafe_connect(login)
+
+    def _unsafe_connect(self, login, **kwargs):
+        """ a completely unsafe connect method for the tests """
+        # use an internal connection
+        with self._repo.internal_cnx() as cnx:
+            # try to get a user object
+            user = cnx.find('CWUser', login=login).one()
+            # touch attributes/relations now so they are cached while the
+            # internal connection is still usable
+            user.groups
+            user.properties
+            user.login
+            session = Session(user, self._repo)
+            self._repo._sessions[session.sessionid] = session
+            user._cw = user.cw_rset.req = session
+        with session.new_cnx() as cnx:
+            self._repo.hm.call_hooks('session_open', cnx)
+            # commit connection at this point in case write operation has been
+            # done during `session_open` hooks
+            cnx.commit()
+        return session
+
+    @contextmanager
+    def cnx(self):
+        """Context manager returning a server side connection for the user"""
+        with self._session.new_cnx() as cnx:
+            yield cnx
+
+    # aliases for bw compat
+    client_cnx = repo_cnx = cnx
+
+    @contextmanager
+    def web_request(self, url=None, headers=None, method='GET', **kwargs):
+        """Context manager returning a web request pre-linked to a client cnx
+
+        To commit and rollback use::
+
+            req.cnx.commit()
+            req.cnx.rollback()
+        """
+        # avoid the previous mutable default argument (headers={}): a dict
+        # shared between calls could leak header mutations across tests
+        if headers is None:
+            headers = {}
+        req = self.requestcls(self._repo.vreg, url=url, headers=headers,
+                              method=method, form=kwargs)
+        with self._session.new_cnx() as cnx:
+            req.set_cnx(cnx)
+            yield req
+
+    def close(self):
+        """Close the session associated to the RepoAccess"""
+        self._session.close()
+
+    @contextmanager
+    def shell(self):
+        """Context manager returning a migration shell helper bound to a new
+        connection; the connection is committed when the block exits."""
+        from cubicweb.server.migractions import ServerMigrationHelper
+        with self._session.new_cnx() as cnx:
+            mih = ServerMigrationHelper(None, repo=self._repo, cnx=cnx,
+                                        interactive=False,
+                                        # hack so it don't try to load fs schema
+                                        schema=1)
+            yield mih
+            cnx.commit()
+
+
+# base class for cubicweb tests requiring a full cw environments ###############
+
+class CubicWebTC(BaseTestCase):
+ """abstract class for test using an apptest environment
+
+ attributes:
+
+ * `vreg`, the vregistry
+ * `schema`, self.vreg.schema
+ * `config`, cubicweb configuration
+ * `cnx`, repoapi connection to the repository using an admin user
+ * `session`, server side session associated to `cnx`
+ * `app`, the cubicweb publisher (for web testing)
+ * `repo`, the repository object
+ * `admlogin`, login of the admin user
+ * `admpassword`, password of the admin user
+ * `shell`, create and use shell environment
+ * `anonymous_allowed`: flag telling if anonymous browsing should be allowed
+ """
+ appid = 'data'
+ configcls = devtools.ApptestConfiguration
+ requestcls = fake.FakeRequest
+ tags = Tags('cubicweb', 'cw_repo')
+ test_db_id = DEFAULT_EMPTY_DB_ID
+
+ # anonymous is logged by default in cubicweb test cases
+ anonymous_allowed = True
+
+    @classmethod
+    def setUpClass(cls):
+        """Instantiate the apptest configuration for this test class.
+
+        Subclasses must not pre-set a `config` class attribute; customize via
+        `configcls` / `init_config` instead.
+        """
+        test_module_file = sys.modules[cls.__module__].__file__
+        assert 'config' not in cls.__dict__, (
+            '%s has a config class attribute before entering setUpClass. '
+            'Let CubicWebTC.setUpClass instantiate it and modify it afterwards.' % cls)
+        cls.config = cls.configcls(cls.appid, test_module_file)
+        cls.config.mode = 'test'
+
+    def __init__(self, *args, **kwargs):
+        # per-instance state; repo/session are filled in by _init_repo()
+        self._admin_session = None
+        self.repo = None
+        # RepoAccess objects created via new_access(), closed on teardown
+        self._open_access = set()
+        super(CubicWebTC, self).__init__(*args, **kwargs)
+
+    def run(self, *args, **kwds):
+        # reject old-style generative tests up front with an explicit error
+        # rather than letting them silently pass without asserting anything
+        testMethod = getattr(self, self._testMethodName)
+        if isgeneratorfunction(testMethod):
+            raise RuntimeError(
+                '%s appears to be a generative test. This is not handled '
+                'anymore, use subTest API instead.' % self)
+        return super(CubicWebTC, self).run(*args, **kwds)
+
+ # repository connection handling ###########################################
+
+    def new_access(self, login):
+        """provide a new RepoAccess object for a given user
+
+        The access is automatically closed at the end of the test."""
+        login = text_type(login)
+        access = RepoAccess(self.repo, login, self.requestcls)
+        # tracked so _close_access() (registered via addCleanup) closes it
+        self._open_access.add(access)
+        return access
+
+    def _close_access(self):
+        # close every RepoAccess opened during the test, tolerating ones
+        # whose session was already closed by the test itself
+        while self._open_access:
+            try:
+                self._open_access.pop().close()
+            except BadConnectionId:
+                continue # already closed
+
+    @property
+    def session(self):
+        """return admin session"""
+        return self._admin_session
+
+    def _init_repo(self):
+        """init the repository and connection to it.
+        """
+        # get or restore a working db, using the class-level test_db_id cache
+        db_handler = devtools.get_test_db_handler(self.config, self.init_config)
+        db_handler.build_db_cache(self.test_db_id, self.pre_setup_database)
+        db_handler.restore_database(self.test_db_id)
+        self.repo = db_handler.get_repo(startup=True)
+        # get an admin session (without actual login)
+        login = text_type(db_handler.config.default_admin_config['login'])
+        self.admin_access = self.new_access(login)
+        self._admin_session = self.admin_access._session
+
+ # config management ########################################################
+
+    @classmethod # XXX could be turned into a regular method
+    def init_config(cls, config):
+        """configuration initialization hooks.
+
+        You may only want to override here the configuraton logic.
+
+        Otherwise, consider to use a different :class:`ApptestConfiguration`
+        defined in the `configcls` class attribute.
+
+        This method will be called by the database handler once the config has
+        been properly bootstrapped.
+        """
+        admincfg = config.default_admin_config
+        cls.admlogin = text_type(admincfg['login'])
+        cls.admpassword = admincfg['password']
+        # uncomment the line below if you want rql queries to be logged
+        # config.global_set_option('query-log-file',
+        # '/tmp/test_rql_log.' + `os.getpid()`)
+        config.global_set_option('log-file', None)
+        # set default-dest-addrs to a dumb email address to avoid mailbox or
+        # mail queue pollution
+        config.global_set_option('default-dest-addrs', ['whatever'])
+        # NOTE: the next four calls overwrite the values just set above
+        # (default-dest-addrs and sender-addr are each set twice; the last
+        # call wins for each option)
+        send_to = '%s@logilab.fr' % getlogin()
+        config.global_set_option('sender-addr', send_to)
+        config.global_set_option('default-dest-addrs', send_to)
+        config.global_set_option('sender-name', 'cubicweb-test')
+        config.global_set_option('sender-addr', 'cubicweb-test@logilab.fr')
+        # default_base_url on config class isn't enough for TestServerConfiguration
+        config.global_set_option('base-url', config.default_base_url())
+        # web resources
+        try:
+            config.global_set_option('embed-allowed', re.compile('.*'))
+        except Exception: # not in server only configuration
+            pass
+
+    @property
+    def vreg(self):
+        """return the repository's vregistry"""
+        return self.repo.vreg
+
+ # global resources accessors ###############################################
+
+    @property
+    def schema(self):
+        """return the application schema"""
+        return self.vreg.schema
+
+    def set_option(self, optname, value):
+        """set a configuration option globally for the test instance"""
+        self.config.global_set_option(optname, value)
+
+    def set_debug(self, debugmode):
+        """permanently switch the server debug mode (see server.set_debug)"""
+        server.set_debug(debugmode)
+
+    def debugged(self, debugmode):
+        """context manager switching server debug mode temporarily"""
+        return server.debugged(debugmode)
+
+ # default test setup and teardown #########################################
+
+    def setUp(self):
+        """Set up the repository, admin access and database for one test."""
+        assert hasattr(self, 'config'), (
+            'It seems that CubicWebTC.setUpClass has not been called. '
+            'Missing super() call in %s?' % self.setUpClass)
+        # monkey patch send mail operation so emails are sent synchronously
+        self._patch_SendMailOp()
+        # if repo init failed once for this class, skip all remaining tests
+        # instead of failing each of them the same way
+        previous_failure = self.__class__.__dict__.get('_repo_init_failed')
+        if previous_failure is not None:
+            self.skipTest('repository is not initialised: %r' % previous_failure)
+        try:
+            self._init_repo()
+        except Exception as ex:
+            self.__class__._repo_init_failed = ex
+            raise
+        self.addCleanup(self._close_access)
+        self.config.set_anonymous_allowed(self.anonymous_allowed)
+        self.setup_database()
+        MAILBOX[:] = [] # reset mailbox
+
+    def tearDown(self):
+        # XXX hack until logilab.common.testlib is fixed
+        if self._admin_session is not None:
+            self._admin_session.close()
+            self._admin_session = None
+        # run registered cleanups manually (LIFO), relying on the private
+        # unittest _cleanups list -- see the XXX above
+        while self._cleanups:
+            cleanup, args, kwargs = self._cleanups.pop(-1)
+            cleanup(*args, **kwargs)
+        self.repo.turn_repo_off()
+
+    def _patch_SendMailOp(self):
+        # monkey patch send mail operation so emails are sent synchronously
+        _old_mail_postcommit_event = SendMailOp.postcommit_event
+        SendMailOp.postcommit_event = SendMailOp.sendmails
+
+        def reverse_SendMailOp_monkey_patch():
+            # undo the class-level patch when the test finishes
+            SendMailOp.postcommit_event = _old_mail_postcommit_event
+
+        self.addCleanup(reverse_SendMailOp_monkey_patch)
+
+ def setup_database(self):
+ """add your database setup code by overriding this method"""
+
+ @classmethod
+ def pre_setup_database(cls, cnx, config):
+ """add your pre database setup code by overriding this method
+
+ Do not forget to set the cls.test_db_id value to enable caching of the
+ result.
+ """
+
+ # user / session management ###############################################
+
+ @deprecated('[3.19] explicitly use RepoAccess object in test instead')
+ def user(self, req=None):
+ """return the application schema"""
+ if req is None:
+ return self.request().user
+ else:
+ return req.user
+
+    @iclassmethod # XXX turn into a class method
+    def create_user(self, req, login=None, groups=('users',), password=None,
+                    email=None, commit=True, **kwargs):
+        """create and return a new user entity
+
+        `req` may be a request or a server-side session/connection; `password`
+        defaults to the login; `groups` are resolved by name and linked via
+        in_group; an EmailAddress entity is created when `email` is given.
+        """
+        if password is None:
+            password = login
+        if login is not None:
+            login = text_type(login)
+        user = req.create_entity('CWUser', login=login,
+                                 upassword=password, **kwargs)
+        req.execute('SET X in_group G WHERE X eid %%(x)s, G name IN(%s)'
+                    % ','.join(repr(str(g)) for g in groups),
+                    {'x': user.eid})
+        if email is not None:
+            req.create_entity('EmailAddress', address=text_type(email),
+                              reverse_primary_email=user)
+        # invalidate the cached (empty) in_group relation set above
+        user.cw_clear_relation_cache('in_group', 'subject')
+        if commit:
+            try:
+                req.commit() # req is a session
+            except AttributeError:
+                req.cnx.commit()
+        return user
+
+ # other utilities #########################################################
+
+ @contextmanager
+ def temporary_appobjects(self, *appobjects):
+ self.vreg._loadedmods.setdefault(self.__module__, {})
+ for obj in appobjects:
+ self.vreg.register(obj)
+ registered = getattr(obj, '__registered__', None)
+ if registered:
+ for registry in obj.__registries__:
+ registered(self.vreg[registry])
+ try:
+ yield
+ finally:
+ for obj in appobjects:
+ self.vreg.unregister(obj)
+
+ @contextmanager
+ def temporary_permissions(self, *perm_overrides, **perm_kwoverrides):
+ """Set custom schema permissions within context.
+
+ There are two ways to call this method, which may be used together :
+
+ * using positional argument(s):
+
+ .. sourcecode:: python
+
+ rdef = self.schema['CWUser'].rdef('login')
+ with self.temporary_permissions((rdef, {'read': ()})):
+ ...
+
+
+ * using named argument(s):
+
+ .. sourcecode:: python
+
+ with self.temporary_permissions(CWUser={'read': ()}):
+ ...
+
+ Usually the former will be preferred to override permissions on a
+ relation definition, while the latter is well suited for entity types.
+
+ The allowed keys in the permission dictionary depend on the schema type
+ (entity type / relation definition). Resulting permissions will be
+ similar to `orig_permissions.update(partial_perms)`.
+ """
+ torestore = []
+ for erschema, etypeperms in chain(perm_overrides, perm_kwoverrides.items()):
+ if isinstance(erschema, string_types):
+ erschema = self.schema[erschema]
+ for action, actionperms in etypeperms.items():
+ origperms = erschema.permissions[action]
+ erschema.set_action_permissions(action, actionperms)
+ torestore.append([erschema, action, origperms])
+ try:
+ yield
+ finally:
+ for erschema, action, permissions in torestore:
+ if action is None:
+ erschema.permissions = permissions
+ else:
+ erschema.set_action_permissions(action, permissions)
+
+ def assertModificationDateGreater(self, entity, olddate):
+ entity.cw_attr_cache.pop('modification_date', None)
+ self.assertGreater(entity.modification_date, olddate)
+
+ def assertMessageEqual(self, req, params, expected_msg):
+ msg = req.session.data[params['_cwmsgid']]
+ self.assertEqual(expected_msg, msg)
+
+ # workflow utilities #######################################################
+
+ def assertPossibleTransitions(self, entity, expected):
+ transitions = entity.cw_adapt_to('IWorkflowable').possible_transitions()
+ self.assertListEqual(sorted(tr.name for tr in transitions),
+ sorted(expected))
+
+ # views and actions registries inspection ##################################
+
+ def pviews(self, req, rset):
+ return sorted((a.__regid__, a.__class__)
+ for a in self.vreg['views'].possible_views(req, rset=rset))
+
+ def pactions(self, req, rset,
+ skipcategories=('addrelated', 'siteactions', 'useractions',
+ 'footer', 'manage')):
+ return [(a.__regid__, a.__class__)
+ for a in self.vreg['actions'].poss_visible_objects(req, rset=rset)
+ if a.category not in skipcategories]
+
+ def pactions_by_cats(self, req, rset, categories=('addrelated',)):
+ return [(a.__regid__, a.__class__)
+ for a in self.vreg['actions'].poss_visible_objects(req, rset=rset)
+ if a.category in categories]
+
+ def pactionsdict(self, req, rset,
+ skipcategories=('addrelated', 'siteactions', 'useractions',
+ 'footer', 'manage')):
+ res = {}
+ for a in self.vreg['actions'].poss_visible_objects(req, rset=rset):
+ if a.category not in skipcategories:
+ res.setdefault(a.category, []).append(a.__class__)
+ return res
+
+ def action_submenu(self, req, rset, id):
+ return self._test_action(self.vreg['actions'].select(id, req, rset=rset))
+
+ def _test_action(self, action):
+ class fake_menu(list):
+ @property
+ def items(self):
+ return self
+
+ class fake_box(object):
+ def action_link(self, action, **kwargs):
+ return (action.title, action.url())
+ submenu = fake_menu()
+ action.fill_menu(fake_box(), submenu)
+ return submenu
+
+ def list_views_for(self, rset):
+ """returns the list of views that can be applied on `rset`"""
+ req = rset.req
+ only_once_vids = ('primary', 'secondary', 'text')
+ req.data['ex'] = ValueError("whatever")
+ viewsvreg = self.vreg['views']
+ for vid, views in viewsvreg.items():
+ if vid[0] == '_':
+ continue
+ if rset.rowcount > 1 and vid in only_once_vids:
+ continue
+ views = [view for view in views
+ if view.category != 'startupview'
+ and not issubclass(view, notification.NotificationView)
+ and not isinstance(view, class_deprecated)]
+ if views:
+ try:
+ view = viewsvreg._select_best(views, req, rset=rset)
+ if view is None:
+ raise NoSelectableObject((req,), {'rset': rset}, views)
+ if view.linkable():
+ yield view
+ else:
+ not_selected(self.vreg, view)
+ # else the view is expected to be used as subview and should
+ # not be tested directly
+ except NoSelectableObject:
+ continue
+
+ def list_actions_for(self, rset):
+ """returns the list of actions that can be applied on `rset`"""
+ req = rset.req
+ for action in self.vreg['actions'].possible_objects(req, rset=rset):
+ yield action
+
+ def list_boxes_for(self, rset):
+ """returns the list of boxes that can be applied on `rset`"""
+ req = rset.req
+ for box in self.vreg['ctxcomponents'].possible_objects(req, rset=rset):
+ yield box
+
+ def list_startup_views(self):
+ """returns the list of startup views"""
+ with self.admin_access.web_request() as req:
+ for view in self.vreg['views'].possible_views(req, None):
+ if view.category == 'startupview':
+ yield view.__regid__
+ else:
+ not_selected(self.vreg, view)
+
+ # web ui testing utilities #################################################
+
+ @property
+ @cached
+ def app(self):
+ """return a cubicweb publisher"""
+ publisher = application.CubicWebPublisher(self.repo, self.config)
+
+ def raise_error_handler(*args, **kwargs):
+ raise
+
+ publisher.error_handler = raise_error_handler
+ return publisher
+
+ @deprecated('[3.19] use the .remote_calling method')
+ def remote_call(self, fname, *args):
+ """remote json call simulation"""
+ dump = json.dumps
+ args = [dump(arg) for arg in args]
+ req = self.request(fname=fname, pageid='123', arg=args)
+ ctrl = self.vreg['controllers'].select('ajax', req)
+ return ctrl.publish(), req
+
+ @contextmanager
+ def remote_calling(self, fname, *args):
+ """remote json call simulation"""
+ args = [json.dumps(arg) for arg in args]
+ with self.admin_access.web_request(fname=fname, pageid='123', arg=args) as req:
+ ctrl = self.vreg['controllers'].select('ajax', req)
+ yield ctrl.publish(), req
+
+ def app_handle_request(self, req, path='view'):
+ return self.app.core_handle(req, path)
+
+ @deprecated("[3.15] app_handle_request is the new and better way"
+ " (beware of small semantic changes)")
+ def app_publish(self, *args, **kwargs):
+ return self.app_handle_request(*args, **kwargs)
+
+ def ctrl_publish(self, req, ctrl='edit', rset=None):
+ """call the publish method of the edit controller"""
+ ctrl = self.vreg['controllers'].select(ctrl, req, appli=self.app)
+ try:
+ result = ctrl.publish(rset)
+ req.cnx.commit()
+ except web.Redirect:
+ req.cnx.commit()
+ raise
+ return result
+
+ @staticmethod
+ def fake_form(formid, field_dict=None, entity_field_dicts=()):
+ """Build _cw.form dictionnary to fake posting of some standard cubicweb form
+
+ * `formid`, the form id, usually form's __regid__
+
+ * `field_dict`, dictionary of name:value for fields that are not tied to an entity
+
+ * `entity_field_dicts`, list of (entity, dictionary) where dictionary contains name:value
+ for fields that are not tied to the given entity
+ """
+ assert field_dict or entity_field_dicts, \
+ 'field_dict and entity_field_dicts arguments must not be both unspecified'
+ if field_dict is None:
+ field_dict = {}
+ form = {'__form_id': formid}
+ fields = []
+ for field, value in field_dict.items():
+ fields.append(field)
+ form[field] = value
+
+ def _add_entity_field(entity, field, value):
+ entity_fields.append(field)
+ form[eid_param(field, entity.eid)] = value
+
+ for entity, field_dict in entity_field_dicts:
+ if '__maineid' not in form:
+ form['__maineid'] = entity.eid
+ entity_fields = []
+ form.setdefault('eid', []).append(entity.eid)
+ _add_entity_field(entity, '__type', entity.cw_etype)
+ for field, value in field_dict.items():
+ _add_entity_field(entity, field, value)
+ if entity_fields:
+ form[eid_param('_cw_entity_fields', entity.eid)] = ','.join(entity_fields)
+ if fields:
+ form['_cw_fields'] = ','.join(sorted(fields))
+ return form
+
+    @deprecated('[3.19] use .admin_request_from_url instead')
+    def req_from_url(self, url):
+        """parses `url` and builds the corresponding CW-web request
+
+        req.form will be setup using the url's query string
+        """
+        req = self.request(url=url)
+        # `unicode` is a py2-only builtin and raised NameError under py3 in
+        # this six-based module; use six's text_type (imported at module top)
+        if isinstance(url, text_type):
+            url = url.encode(req.encoding) # req.setup_params() expects encoded strings
+        querystring = urlparse(url)[-2]
+        params = parse_qs(querystring)
+        req.setup_params(params)
+        return req
+
+    @contextmanager
+    def admin_request_from_url(self, url):
+        """parses `url` and builds the corresponding CW-web request
+
+        req.form will be setup using the url's query string
+        """
+        with self.admin_access.web_request(url=url) as req:
+            # `unicode` is a py2-only builtin and raised NameError under py3
+            # in this six-based module; use six's text_type instead
+            if isinstance(url, text_type):
+                url = url.encode(req.encoding) # req.setup_params() expects encoded strings
+            querystring = urlparse(url)[-2]
+            params = parse_qs(querystring)
+            req.setup_params(params)
+            yield req
+
+ def url_publish(self, url, data=None):
+ """takes `url`, uses application's app_resolver to find the appropriate
+ controller and result set, then publishes the result.
+
+ To simulate post of www-form-encoded data, give a `data` dictionary
+ containing desired key/value associations.
+
+ This should pretty much correspond to what occurs in a real CW server
+ except the apache-rewriter component is not called.
+ """
+ with self.admin_request_from_url(url) as req:
+ if data is not None:
+ req.form.update(data)
+ ctrlid, rset = self.app.url_resolver.process(req, req.relative_path(False))
+ return self.ctrl_publish(req, ctrlid, rset)
+
+ def http_publish(self, url, data=None):
+ """like `url_publish`, except this returns a http response, even in case
+ of errors. You may give form parameters using the `data` argument.
+ """
+ with self.admin_request_from_url(url) as req:
+ if data is not None:
+ req.form.update(data)
+ with real_error_handling(self.app):
+ result = self.app_handle_request(req, req.relative_path(False))
+ return result, req
+
+ @staticmethod
+ def _parse_location(req, location):
+ try:
+ path, params = location.split('?', 1)
+ except ValueError:
+ path = location
+ params = {}
+ else:
+ cleanup = lambda p: (p[0], urlunquote(p[1]))
+ params = dict(cleanup(p.split('=', 1)) for p in params.split('&') if p)
+ if path.startswith(req.base_url()): # may be relative
+ path = path[len(req.base_url()):]
+ return path, params
+
+ def expect_redirect(self, callback, req):
+ """call the given callback with req as argument, expecting to get a
+ Redirect exception
+ """
+ try:
+ callback(req)
+ except Redirect as ex:
+ return self._parse_location(req, ex.location)
+ else:
+ self.fail('expected a Redirect exception')
+
+ def expect_redirect_handle_request(self, req, path='edit'):
+ """call the publish method of the application publisher, expecting to
+ get a Redirect exception
+ """
+ self.app_handle_request(req, path)
+ self.assertTrue(300 <= req.status_out < 400, req.status_out)
+ location = req.get_response_header('location')
+ return self._parse_location(req, location)
+
+ @deprecated("[3.15] expect_redirect_handle_request is the new and better way"
+ " (beware of small semantic changes)")
+ def expect_redirect_publish(self, *args, **kwargs):
+ return self.expect_redirect_handle_request(*args, **kwargs)
+
+ def set_auth_mode(self, authmode, anonuser=None):
+ self.set_option('auth-mode', authmode)
+ self.set_option('anonymous-user', anonuser)
+ if anonuser is None:
+ self.config.anonymous_credential = None
+ else:
+ self.config.anonymous_credential = (anonuser, anonuser)
+
+ def init_authentication(self, authmode, anonuser=None):
+ self.set_auth_mode(authmode, anonuser)
+ req = self.requestcls(self.vreg, url='login')
+ sh = self.app.session_handler
+ authm = sh.session_manager.authmanager
+ authm.anoninfo = self.vreg.config.anonymous_user()
+ authm.anoninfo = authm.anoninfo[0], {'password': authm.anoninfo[1]}
+ # not properly cleaned between tests
+ self.open_sessions = sh.session_manager._sessions = {}
+ return req, self.session
+
+ def assertAuthSuccess(self, req, origsession, nbsessions=1):
+ session = self.app.get_session(req)
+ cnx = repoapi.Connection(session)
+ req.set_cnx(cnx)
+ self.assertEqual(len(self.open_sessions), nbsessions, self.open_sessions)
+ self.assertEqual(session.login, origsession.login)
+ self.assertEqual(session.anonymous_session, False)
+
+ def assertAuthFailure(self, req, nbsessions=0):
+ with self.assertRaises(AuthenticationError):
+ self.app.get_session(req)
+ # +0 since we do not track the opened session
+ self.assertEqual(len(self.open_sessions), nbsessions)
+ clear_cache(req, 'get_authorization')
+
+ # content validation #######################################################
+
+ # validators are used to validate (XML, DTD, whatever) view's content
+ # validators availables are :
+ # DTDValidator : validates XML + declared DTD
+ # SaxOnlyValidator : guarantees XML is well formed
+ # None : do not try to validate anything
+ # validators used must be imported from from.devtools.htmlparser
+ content_type_validators = {
+ # maps MIME type : validator name
+ #
+ # do not set html validators here, we need HTMLValidator for html
+ # snippets
+ # 'text/html': DTDValidator,
+ # 'application/xhtml+xml': DTDValidator,
+ 'application/xml': htmlparser.XMLValidator,
+ 'text/xml': htmlparser.XMLValidator,
+ 'application/json': JsonValidator,
+ 'text/plain': None,
+ 'text/comma-separated-values': None,
+ 'text/x-vcard': None,
+ 'text/calendar': None,
+ 'image/png': None,
+ }
+ # maps vid : validator name (override content_type_validators)
+ vid_validators = dict((vid, htmlparser.VALMAP[valkey])
+ for vid, valkey in VIEW_VALIDATORS.items())
+
+ def view(self, vid, rset=None, req=None, template='main-template',
+ **kwargs):
+ """This method tests the view `vid` on `rset` using `template`
+
+ If no error occurred while rendering the view, the HTML is analyzed
+ and parsed.
+
+ :returns: an instance of `cubicweb.devtools.htmlparser.PageInfo`
+ encapsulation the generated HTML
+ """
+ if req is None:
+ assert rset is not None, 'you must supply at least one of rset or req'
+ req = rset.req
+ req.form['vid'] = vid
+ viewsreg = self.vreg['views']
+ view = viewsreg.select(vid, req, rset=rset, **kwargs)
+ if template is None: # raw view testing, no template
+ viewfunc = view.render
+ else:
+ kwargs['view'] = view
+ viewfunc = lambda **k: viewsreg.main_template(req, template,
+ rset=rset, **kwargs)
+ return self._test_view(viewfunc, view, template, kwargs)
+
+ def _test_view(self, viewfunc, view, template='main-template', kwargs={}):
+ """this method does the actual call to the view
+
+ If no error occurred while rendering the view, the HTML is analyzed
+ and parsed.
+
+ :returns: an instance of `cubicweb.devtools.htmlparser.PageInfo`
+ encapsulation the generated HTML
+ """
+ try:
+ output = viewfunc(**kwargs)
+ except Exception:
+ # hijack exception: generative tests stop when the exception
+ # is not an AssertionError
+ klass, exc, tcbk = sys.exc_info()
+ try:
+ msg = '[%s in %s] %s' % (klass, view.__regid__, exc)
+ except Exception:
+ msg = '[%s in %s] undisplayable exception' % (klass, view.__regid__)
+ exc = AssertionError(msg)
+ exc.__traceback__ = tcbk
+ raise exc
+ return self._check_html(output, view, template)
+
    def get_validator(self, view=None, content_type=None, output=None):
        # Pick a validator instance for the given view / MIME type / raw
        # output.
        # NOTE(review): this span is garbled by the patch extraction -- the
        # startswith() byte-literal arguments and part of the html-vs-xml
        # dispatch are missing, and the final assertWellFormed() return line
        # appears to belong to a sibling `_check_html` method. Restore from
        # upstream cubicweb/devtools/testlib.py before relying on it.
        if view is not None:
            try:
                # per-vid validators take precedence over MIME-type ones
                return self.vid_validators[view.__regid__]()
            except KeyError:
                if content_type is None:
                    content_type = view.content_type
        if content_type is None:
            content_type = 'text/html'
        if content_type in ('text/html', 'application/xhtml+xml') and output:
            if output.startswith(b''):
                # only check XML well-formness since HTMLValidator isn't html5
                # compatible and won't like various other extensions
                default_validator = htmlparser.XMLSyntaxValidator
            elif output.startswith(b' used in progress widget, unknown in html dtd
        output = re.sub('', '', output)
        return self.assertWellFormed(validator, output.strip(), context=view.__regid__)
+
    def assertWellFormed(self, validator, content, context=None):
        """Assert `content` parses cleanly with `validator`.

        On success, return whatever `validator.parse_string` returns.  On
        failure, the parsing exception is turned into an `AssertionError`
        whose message embeds the offending lines of `content`, so that
        generative tests keep running.
        """
        try:
            return validator.parse_string(content)
        except Exception:
            # hijack exception: generative tests stop when the exception
            # is not an AssertionError
            klass, exc, tcbk = sys.exc_info()
            if context is None:
                msg = u'[%s]' % (klass,)
            else:
                msg = u'[%s in %s]' % (klass, context)
            # NOTE(review): from here on `msg` is a byte string; the later
            # `msg += u'...'` concatenation and the bare `unicode` builtin
            # below are Python 2 only -- this error path would break under
            # Python 3. TODO confirm / port.
            msg = msg.encode(sys.getdefaultencoding(), 'replace')

            try:
                str_exc = str(exc)
            except Exception:
                str_exc = 'undisplayable exception'
            msg += str_exc.encode(sys.getdefaultencoding(), 'replace')
            if content is not None:
                # e.g. lxml syntax errors carry a (line, column) position
                position = getattr(exc, "position", (0,))[0]
                if position:
                    # define filter
                    if isinstance(content, str):
                        content = unicode(content, sys.getdefaultencoding(), 'replace')
                    content = validator.preprocess_data(content)
                    content = content.splitlines()
                    # width of the line-number column in the dump below
                    width = int(log(len(content), 10)) + 1
                    line_template = " %" + ("%i" % width) + "i: %s"
                    # XXX no need to iterate the whole file except to get
                    # the line number
                    content = u'\n'.join(line_template % (idx + 1, line)
                                         for idx, line in enumerate(content)
                                         if line_context_filter(idx + 1, position))
                    msg += u'\nfor content:\n%s' % content
            exc = AssertionError(msg)
            exc.__traceback__ = tcbk
            raise exc
+
+ def assertDocTestFile(self, testfile):
+ # doctest returns tuple (failure_count, test_count)
+ with self.admin_access.shell() as mih:
+ result = mih.process_script(testfile)
+ if result[0] and result[1]:
+ raise self.failureException("doctest file '%s' failed"
+ % testfile)
+
+ # notifications ############################################################
+
+ def assertSentEmail(self, subject, recipients=None, nb_msgs=None):
+ """test recipients in system mailbox for given email subject
+
+ :param subject: email subject to find in mailbox
+ :param recipients: list of email recipients
+ :param nb_msgs: expected number of entries
+ :returns: list of matched emails
+ """
+ messages = [email for email in MAILBOX
+ if email.message.get('Subject') == subject]
+ if recipients is not None:
+ sent_to = set()
+ for msg in messages:
+ sent_to.update(msg.recipients)
+ self.assertSetEqual(set(recipients), sent_to)
+ if nb_msgs is not None:
+ self.assertEqual(len(MAILBOX), nb_msgs)
+ return messages
+
+
+# auto-populating test classes and utilities ###################################
+
+from cubicweb.devtools.fill import insert_entity_queries, make_relations_queries
+
+# XXX cleanup unprotected_entities & all mess
+
+
def how_many_dict(schema, cnx, how_many, skip):
    """given a schema, compute how many entities by type we need to be able to
    satisfy relations cardinality.

    The `how_many` argument tells how many entities of which type we want at
    least.

    Return a dictionary with entity types as key, and the number of entities
    for this type as value.
    """
    # relmap maps (rschema, constrained etype) to the list of target entity
    # type names whose counts constrain it (used in step 2 below)
    relmap = {}
    for rschema in schema.relations():
        if rschema.final:
            continue
        for subj, obj in rschema.rdefs:
            card = rschema.rdef(subj, obj).cardinality
            # if the relation is mandatory, we'll need at least as many subj and
            # obj to satisfy it
            if card[0] in '1+' and card[1] in '1?':
                # subj has to be linked to at least one obj,
                # but obj can be linked to only one subj
                # -> we need at least as many subj as obj to satisfy
                #    cardinalities for this relation
                relmap.setdefault((rschema, subj), []).append(str(obj))
            if card[1] in '1+' and card[0] in '1?':
                # reverse subj and obj in the above explanation
                relmap.setdefault((rschema, obj), []).append(str(subj))
    unprotected = unprotected_entities(schema)
    for etype in skip: # XXX (syt) duh? explain or kill
        unprotected.add(etype)
    howmanydict = {}
    # step 1, compute a base number of each entity types: number of already
    # existing entities of this type + `how_many`
    for etype in unprotected_entities(schema, strict=True):
        howmanydict[str(etype)] = cnx.execute('Any COUNT(X) WHERE X is %s' % etype)[0][0]
        if etype in unprotected:
            howmanydict[str(etype)] += how_many
    # step 2, augment nb entity per types to satisfy cardinality constraints,
    # by recomputing for each relation that constrained an entity type:
    #
    # new num for etype = max(current num, sum(num for possible target etypes))
    #
    # XXX we should first check there is no cycle then propagate changes
    for (rschema, etype), targets in relmap.items():
        relfactor = sum(howmanydict[e] for e in targets)
        # NOTE(review): keys were stored as str(etype) above but read with the
        # schema object here -- this relies on schema types hashing/comparing
        # equal to their string name; confirm against yams' ERSchema
        howmanydict[str(etype)] = max(relfactor, howmanydict[etype])
    return howmanydict
+
+
class AutoPopulateTest(CubicWebTC):
    """base class for test with auto-populating of the database"""
    __abstract__ = True

    # shared database id so populated databases can be cached across tests
    test_db_id = 'autopopulate'

    tags = CubicWebTC.tags | Tags('autopopulated')

    pdbclass = CubicWebDebugger
    # this is a hook to be able to define a list of rql queries
    # that are application dependent and cannot be guessed automatically
    application_rql = []

    # entity types that should never be auto-populated
    no_auto_populate = ()
    # relation types to skip when generating random relations
    ignored_relations = set()

    def to_test_etypes(self):
        """Return the set of entity types whose views/actions/boxes are
        exercised by this test class.
        """
        return unprotected_entities(self.schema, strict=True)

    def custom_populate(self, how_many, cnx):
        """Hook: application-specific population, run before the generic one."""
        pass

    def post_populate(self, cnx):
        """Hook: run after the generic population is done."""
        pass

    @nocoverage
    def auto_populate(self, how_many):
        """this method populates the database with `how_many` entities
        of each possible type. It also inserts random relations between them
        """
        with self.admin_access.cnx() as cnx:
            # disable security so population is not limited by permissions
            with cnx.security_enabled(read=False, write=False):
                self._auto_populate(cnx, how_many)
                cnx.commit()

    def _auto_populate(self, cnx, how_many):
        """Insert entities then random relations between them, honoring
        `no_auto_populate` / `ignored_relations` and cardinality constraints.
        """
        self.custom_populate(how_many, cnx)
        vreg = self.vreg
        # how many entities of each type are needed to satisfy cardinalities
        howmanydict = how_many_dict(self.schema, cnx, how_many, self.no_auto_populate)
        for etype in unprotected_entities(self.schema):
            if etype in self.no_auto_populate:
                continue
            nb = howmanydict.get(etype, how_many)
            for rql, args in insert_entity_queries(etype, self.schema, vreg, nb):
                cnx.execute(rql, args)
        # map entity type name -> set of existing eids
        edict = {}
        for etype in unprotected_entities(self.schema, strict=True):
            rset = cnx.execute('%s X' % etype)
            edict[str(etype)] = set(row[0] for row in rset.rows)
        # collect already existing relation tuples to avoid duplicates
        existingrels = {}
        ignored_relations = SYSTEM_RELATIONS | self.ignored_relations
        for rschema in self.schema.relations():
            if rschema.final or rschema in ignored_relations or rschema.rule:
                continue
            rset = cnx.execute('DISTINCT Any X,Y WHERE X %s Y' % rschema)
            existingrels.setdefault(rschema.type, set()).update((x, y) for x, y in rset)
        q = make_relations_queries(self.schema, edict, cnx, ignored_relations,
                                   existingrels=existingrels)
        for rql, args in q:
            try:
                cnx.execute(rql, args)
            except ValidationError as ex:
                # failed to satisfy some constraint
                print('error in automatic db population', ex)
                cnx.commit_state = None  # reset uncommitable flag
        self.post_populate(cnx)

    def iter_individual_rsets(self, etypes=None, limit=None):
        """Yield one single-row result set per existing entity of the given
        types (at most `limit` per type).
        """
        etypes = etypes or self.to_test_etypes()
        with self.admin_access.web_request() as req:
            for etype in etypes:
                if limit:
                    rql = 'Any X LIMIT %s WHERE X is %s' % (limit, etype)
                else:
                    rql = 'Any X WHERE X is %s' % etype
                rset = req.execute(rql)
                for row in range(len(rset)):
                    if limit and row > limit:
                        break
                    # XXX iirk
                    rset2 = rset.limit(limit=1, offset=row)
                    yield rset2

    def iter_automatic_rsets(self, limit=10):
        """generates basic resultsets for each entity type"""
        etypes = self.to_test_etypes()
        if not etypes:
            return
        with self.admin_access.web_request() as req:
            for etype in etypes:
                yield req.execute('Any X LIMIT %s WHERE X is %s' % (limit, etype))
            etype1 = etypes.pop()
            try:
                etype2 = etypes.pop()
            except KeyError:
                # only one entity type: reuse it for the mixed query below
                etype2 = etype1
            # test a mixed query (DISTINCT/GROUP to avoid getting duplicate
            # X which make muledit view failing for instance (html validation fails
            # because of some duplicate "id" attributes)
            yield req.execute('DISTINCT Any X, MAX(Y) GROUPBY X WHERE X is %s, Y is %s' %
                              (etype1, etype2))
            # test some application-specific queries if defined
            for rql in self.application_rql:
                yield req.execute(rql)

    def _test_everything_for(self, rset):
        """this method tries to find everything that can be tested
        for `rset` and yields a callable test (as needed in generative tests)
        """
        propdefs = self.vreg['propertydefs']
        # make all components visible
        for k, v in propdefs.items():
            if k.endswith('visible') and not v['default']:
                propdefs[k]['default'] = True
        for view in self.list_views_for(rset):
            backup_rset = rset.copy(rset.rows, rset.description)
            with self.subTest(name=self._testname(rset, view.__regid__, 'view')):
                self.view(view.__regid__, rset,
                          rset.req.reset_headers(), 'main-template')
            # We have to do this because some views modify the
            # resultset's syntax tree
            rset = backup_rset
        for action in self.list_actions_for(rset):
            with self.subTest(name=self._testname(rset, action.__regid__, 'action')):
                self._test_action(action)
        for box in self.list_boxes_for(rset):
            # collect box output into a throwaway list: we only check that
            # rendering does not raise
            w = [].append
            with self.subTest(self._testname(rset, box.__regid__, 'box')):
                box.render(w)

    @staticmethod
    def _testname(rset, objid, objtype):
        """Build a readable sub-test name from the rset's column types and
        the tested object's id/kind.
        """
        return '%s_%s_%s' % ('_'.join(rset.column_types(0)), objid, objtype)
+
+
+# concrete class for automated application testing ############################
+
class AutomaticWebTest(AutoPopulateTest):
    """Import this if you want automatic tests to be run."""

    tags = AutoPopulateTest.tags | Tags('web', 'generated')

    def setUp(self):
        if self.__class__ is AutomaticWebTest:
            # Prevent direct use of AutomaticWebTest to avoid database caching
            # issues.
            return
        super(AutomaticWebTest, self).setUp()

        # access to self.app for proper initialization of the authentication
        # machinery (else some views may fail)
        self.app

    def test_one_each_config(self):
        """Exercise every view/action/box on rsets with a single entity each."""
        self.auto_populate(1)
        for rset in self.iter_automatic_rsets(limit=1):
            self._test_everything_for(rset)

    def test_ten_each_config(self):
        """Same as above, with ten entities per type."""
        self.auto_populate(10)
        for rset in self.iter_automatic_rsets(limit=10):
            self._test_everything_for(rset)

    def test_startup_views(self):
        """Render every registered startup view (no result set)."""
        for vid in self.list_startup_views():
            with self.admin_access.web_request() as req:
                with self.subTest(vid=vid):
                    self.view(vid, None, req)
+
+
+# registry instrumentization ###################################################
+
def not_selected(vreg, appobject):
    """Decrement the instrumented selection counter for `appobject`'s class.

    Silently does nothing when the registry has no `_selected` counter dict
    or when the class was never counted.
    """
    key = appobject.__class__
    try:
        counters = vreg._selected
        counters[key] = counters[key] - 1
    except (AttributeError, KeyError):
        pass
+
+
+# def vreg_instrumentize(testclass):
+# # XXX broken
+# from cubicweb.devtools.apptest import TestEnvironment
+# env = testclass._env = TestEnvironment('data', configcls=testclass.configcls)
+# for reg in env.vreg.values():
+# reg._selected = {}
+# try:
+# orig_select_best = reg.__class__.__orig_select_best
+# except Exception:
+# orig_select_best = reg.__class__._select_best
+# def instr_select_best(self, *args, **kwargs):
+# selected = orig_select_best(self, *args, **kwargs)
+# try:
+# self._selected[selected.__class__] += 1
+# except KeyError:
+# self._selected[selected.__class__] = 1
+# except AttributeError:
+# pass # occurs on reg used to restore database
+# return selected
+# reg.__class__._select_best = instr_select_best
+# reg.__class__.__orig_select_best = orig_select_best
+
+
+# def print_untested_objects(testclass, skipregs=('hooks', 'etypes')):
+# for regname, reg in testclass._env.vreg.items():
+# if regname in skipregs:
+# continue
+# for appobjects in reg.values():
+# for appobject in appobjects:
+# if not reg._selected.get(appobject):
+# print 'not tested', regname, appobject
diff -r 1400aee10df4 -r faf279e33298 cubicweb/devtools/webtest.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/devtools/webtest.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,34 @@
+from __future__ import absolute_import
+
+import webtest
+
+from cubicweb.wsgi import handler
+from cubicweb.devtools.testlib import CubicWebTC
+
+
class CubicWebTestTC(CubicWebTC):
    """Test class wrapping the CubicWeb WSGI application into a
    `webtest.TestApp`, made available as ``self.webapp``.
    """

    def setUp(self):
        super(CubicWebTestTC, self).setUp()
        self.config.global_set_option('base-url', 'http://localhost.local/')
        # call load_configuration again to let the config reset its datadir_url
        self.config.load_configuration()
        wsgi_app = handler.CubicWebWSGIApplication(self.config)
        self.webapp = webtest.TestApp(wsgi_app)

    def tearDown(self):
        del self.webapp
        super(CubicWebTestTC, self).tearDown()

    def login(self, user=None, password=None, **args):
        """GET the login controller with the given credentials.

        Defaults to the admin account; when only `user` is given, the
        password defaults to the login itself (admin excepted).
        """
        user = self.admlogin if user is None else user
        if password is None:
            if user == self.admlogin:
                password = self.admpassword
            else:
                password = user
        args['__login'] = user
        args['__password'] = password
        return self.webapp.get('/login', args)

    def logout(self):
        """GET the logout controller."""
        return self.webapp.get('/logout')
diff -r 1400aee10df4 -r faf279e33298 cubicweb/entities/__init__.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/entities/__init__.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,208 @@
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see .
+"""base application's entities class implementation: `AnyEntity`"""
+
+__docformat__ = "restructuredtext en"
+
+from warnings import warn
+
+from six import text_type, string_types
+
+from logilab.common.decorators import classproperty
+from logilab.common.deprecation import deprecated
+
+from cubicweb import Unauthorized
+from cubicweb.entity import Entity
+
+
class AnyEntity(Entity):
    """an entity instance has e_schema automagically set on the class and
    instances have access to their issuing cursor
    """
    __regid__ = 'Any'

    @classproperty
    def cw_etype(cls):
        """entity type as a unicode string"""
        return text_type(cls.__regid__)

    @classmethod
    def cw_create_url(cls, req, **kwargs):
        """ return the url of the entity creation form for this entity type"""
        return req.build_url('add/%s' % cls.__regid__, **kwargs)

    @classmethod
    @deprecated('[3.22] use cw_fti_index_rql_limit instead')
    def cw_fti_index_rql_queries(cls, req):
        """return the list of rql queries to fetch entities to FT-index

        The default is to fetch all entities at once and to prefetch
        indexable attributes but one could imagine iterating over
        "smaller" resultsets if the table is very big or returning
        a subset of entities that match some business-logic condition.
        """
        restrictions = ['X is %s' % cls.__regid__]
        selected = ['X']
        # prefetch every indexable attribute along with the entity itself
        for attrschema in sorted(cls.e_schema.indexable_attributes()):
            varname = attrschema.type.upper()
            restrictions.append('X %s %s' % (attrschema, varname))
            selected.append(varname)
        return ['Any %s WHERE %s' % (', '.join(selected),
                                     ', '.join(restrictions))]

    @classmethod
    def cw_fti_index_rql_limit(cls, req, limit=1000):
        """generate rsets of entities to FT-index

        By default, each successive result set is limited to 1000 entities
        """
        # backward compat: delegate to cw_fti_index_rql_queries when a
        # subclass still overrides the deprecated method
        if cls.cw_fti_index_rql_queries.__func__ != AnyEntity.cw_fti_index_rql_queries.__func__:
            warn("[3.22] cw_fti_index_rql_queries is replaced by cw_fti_index_rql_limit",
                 DeprecationWarning)
            for rql in cls.cw_fti_index_rql_queries(req):
                yield req.execute(rql)
            return
        restrictions = ['X is %s' % cls.__regid__]
        selected = ['X']
        start = 0
        for attrschema in sorted(cls.e_schema.indexable_attributes()):
            varname = attrschema.type.upper()
            restrictions.append('X %s %s' % (attrschema, varname))
            selected.append(varname)
        # keyset pagination on eid: fetch successive batches of `limit`
        # entities, restarting after the last eid seen
        while True:
            q_restrictions = restrictions + ['X eid > %s' % start]
            rset = req.execute('Any %s ORDERBY X LIMIT %s WHERE %s' %
                               (', '.join(selected),
                                limit,
                                ', '.join(q_restrictions)))
            if rset:
                start = rset[-1][0]
                yield rset
            else:
                break

    # meta data api ###########################################################

    def dc_title(self):
        """return a suitable *unicode* title for this entity"""
        # use the first non-meta attribute holding a value
        for rschema, attrschema in self.e_schema.attribute_definitions():
            if rschema.meta:
                continue
            value = self.cw_attr_value(rschema.type)
            if value is not None:
                # make the value printable (dates, floats, bytes, etc.)
                return self.printable_value(rschema.type, value, attrschema.type,
                                            format='text/plain')
        return u'%s #%s' % (self.dc_type(), self.eid)

    def dc_long_title(self):
        """return a more detailled title for this entity"""
        return self.dc_title()

    def dc_description(self, format='text/plain'):
        """return a suitable description for this entity"""
        if 'description' in self.e_schema.subjrels:
            return self.printable_value('description', format=format)
        return u''

    def dc_authors(self):
        """return a suitable description for the author(s) of the entity"""
        try:
            return ', '.join(u.name() for u in self.owned_by)
        except Unauthorized:
            return u''

    def dc_creator(self):
        """return a suitable description for the creator of the entity"""
        if self.creator:
            return self.creator.name()
        return u''

    def dc_date(self, date_format=None):  # XXX default to ISO 8601 ?
        """return latest modification date of this entity"""
        return self._cw.format_date(self.modification_date, date_format=date_format)

    def dc_type(self, form=''):
        """return the display name for the type of this entity (translated)"""
        return self.e_schema.display_name(self._cw, form)

    def dc_language(self):
        """return language used by this entity (translated)"""
        # check if entities has internationalizable attributes
        # XXX one is enough or check if all String attributes are internationalizable?
        for rschema, attrschema in self.e_schema.attribute_definitions():
            if rschema.rdef(self.e_schema, attrschema).internationalizable:
                return self._cw._(self._cw.user.property_value('ui.language'))
        return self._cw._(self._cw.vreg.property_value('ui.language'))

    @property
    def creator(self):
        """return the CWUser entity which has created this entity, or None if
        unknown or if the curent user doesn't has access to this euser
        """
        try:
            return self.created_by[0]
        except (Unauthorized, IndexError):
            return None

    # abstractions making the whole things (well, some at least) working ######

    def sortvalue(self, rtype=None):
        """return a value which can be used to sort this entity or given
        entity's attribute
        """
        if rtype is None:
            return self.dc_title().lower()
        value = self.cw_attr_value(rtype)
        # do not restrict to `unicode` because Bytes will return a `str` value
        if isinstance(value, string_types):
            return self.printable_value(rtype, format='text/plain').lower()
        return value
+
+
def fetch_config(fetchattrs, mainattr=None, pclass=AnyEntity, order='ASC'):
    """function to ease basic configuration of an entity class ORM. Basic usage
    is:

    .. sourcecode:: python

      class MyEntity(AnyEntity):

          fetch_attrs, cw_fetch_order = fetch_config(['attr1', 'attr2'])
          # uncomment line below if you want the same sorting for 'unrelated' entities
          # cw_fetch_unrelated_order = cw_fetch_order

    Using this, when using ORM methods retrieving this type of entity, 'attr1'
    and 'attr2' will be automatically prefetched and results will be sorted on
    'attr1' ascending (ie the first attribute in the list).

    This function will automatically add to fetched attributes those defined in
    parent class given using the `pclass` argument.

    Also, you can use `mainattr` and `order` arguments to have a different
    sorting.

    :param fetchattrs: sequence of attribute names to prefetch
    :param mainattr: attribute to sort on (default: first of `fetchattrs`)
    :param pclass: parent class whose `fetch_attrs` are appended, or None
    :param order: 'ASC' or 'DESC'
    :returns: a `(fetchattrs, fetch_order classmethod)` tuple suitable for
      class-body assignment
    """
    if pclass is not None:
        # build a new list instead of the previous in-place `+=`, which
        # silently mutated the list passed in by the caller
        fetchattrs = list(fetchattrs) + list(pclass.fetch_attrs)
    if mainattr is None:
        mainattr = fetchattrs[0]

    @classmethod
    def fetch_order(cls, select, attr, var):
        # sort on `var` only when `attr` is the configured main attribute
        if attr == mainattr:
            select.add_sort_var(var, order == 'ASC')

    return fetchattrs, fetch_order
diff -r 1400aee10df4 -r faf279e33298 cubicweb/entities/adapters.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/entities/adapters.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,429 @@
+# copyright 2010-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see .
+"""some basic entity adapter implementations, for interfaces used in the
+framework itself.
+"""
+from cubicweb import _
+
+from itertools import chain
+
+from logilab.mtconverter import TransformError
+from logilab.common.decorators import cached
+
+from cubicweb import ValidationError, view, ViolatedConstraint, UniqueTogetherError
+from cubicweb.predicates import is_instance, relation_possible, match_exception
+
+
class IEmailableAdapter(view.EntityAdapter):
    """Adapter giving access to an entity's email address and to the
    substitution keys usable in mass-mailing templates.
    """
    __regid__ = 'IEmailable'
    __select__ = relation_possible('primary_email') | relation_possible('use_email')

    def get_email(self):
        """Return the entity's email address, preferring `primary_email` over
        `use_email`, or None when neither is set.
        """
        for rtype in ('primary_email', 'use_email'):
            emails = getattr(self.entity, rtype, None)
            if emails:
                return emails[0].address
        return None

    def allowed_massmail_keys(self):
        """returns a set of allowed email substitution keys

        The default is to return the entity's attribute list but you might
        override this method to allow extra keys. For instance, a Person
        class might want to return a `companyname` key.
        """
        return {rschema.type
                for rschema, attrtype in self.entity.e_schema.attribute_definitions()
                if attrtype.type not in ('Password', 'Bytes')}

    def as_email_context(self):
        """returns the dictionary as used by the sendmail controller to
        build email bodies.

        NOTE: the dictionary keys should match the list returned by the
        `allowed_massmail_keys` method.
        """
        entity = self.entity
        return {attr: getattr(entity, attr)
                for attr in self.allowed_massmail_keys()}
+
+
class INotifiableAdapter(view.EntityAdapter):
    """Adapter controlling how notification emails reference an entity."""
    __regid__ = 'INotifiable'
    __select__ = is_instance('Any')

    def notification_references(self, view):
        """used to control References field of email send on notification
        for this entity. `view` is the notification view.

        Should return a list of eids which can be used to generate message
        identifiers of previously sent email(s)
        """
        tree = self.entity.cw_adapt_to('ITree')
        if tree is None:
            # not part of a tree: reference the entity itself when the
            # notification view uses timestamped message ids
            if view.msgid_timestamp:
                return (self.entity.eid,)
            return ()
        # reference every ancestor (the path minus the entity itself)
        return tree.path()[:-1]
+
+
class IFTIndexableAdapter(view.EntityAdapter):
    """standard adapter to handle fulltext indexing

    .. automethod:: cubicweb.entities.adapters.IFTIndexableAdapter.fti_containers
    .. automethod:: cubicweb.entities.adapters.IFTIndexableAdapter.get_words
    """
    __regid__ = 'IFTIndexable'
    __select__ = is_instance('Any')

    def fti_containers(self, _done=None):
        """return the list of entities to index when handling ``self.entity``

        The actual list of entities depends on ``fulltext_container`` usage
        in the datamodel definition
        """
        if _done is None:
            _done = set()
        entity = self.entity
        _done.add(entity.eid)
        containers = tuple(entity.e_schema.fulltext_containers())
        if containers:
            # the entity delegates indexing to its container(s): recurse
            # through the fulltext_container relations
            for rschema, role in containers:
                if role == 'object':
                    targets = getattr(entity, rschema.type)
                else:
                    targets = getattr(entity, 'reverse_%s' % rschema)
                for target in targets:
                    if target.eid in _done:
                        continue
                    for container in target.cw_adapt_to('IFTIndexable').fti_containers(_done):
                        yield container
        else:
            # no container: the entity is indexed on its own
            yield entity

    # weight in ABCD
    entity_weight = 1.0
    # per-attribute weight letter, defaulting to 'C' (see get_words)
    attr_weight = {}

    def get_words(self):
        """used by the full text indexer to get words to index

        this method should only be used on the repository side since it depends
        on the logilab.database package

        :rtype: dict
        :return: mapping of weight letter to the list of indexable words of
          this entity
        """
        from logilab.database.fti import tokenize
        # take care of cases where we're modifying the schema
        entity = self.entity
        pending = self._cw.transaction_data.setdefault('pendingrdefs', set())
        words = {}
        for rschema in entity.e_schema.indexable_attributes():
            if (entity.e_schema, rschema) in pending:
                continue
            weight = self.attr_weight.get(rschema, 'C')
            try:
                value = entity.printable_value(rschema, format=u'text/plain')
            except TransformError:
                continue
            except Exception:
                self.exception("can't add value of %s to text index for entity %s",
                               rschema, entity.eid)
                continue
            if value:
                words.setdefault(weight, []).extend(tokenize(value))
        # also index words of entities linked through fulltext relations
        for rschema, role in entity.e_schema.fulltext_relations():
            if role == 'subject':
                for entity_ in getattr(entity, rschema.type):
                    merge_weight_dict(words, entity_.cw_adapt_to('IFTIndexable').get_words())
            else: # if role == 'object':
                for entity_ in getattr(entity, 'reverse_%s' % rschema.type):
                    merge_weight_dict(words, entity_.cw_adapt_to('IFTIndexable').get_words())
        return words
+
+
def merge_weight_dict(maindict, newdict):
    """Merge `newdict` into `maindict` in place.

    Both map a fulltext weight to a list of words; word lists from
    `newdict` are appended to the corresponding lists in `maindict`.
    """
    for weight, extra_words in newdict.items():
        if weight in maindict:
            maindict[weight].extend(extra_words)
        else:
            maindict[weight] = list(extra_words)
+
+
class IDownloadableAdapter(view.EntityAdapter):
    """interface for downloadable entities

    Concrete adapters must implement every method below; each one raises
    NotImplementedError here.
    """
    __regid__ = 'IDownloadable'
    __abstract__ = True

    def download_url(self, **kwargs): # XXX not really part of this interface
        """return a URL to download entity's content

        It should be a unicode object containing url-encoded ASCII.
        """
        raise NotImplementedError

    def download_content_type(self):
        """return MIME type (unicode) of the downloadable content"""
        raise NotImplementedError

    def download_encoding(self):
        """return encoding (unicode) of the downloadable content"""
        raise NotImplementedError

    def download_file_name(self):
        """return file name (unicode) of the downloadable content"""
        raise NotImplementedError

    def download_data(self):
        """return actual data (bytes) of the downloadable content"""
        raise NotImplementedError
+
+
+# XXX should propose to use two different relations for children/parent
# XXX should propose to use two different relations for children/parent
class ITreeAdapter(view.EntityAdapter):
    """This adapter provides a tree interface.

    It has to be overridden to be configured using the tree_relation,
    child_role and parent_role class attributes to benefit from this default
    implementation.  Subclasses must at least provide `tree_relation`; it is
    deliberately not defined here.

    This class provides the following methods:

    .. automethod: iterparents
    .. automethod: iterchildren
    .. automethod: prefixiter

    .. automethod: is_leaf
    .. automethod: is_root

    .. automethod: root
    .. automethod: parent
    .. automethod: children
    .. automethod: different_type_children
    .. automethod: same_type_children
    .. automethod: children_rql
    .. automethod: path
    """
    __regid__ = 'ITree'
    __abstract__ = True

    # role of the child (resp. parent) in `tree_relation`
    child_role = 'subject'
    parent_role = 'object'

    def children_rql(self):
        """Returns RQL to get the children of the entity."""
        return self.entity.cw_related_rql(self.tree_relation, self.parent_role)

    def different_type_children(self, entities=True):
        """Return children entities of different type as this entity.

        According to the `entities` parameter, return entity objects or the
        equivalent result set.
        """
        res = self.entity.related(self.tree_relation, self.parent_role,
                                  entities=entities)
        eschema = self.entity.e_schema
        if entities:
            return [e for e in res if e.e_schema != eschema]
        return res.filtered_rset(lambda x: x.e_schema != eschema, self.entity.cw_col)

    def same_type_children(self, entities=True):
        """Return children entities of the same type as this entity.

        According to the `entities` parameter, return entity objects or the
        equivalent result set.
        """
        res = self.entity.related(self.tree_relation, self.parent_role,
                                  entities=entities)
        eschema = self.entity.e_schema
        if entities:
            return [e for e in res if e.e_schema == eschema]
        # NOTE(review): this filter uses `is` where the one above uses `!=`;
        # presumably schema instances are unique per type -- confirm
        return res.filtered_rset(lambda x: x.e_schema is eschema, self.entity.cw_col)

    def is_leaf(self):
        """Returns True if the entity does not have any children."""
        return len(self.children()) == 0

    def is_root(self):
        """Returns true if the entity is root of the tree (e.g. has no parent).
        """
        return self.parent() is None

    def root(self):
        """Return the root entity of the tree."""
        return self._cw.entity_from_eid(self.path()[0])

    def parent(self):
        """Returns the parent entity if any, else None (e.g. if we are on the
        root).
        """
        try:
            return self.entity.related(self.tree_relation, self.child_role,
                                       entities=True)[0]
        except (KeyError, IndexError):
            return None

    def children(self, entities=True, sametype=False):
        """Return children entities.

        According to the `entities` parameter, return entity objects or the
        equivalent result set.
        """
        if sametype:
            return self.same_type_children(entities)
        else:
            return self.entity.related(self.tree_relation, self.parent_role,
                                       entities=entities)

    def iterparents(self, strict=True):
        """Return an iterator on the parents of the entity.

        With `strict=False`, the entity itself is yielded first.
        """
        def _uptoroot(self):
            curr = self
            while True:
                curr = curr.parent()
                if curr is None:
                    break
                yield curr
                # re-adapt: the parent may be of a different entity type
                curr = curr.cw_adapt_to('ITree')
        if not strict:
            return chain([self.entity], _uptoroot(self))
        return _uptoroot(self)

    def iterchildren(self, _done=None):
        """Return an iterator over the item's children."""
        if _done is None:
            _done = set()
        for child in self.children():
            if child.eid in _done:
                # guard against cycles in the (supposed) tree
                self.error('loop in %s tree: %s', child.cw_etype.lower(), child)
                continue
            yield child
            _done.add(child.eid)

    def prefixiter(self, _done=None):
        """Return an iterator over the item's descendants in a prefixed order."""
        if _done is None:
            _done = set()
        if self.entity.eid in _done:
            return
        _done.add(self.entity.eid)
        yield self.entity
        for child in self.same_type_children():
            for entity in child.cw_adapt_to('ITree').prefixiter(_done):
                yield entity

    @cached
    def path(self):
        """Returns the list of eids from the root object to this object."""
        path = []
        adapter = self
        entity = adapter.entity
        while entity is not None:
            if entity.eid in path:
                # cycle guard, mirroring iterchildren
                self.error('loop in %s tree: %s', entity.cw_etype.lower(), entity)
                break
            path.append(entity.eid)
            try:
                # check we are not jumping to another tree
                if (adapter.tree_relation != self.tree_relation or
                        adapter.child_role != self.child_role):
                    break
                entity = adapter.parent()
                adapter = entity.cw_adapt_to('ITree')
            except AttributeError:
                break
        path.reverse()
        return path
+
+
class ISerializableAdapter(view.EntityAdapter):
    """Adapter to serialize an entity to a bare python structure that may be
    directly serialized to e.g. JSON.
    """

    __regid__ = 'ISerializable'
    __select__ = is_instance('Any')

    def serialize(self):
        """Return a dict of the entity's cached attribute values plus its
        type, source uri and eid.
        """
        entity = self.entity
        entity.complete()
        serialized = {
            'cw_etype': entity.cw_etype,
            'cw_source': entity.cw_metainformation()['source']['uri'],
            'eid': entity.eid,
        }
        for rschema, _attrschema in entity.e_schema.attribute_definitions():
            name = rschema.type
            try:
                serialized[name] = entity.cw_attr_cache[name]
            except KeyError:
                # Bytes attributes are not kept in the attribute cache
                continue
        return serialized
+
+
+# error handling adapters ######################################################
+
+
class IUserFriendlyError(view.EntityAdapter):
    """Base adapter converting a low-level repository exception into a
    user-facing error.

    Concrete subclasses select on the exception type (via `match_exception`)
    and implement `raise_user_exception`.
    """
    __regid__ = 'IUserFriendlyError'
    __abstract__ = True

    def __init__(self, *args, **kwargs):
        # the original exception is mandatory and popped from kwargs before
        # delegating to EntityAdapter.__init__
        self.exc = kwargs.pop('exc')
        super(IUserFriendlyError, self).__init__(*args, **kwargs)
+
+
class IUserFriendlyUniqueTogether(IUserFriendlyError):
    """Turn a `UniqueTogetherError` into a `ValidationError` carrying one
    message per relation involved in the violated unicity constraint.
    """
    __select__ = match_exception(UniqueTogetherError)

    def raise_user_exception(self):
        errors = {}
        msgargs = {}
        i18nvalues = []
        for rtype in self.exc.rtypes:
            argname = rtype + '-rtype'
            errors[rtype] = _('%(KEY-rtype)s is part of violated unicity constraint')
            msgargs[argname] = rtype
            i18nvalues.append(argname)
        errors[''] = _('some relations violate a unicity constraint')
        raise ValidationError(self.entity.eid, errors, msgargs=msgargs,
                              i18nvalues=i18nvalues)
+
+
class IUserFriendlyCheckConstraint(IUserFriendlyError):
    """Turn a `ViolatedConstraint` into a `ValidationError` blaming the
    attribute the constraint is attached to.
    """
    __select__ = match_exception(ViolatedConstraint)

    def raise_user_exception(self):
        cstrname = self.exc.cstrname
        eschema = self.entity.e_schema
        # nested for/else: scan every attribute rdef until we find the
        # constraint whose generated name matches `cstrname`; `rschema` and
        # `constraint` keep their values after the double break
        for rschema, attrschema in eschema.attribute_definitions():
            rdef = rschema.rdef(eschema, attrschema)
            for constraint in rdef.constraints:
                if cstrname == constraint.name_for(rdef):
                    break
            else:
                continue
            break
        else:
            # the repository reported a constraint we cannot locate in the
            # schema: should not happen
            assert 0
        key = rschema.type + '-subject'
        # use .get since a constraint may be associated to an attribute that isn't edited (e.g.
        # constraint between two attributes). This should be the purpose of an api rework at some
        # point, we currently rely on the fact that such constraint will provide a dedicated user
        # message not relying on the `value` argument
        value = self.entity.cw_edited.get(rschema.type)
        msg, args = constraint.failed_message(key, value, self.entity)
        raise ValidationError(self.entity.eid, {key: msg}, args)
diff -r 1400aee10df4 -r faf279e33298 cubicweb/entities/authobjs.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/entities/authobjs.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,183 @@
+# copyright 2003-2016 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""entity classes user and group entities"""
+
+__docformat__ = "restructuredtext en"
+
+from six import string_types, text_type
+
+from logilab.common.decorators import cached
+
+from cubicweb import Unauthorized
+from cubicweb.entities import AnyEntity, fetch_config
+
class CWGroup(AnyEntity):
    """entity class for CWGroup entities (user groups)"""
    __regid__ = 'CWGroup'
    fetch_attrs, cw_fetch_order = fetch_config(['name'])
    cw_fetch_unrelated_order = cw_fetch_order

    def dc_long_title(self):
        """group name, followed by its translation when it differs"""
        translated = self._cw._(self.name)
        if translated == self.name:
            return self.name
        return '%s (%s)' % (self.name, translated)

    @cached
    def num_users(self):
        """return the number of users in this group"""
        rset = self._cw.execute('Any COUNT(U) WHERE U in_group G, G eid %(g)s',
                                {'g': self.eid})
        return rset[0][0]
+
+
class CWUser(AnyEntity):
    """Entity class for CWUser entities (application users)."""
    __regid__ = 'CWUser'
    fetch_attrs, cw_fetch_order = fetch_config(['login', 'firstname', 'surname'])
    cw_fetch_unrelated_order = cw_fetch_order

    # used by repository to check if the user can log in or not
    AUTHENTICABLE_STATES = ('activated',)

    # low level utilities #####################################################
    def __init__(self, *args, **kwargs):
        # `groups` and `properties` may be supplied by the caller to pre-fill
        # the corresponding lazy caches and avoid extra queries later on
        groups = kwargs.pop('groups', None)
        properties = kwargs.pop('properties', None)
        super(CWUser, self).__init__(*args, **kwargs)
        if groups is not None:
            self._groups = groups
        if properties is not None:
            self._properties = properties

    @property
    def groups(self):
        """set of the names of the groups this user belongs to (lazily
        computed from the `in_group` relation, then cached)"""
        try:
            return self._groups
        except AttributeError:
            self._groups = set(g.name for g in self.in_group)
            return self._groups

    @property
    def properties(self):
        """dict of the CWProperty key/value pairs attached to this user
        (lazily fetched, then cached)"""
        try:
            return self._properties
        except AttributeError:
            self._properties = dict(
                self._cw.execute(
                    'Any K, V WHERE P for_user U, U eid %(userid)s, '
                    'P pkey K, P value V',
                    {'userid': self.eid}))
            return self._properties

    def prefered_language(self, language=None):
        """return language used by this user, if explicitly defined (eg not
        using http negotiation)
        """
        language = language or self.property_value('ui.language')
        vreg = self._cw.vreg
        try:
            vreg.config.translations[language]
        except KeyError:
            language = vreg.property_value('ui.language')
            # NOTE(review): this asserts membership of `language` in the
            # catalog entry `translations[language]` rather than in
            # `translations` itself -- looks suspicious, confirm intent
            assert language in vreg.config.translations[language], language
        return language

    def property_value(self, key):
        """return the value of property `key` for this user, falling back to
        the site-wide value when unset or badly typed"""
        try:
            # properties stored on the user aren't correctly typed
            # (e.g. all values are unicode string)
            return self._cw.vreg.typed_value(key, self.properties[key])
        except KeyError:
            pass
        except ValueError:
            self.warning('incorrect value for eproperty %s of user %s',
                         key, self.login)
        return self._cw.vreg.property_value(key)

    def set_property(self, pkey, value):
        """set property `pkey` to `value` for this user, creating the
        CWProperty entity if it does not exist yet"""
        value = text_type(value)
        try:
            prop = self._cw.execute(
                'CWProperty X WHERE X pkey %(k)s, X for_user U, U eid %(u)s',
                {'k': pkey, 'u': self.eid}).get_entity(0, 0)
        except Exception:
            # no existing property: create one; only managers may bind the
            # property explicitly to this user
            kwargs = dict(pkey=text_type(pkey), value=value)
            if self.is_in_group('managers'):
                kwargs['for_user'] = self
            self._cw.create_entity('CWProperty', **kwargs)
        else:
            prop.cw_set(value=value)

    def matching_groups(self, groups):
        """return the number of the given group(s) in which the user is

        :type groups: str or iterable(str)
        :param groups: a group name or an iterable on group names
        """
        if isinstance(groups, string_types):
            groups = frozenset((groups,))
        elif isinstance(groups, (tuple, list)):
            groups = frozenset(groups)
        return len(groups & self.groups) # XXX return the resulting set instead of its size

    def is_in_group(self, group):
        """convenience / shortcut method to test if the user belongs to `group`
        """
        return group in self.groups

    def is_anonymous(self):
        """ checks if user is an anonymous user"""
        # FIXME on the web-side anonymous user is detected according to config['anonymous-user'],
        # we don't have this info on the server side.
        return self.groups == frozenset(('guests', ))

    def owns(self, eid):
        """return a truthy value (the result set) when this user owns the
        entity with the given eid; Unauthorized is treated as 'does not own'"""
        try:
            return self._cw.execute(
                'Any X WHERE X eid %(x)s, X owned_by U, U eid %(u)s',
                {'x': eid, 'u': self.eid})
        except Unauthorized:
            return False
    owns = cached(owns, keyarg=1)

    # presentation utilities ##################################################

    def name(self):
        """construct a name using firstname / surname or login if not defined"""

        if self.firstname and self.surname:
            return self._cw._('%(firstname)s %(surname)s') % {
                'firstname': self.firstname, 'surname': self.surname}
        if self.firstname:
            return self.firstname
        return self.login

    def dc_title(self):
        return self.login

    dc_long_title = name

    def __call__(self, *args, **kwargs):
        """ugly hack for compatibility between dbapi and repo api

        In the dbapi, Connection and Session have a ``user`` method to
        generated a user for a request In the repo api, Connection and Session
        have a user attribute inherited from SessionRequestBase prototype. This
        ugly hack allows to not break user of the user method.

        XXX Deprecate me ASAP"""
        return self
diff -r 1400aee10df4 -r faf279e33298 cubicweb/entities/lib.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/entities/lib.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,149 @@
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see .
+"""entity classes for optional library entities"""
+
+__docformat__ = "restructuredtext en"
+from warnings import warn
+from datetime import datetime
+
+from six.moves import range
+from six.moves.urllib.parse import urlsplit, urlunsplit
+
+from logilab.mtconverter import xml_escape
+
+from cubicweb import UnknownProperty
+from cubicweb.entity import _marker
+from cubicweb.entities import AnyEntity, fetch_config
+
def mangle_email(address):
    """Return an obfuscated rendition of an email address.

    'name@host.tld' becomes 'name at host dot tld' (dots are only replaced
    in the host part); strings without '@' are returned unchanged.
    """
    if '@' not in address:
        return address
    name, host = address.split('@', 1)
    return '%s at %s' % (name, host.replace('.', ' dot '))
+
+
class EmailAddress(AnyEntity):
    """entity class for EmailAddress entities"""
    __regid__ = 'EmailAddress'
    fetch_attrs, cw_fetch_order = fetch_config(['address', 'alias'])
    rest_attr = 'eid'

    def dc_title(self):
        # 'alias <address>' when an alias is set, else the address alone
        if self.alias:
            return '%s <%s>' % (self.alias, self.display_address())
        return self.display_address()

    @property
    def email_of(self):
        """entity using this email address, or None"""
        return self.reverse_use_email and self.reverse_use_email[0] or None

    @property
    def prefered(self):
        """the preferred form of this address, defaulting to itself"""
        return self.prefered_form and self.prefered_form[0] or self

    def related_emails(self, skipeids=None):
        """yield email entities sent to or by this address, most recent
        first, skipping (and updating) eids listed in `skipeids`"""
        # XXX move to eemail
        # check email relations are in the schema first
        subjrels = self.e_schema.object_relations()
        if not ('sender' in subjrels and 'recipients' in subjrels):
            return
        rset = self._cw.execute('DISTINCT Any X, S, D ORDERBY D DESC '
                                'WHERE X sender Y or X recipients Y, '
                                'X subject S, X date D, Y eid %(y)s',
                                {'y': self.eid})
        if skipeids is None:
            skipeids = set()
        for i in range(len(rset)):
            eid = rset[i][0]
            if eid in skipeids:
                continue
            skipeids.add(eid)
            yield rset.get_entity(i, 0)

    def display_address(self):
        """address, obfuscated when the 'mangle-emails' option is enabled"""
        if self._cw.vreg.config['mangle-emails']:
            return mangle_email(self.address)
        return self.address

    def printable_value(self, attr, value=_marker, attrtype=None,
                        format='text/html'):
        """overriden to return displayable address when necessary"""
        if attr == 'address':
            address = self.display_address()
            if format == 'text/html':
                address = xml_escape(address)
            return address
        return super(EmailAddress, self).printable_value(attr, value, attrtype, format)
+
+
class Bookmark(AnyEntity):
    """customized class for Bookmark entities"""
    __regid__ = 'Bookmark'
    fetch_attrs, cw_fetch_order = fetch_config(['title', 'path'])

    def actual_url(self):
        """absolute URL of the bookmarked page, with the bookmark title
        propagated as a `vtitle` query argument when set"""
        url = self._cw.build_url(self.path)
        if not self.title:
            return url
        scheme, netloc, path, query, fragment = urlsplit(url)
        vtitle = 'vtitle=%s' % self._cw.url_quote(self.title)
        query = '%s&%s' % (query, vtitle) if query else vtitle
        return urlunsplit((scheme, netloc, path, query, fragment))

    def action_url(self):
        """URL redirecting to the bookmarked page"""
        return self.absolute_url() + '/follow'
+
+
class CWProperty(AnyEntity):
    """entity class for CWProperty entities: a persistent key/value setting"""
    __regid__ = 'CWProperty'

    fetch_attrs, cw_fetch_order = fetch_config(['pkey', 'value'])
    rest_attr = 'pkey'

    def typed_value(self):
        """value converted to the type declared for this property key"""
        return self._cw.vreg.typed_value(self.pkey, self.value)

    def dc_description(self, format='text/plain'):
        """translated help text for the property key (empty for unknown keys)"""
        try:
            helpmsg = self._cw.vreg.property_info(self.pkey)['help']
        except UnknownProperty:
            return u''
        return self._cw._(helpmsg)
+
+
class CWCache(AnyEntity):
    """Cache"""
    __regid__ = 'CWCache'
    fetch_attrs, cw_fetch_order = fetch_config(['name'])

    def __init__(self, *args, **kwargs):
        # entity type is deprecated; warn at instantiation time
        warn('[3.19] CWCache entity type is going away soon. '
             'Other caching mechanisms can be used more reliably '
             'to the same effect.',
             DeprecationWarning)
        super(CWCache, self).__init__(*args, **kwargs)

    def touch(self):
        """refresh the timestamp marking the cache as up-to-date"""
        self._cw.execute('SET X timestamp %(t)s WHERE X eid %(x)s',
                         {'x': self.eid, 't': datetime.now()})

    def valid(self, date):
        """return whether the cache has been touched more recently than `date`"""
        return bool(date) and date > self.timestamp
diff -r 1400aee10df4 -r faf279e33298 cubicweb/entities/schemaobjs.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/entities/schemaobjs.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,178 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""schema definition related entities"""
+
+__docformat__ = "restructuredtext en"
+
+from logilab.common.decorators import cached
+
+from yams.schema import role_name
+
+from cubicweb import ValidationError
+from cubicweb.schema import ERQLExpression, RRQLExpression
+
+from cubicweb.entities import AnyEntity, fetch_config
+
+
class CWEType(AnyEntity):
    """entity class for CWEType (entity type) entities"""
    __regid__ = 'CWEType'
    fetch_attrs, cw_fetch_order = fetch_config(['name'])

    def dc_title(self):
        return u'%s (%s)' % (self.name, self._cw._(self.name))

    def dc_long_title(self):
        # final entity types are flagged with a <<final>> stereotype
        if self.final:
            return u'%s <<%s>>' % (self.dc_title(), self._cw._('final'))
        return self.dc_title()
+
+
class CWRType(AnyEntity):
    """entity class for CWRType (relation type) entities"""
    __regid__ = 'CWRType'
    fetch_attrs, cw_fetch_order = fetch_config(['name'])

    def dc_title(self):
        return u'%s (%s)' % (self.name, self._cw._(self.name))

    def dc_long_title(self):
        # title decorated with translated <<stereotypes>> for the set flags
        _ = self._cw._
        stereotypes = [_(flag) for flag in ('symmetric', 'inlined', 'final')
                       if getattr(self, flag)]
        if stereotypes:
            return u'%s <<%s>>' % (self.dc_title(), ', '.join(stereotypes))
        return self.dc_title()

    def check_inlined_allowed(self):
        """check inlining is possible, raise ValidationError if not possible
        """
        # don't use the persistent schema, we may miss cardinality changes
        # in the same transaction
        for rdef in self.reverse_relation_type:
            card = rdef.cardinality[0]
            if card not in '?1':
                qname = role_name('inlined', 'subject')
                rtype = self.name
                stype = rdef.stype
                otype = rdef.otype
                msg = self._cw._("can't set inlined=True, "
                                 "%(stype)s %(rtype)s %(otype)s "
                                 "has cardinality=%(card)s")
                raise ValidationError(self.eid, {qname: msg % locals()})
+
+
class CWRelation(AnyEntity):
    """entity class for CWRelation (relation definition) entities"""
    __regid__ = 'CWRelation'
    fetch_attrs = fetch_config(['cardinality'])[0]

    def dc_title(self):
        # "<subject type> <relation type> <object type>"
        return u'%s %s %s' % (
            self.from_entity[0].name,
            self.relation_type[0].name,
            self.to_entity[0].name)

    def dc_long_title(self):
        # like dc_title, with non-'1' cardinalities displayed in brackets
        card = self.cardinality
        scard, ocard = u'', u''
        if card[0] != '1':
            scard = '[%s]' % card[0]
        if card[1] != '1':
            ocard = '[%s]' % card[1]
        return u'%s %s%s%s %s' % (
            self.from_entity[0].name,
            scard, self.relation_type[0].name, ocard,
            self.to_entity[0].name)

    @property
    def rtype(self):
        """the CWRType entity of this definition"""
        return self.relation_type[0]

    @property
    def stype(self):
        """the subject entity type"""
        return self.from_entity[0]

    @property
    def otype(self):
        """the object entity type"""
        return self.to_entity[0]

    def yams_schema(self):
        """return the yams relation definition object for this entity"""
        rschema = self._cw.vreg.schema.rschema(self.rtype.name)
        return rschema.rdefs[(self.stype.name, self.otype.name)]
+
+
class CWAttribute(CWRelation):
    """entity class for CWAttribute (attribute definition) entities"""
    __regid__ = 'CWAttribute'

    def dc_long_title(self):
        # a '+' marker flags mandatory attributes (subject cardinality '1')
        required = self.cardinality[0] == '1'
        return u'%s %s%s %s' % (
            self.from_entity[0].name,
            '+' if required else u'',
            self.relation_type[0].name,
            self.to_entity[0].name)
+
+
class CWConstraint(AnyEntity):
    """entity class for CWConstraint entities"""
    __regid__ = 'CWConstraint'
    fetch_attrs, cw_fetch_order = fetch_config(['value'])

    def dc_title(self):
        # "<constraint type>(<serialized value>)"
        value = self.value or u''
        return '%s(%s)' % (self.cstrtype[0].name, value)

    @property
    def type(self):
        """name of the constraint type"""
        return self.cstrtype[0].name
+
+
class RQLExpression(AnyEntity):
    """entity class for RQLExpression entities (security / condition
    expressions)"""
    __regid__ = 'RQLExpression'
    fetch_attrs, cw_fetch_order = fetch_config(['exprtype', 'mainvars', 'expression'])

    def dc_title(self):
        return self.expression or u''

    def dc_long_title(self):
        return '%s(%s)' % (self.exprtype, self.expression or u'')

    @property
    def expression_of(self):
        """entity this expression belongs to, found through the first
        matching reverse permission/condition relation (None when unused)"""
        for rel in ('read_permission', 'add_permission', 'delete_permission',
                    'update_permission', 'condition'):
            values = getattr(self, 'reverse_%s' % rel)
            if values:
                return values[0]

    @cached
    def _rqlexpr(self):
        # instantiate the proper schema expression class from `exprtype`;
        # anything that isn't an ERQLExpression is treated as RRQLExpression
        if self.exprtype == 'ERQLExpression':
            return ERQLExpression(self.expression, self.mainvars, self.eid)
        #if self.exprtype == 'RRQLExpression':
        return RRQLExpression(self.expression, self.mainvars, self.eid)

    def check_expression(self, *args, **kwargs):
        """evaluate the expression, forwarding arguments to its check()"""
        return self._rqlexpr().check(*args, **kwargs)
diff -r 1400aee10df4 -r faf279e33298 cubicweb/entities/sources.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/entities/sources.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,184 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""data source related entities"""
+
+__docformat__ = "restructuredtext en"
+
+import re
+from socket import gethostname
+import logging
+
+from logilab.common.textutils import text_to_dict
+from logilab.common.configuration import OptionError
+from logilab.mtconverter import xml_escape
+
+from cubicweb.entities import AnyEntity, fetch_config
+
class _CWSourceCfgMixIn(object):
    """Mixin shared by CWSource / CWSourceHostConfig giving access to the
    `config` text attribute parsed as a dictionary, plus an update helper."""

    @property
    def dictconfig(self):
        """return the `config` attribute parsed into a dict (empty when unset)"""
        return self.config and text_to_dict(self.config) or {}

    def update_config(self, skip_unknown=False, **config):
        """update the source configuration with the given options and persist it

        :param skip_unknown: when true, silently ignore options unknown to
          this source type instead of raising `OptionError`
        """
        from cubicweb.server import SOURCE_TYPES
        from cubicweb.server.serverconfig import (SourceConfiguration,
                                                  generate_source_config)
        cfg = self.dictconfig
        cfg.update(config)
        options = SOURCE_TYPES[self.type].options
        sconfig = SourceConfiguration(self._cw.vreg.config, options=options)
        for opt, val in cfg.items():
            try:
                sconfig.set_option(opt, val)
            except OptionError:
                if skip_unknown:
                    continue
                raise
        # the `unicode` builtin doesn't exist on Python 3: decode explicitly
        # instead, so this works on both Python 2 and Python 3
        cfgstr = generate_source_config(sconfig)
        if isinstance(cfgstr, bytes):
            cfgstr = cfgstr.decode(self._cw.encoding)
        self.cw_set(config=cfgstr)
+
+
class CWSource(_CWSourceCfgMixIn, AnyEntity):
    """entity class for CWSource (data source) entities"""
    __regid__ = 'CWSource'
    fetch_attrs, cw_fetch_order = fetch_config(['name', 'type'])

    @property
    def host_config(self):
        """source configuration dict, overridden by every host configuration
        whose pattern matches the current host name"""
        dictconfig = self.dictconfig
        host = gethostname()
        for hostcfg in self.host_configs:
            if hostcfg.match(host):
                self.info('matching host config %s for source %s',
                          hostcfg.match_host, self.name)
                dictconfig.update(hostcfg.dictconfig)
        return dictconfig

    @property
    def host_configs(self):
        """CWSourceHostConfig entities attached to this source"""
        return self.reverse_cw_host_config_of

    def init_mapping(self, mapping):
        """create a CWSourceSchemaConfig entity per (schema key, options)
        pair in `mapping`; a key is either a (subject, rtype, object) triple
        (with '*' wildcards), an entity type name (capitalized) or a relation
        type name"""
        for key, options in mapping:
            if isinstance(key, tuple): # relation definition
                assert len(key) == 3
                restrictions = ['X relation_type RT, RT name %(rt)s']
                kwargs = {'rt': key[1]}
                if key[0] != '*':
                    restrictions.append('X from_entity FT, FT name %(ft)s')
                    kwargs['ft'] = key[0]
                if key[2] != '*':
                    restrictions.append('X to_entity TT, TT name %(tt)s')
                    kwargs['tt'] = key[2]
                rql = 'Any X WHERE %s' % ','.join(restrictions)
                schemarset = self._cw.execute(rql, kwargs)
            elif key[0].isupper(): # entity type
                schemarset = self._cw.execute('CWEType X WHERE X name %(et)s',
                                              {'et': key})
            else: # relation type
                schemarset = self._cw.execute('CWRType X WHERE X name %(rt)s',
                                              {'rt': key})
            for schemaentity in schemarset.entities():
                self._cw.create_entity('CWSourceSchemaConfig',
                                       cw_for_source=self,
                                       cw_schema=schemaentity,
                                       options=options)

    @property
    def repo_source(self):
        """repository only property, not available from the web side (eg
        self._cw is expected to be a server session)
        """
        return self._cw.repo.sources_by_eid[self.eid]
+
+
class CWSourceHostConfig(_CWSourceCfgMixIn, AnyEntity):
    """entity class for CWSourceHostConfig: per-host configuration overrides
    for a data source"""
    __regid__ = 'CWSourceHostConfig'
    fetch_attrs, cw_fetch_order = fetch_config(['match_host', 'config'])

    @property
    def cwsource(self):
        """the CWSource this host configuration applies to"""
        return self.cw_host_config_of[0]

    def match(self, hostname):
        """return a regex match object when `hostname` matches the
        `match_host` pattern, else None"""
        return re.match(self.match_host, hostname)
+
+
class CWSourceSchemaConfig(AnyEntity):
    """entity class for CWSourceSchemaConfig: mapping between a source and a
    part of the schema"""
    __regid__ = 'CWSourceSchemaConfig'
    fetch_attrs, cw_fetch_order = fetch_config(['cw_for_source', 'cw_schema', 'options'])

    def dc_title(self):
        # translated entity type name followed by the eid
        return self._cw._(self.cw_etype) + ' #%s' % self.eid

    @property
    def schema(self):
        """the mapped schema entity"""
        return self.cw_schema[0]

    @property
    def cwsource(self):
        """the CWSource this mapping belongs to"""
        return self.cw_for_source[0]
+
+
class CWDataImport(AnyEntity):
    """entity class for CWDataImport entities: record of one import
    performed by a data source, with its log"""
    __regid__ = 'CWDataImport'
    repo_source = _logs = None # please pylint

    def init(self):
        # reset the in-memory log buffer and bind the repository-side source
        self._logs = []
        self.repo_source = self.cwsource.repo_source

    def dc_title(self):
        return '%s [%s]' % (self.printable_value('start_timestamp'),
                            self.printable_value('status'))

    @property
    def cwsource(self):
        """the CWSource entity this import belongs to"""
        return self.cw_import_of[0]

    def record_debug(self, msg, path=None, line=None):
        """buffer `msg` at DEBUG level and forward it to the source logger"""
        self._log(logging.DEBUG, msg, path, line)
        self.repo_source.debug(msg)

    def record_info(self, msg, path=None, line=None):
        """buffer `msg` at INFO level and forward it to the source logger"""
        self._log(logging.INFO, msg, path, line)
        self.repo_source.info(msg)

    def record_warning(self, msg, path=None, line=None):
        """buffer `msg` at WARNING level and forward it to the source logger"""
        self._log(logging.WARNING, msg, path, line)
        self.repo_source.warning(msg)

    def record_error(self, msg, path=None, line=None):
        """buffer `msg` at ERROR level and mark the import as failed"""
        self._status = u'failed'
        self._log(logging.ERROR, msg, path, line)
        self.repo_source.error(msg)

    def record_fatal(self, msg, path=None, line=None):
        """buffer `msg` at FATAL level and mark the import as failed"""
        self._status = u'failed'
        self._log(logging.FATAL, msg, path, line)
        self.repo_source.fatal(msg)

    def _log(self, severity, msg, path=None, line=None):
        # tab-separated record; msg is xml-escaped, presumably because logs
        # are rendered as markup -- TODO confirm against the log view
        encodedmsg = u'%s\t%s\t%s\t%s ' % (severity, path or u'',
                                           line or u'', xml_escape(msg))
        self._logs.append(encodedmsg)

    def write_log(self, session, **kwargs):
        """persist buffered log records (and final status) on the entity,
        then clear the buffer"""
        if 'status' not in kwargs:
            kwargs['status'] = getattr(self, '_status', u'success')
        self.cw_set(log=u' '.join(self._logs), **kwargs)
        self._logs = []
diff -r 1400aee10df4 -r faf279e33298 cubicweb/entities/test/data/migration/postcreate.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/entities/test/data/migration/postcreate.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,19 @@
+# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see .
# create a minimal workflow for Bookmark entities, used by the entities tests
# (add_workflow is presumably injected into the migration script namespace --
# TODO confirm against the migration environment)
wf = add_workflow(u'bmk wf', 'Bookmark')
wf.add_state(u'hop', initial=True)
diff -r 1400aee10df4 -r faf279e33298 cubicweb/entities/test/data/schema.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/entities/test/data/schema.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,37 @@
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see .
+"""entities tests schema"""
+
+from yams.buildobjs import EntityType, String, RichString, Int
+from cubicweb.schema import make_workflowable
+
# Company / Division / SubDivision form a specialization chain used by the
# entity class selection tests
class Company(EntityType):
    order = Int()
    name = String()
    description = RichString()

class Division(Company):
    __specializes_schema__ = True

class SubDivision(Division):
    __specializes_schema__ = True


from cubicweb.schemas import bootstrap, Bookmark
# make CWGroup and Bookmark workflowable so workflow tests have targets
make_workflowable(bootstrap.CWGroup)
make_workflowable(Bookmark.Bookmark)
diff -r 1400aee10df4 -r faf279e33298 cubicweb/entities/test/unittest_base.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/entities/test/unittest_base.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,262 @@
+# -*- coding: utf-8 -*-
+# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see .
+"""unit tests for cubicweb.entities.base module
+"""
+
+from logilab.common.testlib import unittest_main
+from logilab.common.decorators import clear_cache
+from logilab.common.registry import yes
+
+from cubicweb.devtools.testlib import CubicWebTC
+
+from cubicweb.entities import AnyEntity
+
+
class BaseEntityTC(CubicWebTC):
    """base test case creating a plain 'member' user for entity class tests"""

    def setup_database(self):
        with self.admin_access.repo_cnx() as cnx:
            # keep only the eid: entities must not escape their connection
            self.membereid = self.create_user(cnx, 'member').eid
            cnx.commit()
+
+
class MetadataTC(BaseEntityTC):
    """tests for entity metadata accessors (creator, type, meta attributes)
    and full-text indexation query generation"""

    def test_creator(self):
        with self.new_access('member').repo_cnx() as cnx:
            entity = cnx.create_entity('Bookmark', title=u"hello", path=u'project/cubicweb')
            cnx.commit()
            self.assertEqual(entity.creator.eid, self.membereid)
            self.assertEqual(entity.dc_creator(), u'member')

    def test_type(self):
        # dc_type may be translated
        with self.admin_access.client_cnx() as cnx:
            member = cnx.entity_from_eid(self.membereid)
            self.assertEqual(member.dc_type(), 'CWUser')

    def test_cw_etype(self):
        # cw_etype is never translated
        with self.admin_access.client_cnx() as cnx:
            member = cnx.entity_from_eid(self.membereid)
            self.assertEqual(member.cw_etype, 'CWUser')

    def test_entity_meta_attributes(self):
        # XXX move to yams
        self.assertEqual(self.schema['CWUser'].meta_attributes(), {})
        self.assertEqual(dict((str(k), v)
                              for k, v in self.schema['State'].meta_attributes().items()),
                         {'description_format': ('format', 'description')})

    def test_fti_rql_method(self):
        # custom entity class overriding the deprecated
        # cw_fti_index_rql_queries hook, registered temporarily below
        class EmailAddress(AnyEntity):
            __regid__ = 'EmailAddress'
            __select__ = AnyEntity.__select__ & yes(2)

            @classmethod
            def cw_fti_index_rql_queries(cls, req):
                return ['EmailAddress Y']

        with self.admin_access.web_request() as req:
            req.create_entity('EmailAddress', address=u'foo@bar.com')
            eclass = self.vreg['etypes'].etype_class('EmailAddress')
            # deprecated
            self.assertEqual(['Any X, ADDRESS, ALIAS WHERE X is EmailAddress, '
                              'X address ADDRESS, X alias ALIAS'],
                             eclass.cw_fti_index_rql_queries(req))

            self.assertEqual(['Any X, ADDRESS, ALIAS ORDERBY X LIMIT 1000 WHERE X is EmailAddress, '
                              'X address ADDRESS, X alias ALIAS, X eid > 0'],
                             [rset.rql for rset in eclass.cw_fti_index_rql_limit(req)])

            # test backwards compatibility with custom method
            with self.temporary_appobjects(EmailAddress):
                self.vreg['etypes'].clear_caches()
                eclass = self.vreg['etypes'].etype_class('EmailAddress')
                self.assertEqual(['EmailAddress Y'],
                                 [rset.rql for rset in eclass.cw_fti_index_rql_limit(req)])
+
+
class EmailAddressTC(BaseEntityTC):
    """tests for the EmailAddress entity class (preferred form, mangling,
    html escaping)"""

    def test_canonical_form(self):
        with self.admin_access.repo_cnx() as cnx:
            email1 = cnx.execute('INSERT EmailAddress X: '
                                 'X address "maarten.ter.huurne@philips.com"').get_entity(0, 0)
            email2 = cnx.execute('INSERT EmailAddress X: '
                                 'X address "maarten@philips.com"').get_entity(0, 0)
            email3 = cnx.execute('INSERT EmailAddress X: '
                                 'X address "toto@logilab.fr"').get_entity(0, 0)
            email1.cw_set(prefered_form=email2)
            # prefered follows the prefered_form relation, defaulting to self
            self.assertEqual(email1.prefered.eid, email2.eid)
            self.assertEqual(email2.prefered.eid, email2.eid)
            self.assertEqual(email3.prefered.eid, email3.eid)

    def test_mangling(self):
        query = 'INSERT EmailAddress X: X address "maarten.ter.huurne@philips.com"'
        with self.admin_access.repo_cnx() as cnx:
            email = cnx.execute(query).get_entity(0, 0)
            self.assertEqual(email.display_address(), 'maarten.ter.huurne@philips.com')
            self.assertEqual(email.printable_value('address'), 'maarten.ter.huurne@philips.com')
            self.vreg.config.global_set_option('mangle-emails', True)
            try:
                self.assertEqual(email.display_address(), 'maarten.ter.huurne at philips dot com')
                self.assertEqual(email.printable_value('address'),
                                 'maarten.ter.huurne at philips dot com')
                email = cnx.execute('INSERT EmailAddress X: X address "syt"').get_entity(0, 0)
                self.assertEqual(email.display_address(), 'syt')
                self.assertEqual(email.printable_value('address'), 'syt')
            finally:
                # restore the global option whatever happens above
                self.vreg.config.global_set_option('mangle-emails', False)

    def test_printable_value_escape(self):
        with self.admin_access.repo_cnx() as cnx:
            email = cnx.execute('INSERT EmailAddress X: '
                                'X address "maarten&ter@philips.com"').get_entity(0, 0)
            self.assertEqual(email.printable_value('address'),
                             'maarten&ter@philips.com')
            self.assertEqual(email.printable_value('address', format='text/plain'),
                             'maarten&ter@philips.com')
+
+
class CWUserTC(BaseEntityTC):
    """tests for the CWUser entity class (groups, naming, mass-mailing keys)"""

    def test_complete(self):
        with self.admin_access.repo_cnx() as cnx:
            e = cnx.execute('CWUser X WHERE X login "admin"').get_entity(0, 0)
            e.complete()

    def test_matching_groups(self):
        with self.admin_access.repo_cnx() as cnx:
            e = cnx.execute('CWUser X WHERE X login "admin"').get_entity(0, 0)
            self.assertTrue(e.matching_groups('managers'))
            self.assertFalse(e.matching_groups('xyz'))
            self.assertTrue(e.matching_groups(('xyz', 'managers')))
            self.assertFalse(e.matching_groups(('xyz', 'abcd')))

    def test_dc_title_and_name(self):
        # dc_title always returns the login; name() prefers firstname/surname
        with self.admin_access.repo_cnx() as cnx:
            e = cnx.execute('CWUser U WHERE U login "member"').get_entity(0, 0)
            self.assertEqual(e.dc_title(), 'member')
            self.assertEqual(e.name(), 'member')
            e.cw_set(firstname=u'bouah')
            self.assertEqual(e.dc_title(), 'member')
            self.assertEqual(e.name(), u'bouah')
            e.cw_set(surname=u'lôt')
            self.assertEqual(e.dc_title(), 'member')
            self.assertEqual(e.name(), u'bouah lôt')

    def test_falsey_dc_title(self):
        # a falsy attribute value (0) must still be rendered in the title
        with self.admin_access.repo_cnx() as cnx:
            e = cnx.create_entity('Company', order=0, name=u'pythonian')
            cnx.commit()
            self.assertEqual(u'0', e.dc_title())

    def test_allowed_massmail_keys(self):
        with self.admin_access.repo_cnx() as cnx:
            e = cnx.execute('CWUser U WHERE U login "member"').get_entity(0, 0)
            # Bytes/Password attributes should be omitted
            self.assertEqual(
                e.cw_adapt_to('IEmailable').allowed_massmail_keys(),
                set(('surname', 'firstname', 'login', 'last_login_time',
                     'creation_date', 'modification_date', 'cwuri', 'eid'))
            )

    def test_cw_instantiate_object_relation(self):
        """ a weird non regression test """
        with self.admin_access.repo_cnx() as cnx:
            e = cnx.execute('CWUser U WHERE U login "member"').get_entity(0, 0)
            cnx.create_entity('CWGroup', name=u'logilab', reverse_in_group=e)
+
+
class HTMLtransformTC(BaseEntityTC):
    """check that rendering a ReST description to html sanitizes raw html,
    while the plain-text rendering keeps it"""

    def test_sanitized_html(self):
        with self.admin_access.repo_cnx() as cnx:
            # NOTE(review): the `.. raw:: html` payload (presumably a
            # `<script>alert(...)</script>` snippet, given the assertions
            # below) appears to be missing from this literal -- confirm
            # against upstream history
            c = cnx.create_entity('Company', name=u'Babar',
                                  description=u"""
Title
=====

Elephant management best practices.

.. raw:: html


""", description_format=u'text/rest')
            cnx.commit()
            c.cw_clear_all_caches()
            self.assertIn('alert',
                          c.printable_value('description', format='text/plain'))
            self.assertNotIn('alert',
                             c.printable_value('description', format='text/html'))
+
+
class SpecializedEntityClassesTC(CubicWebTC):
    """tests for entity class selection along the schema specialization
    chain (Company / Division / SubDivision)"""

    def select_eclass(self, etype):
        # clear selector cache
        clear_cache(self.vreg['etypes'], 'etype_class')
        return self.vreg['etypes'].etype_class(etype)

    def test_etype_class_selection_and_specialization(self):
        # no specific class for Subdivisions, the default one should be selected
        eclass = self.select_eclass('SubDivision')
        self.assertTrue(eclass.__autogenerated__)
        # self.assertEqual(eclass.__bases__, (AnyEntity,))
        # build class from most generic to most specific and make
        # sure the most specific is always selected
        self.vreg._loadedmods[__name__] = {}
        for etype in ('Company', 'Division', 'SubDivision'):
            class Foo(AnyEntity):
                __regid__ = etype
            self.vreg.register(Foo)
            eclass = self.select_eclass('SubDivision')
            self.assertTrue(eclass.__autogenerated__)
            self.assertFalse(eclass is Foo)
            if etype == 'SubDivision':
                self.assertEqual(eclass.__bases__, (Foo,))
            else:
                self.assertEqual(eclass.__bases__[0].__bases__, (Foo,))
        # check Division eclass is still selected for plain Division entities
        eclass = self.select_eclass('Division')
        self.assertEqual(eclass.cw_etype, 'Division')
+
+
class ISerializableTC(CubicWebTC):
    """test the ISerializable adapter output for a simple entity"""

    def test_serialization(self):
        with self.admin_access.repo_cnx() as cnx:
            entity = cnx.create_entity('CWGroup', name=u'tmp')
            cnx.commit()
            serializer = entity.cw_adapt_to('ISerializable')
            # serialized form: metadata plus the entity's own attributes
            expected = {
                'cw_etype': u'CWGroup',
                'cw_source': 'system',
                'eid': entity.eid,
                'cwuri': u'http://testing.fr/cubicweb/%s' % entity.eid,
                'creation_date': entity.creation_date,
                'modification_date': entity.modification_date,
                'name': u'tmp',
            }
            self.assertEqual(serializer.serialize(), expected)
+
+
# run the tests when this module is executed directly
if __name__ == '__main__':
    unittest_main()
diff -r 1400aee10df4 -r faf279e33298 cubicweb/entities/test/unittest_wfobjs.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/entities/test/unittest_wfobjs.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,705 @@
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+
+from cubicweb import ValidationError
+from cubicweb.devtools.testlib import CubicWebTC
+
+def add_wf(shell, etype, name=None, default=False):
+ if name is None:
+ name = etype
+ return shell.add_workflow(name, etype, default=default,
+ ensure_workflowable=False)
+
+def parse_hist(wfhist):
+ return [(ti.previous_state.name, ti.new_state.name,
+ ti.transition and ti.transition.name, ti.comment)
+ for ti in wfhist]
+
+
+class WorkflowBuildingTC(CubicWebTC):
+
+ def test_wf_construction(self):
+ with self.admin_access.shell() as shell:
+ wf = add_wf(shell, 'Company')
+ foo = wf.add_state(u'foo', initial=True)
+ bar = wf.add_state(u'bar')
+ self.assertEqual(wf.state_by_name('bar').eid, bar.eid)
+ self.assertEqual(wf.state_by_name('barrr'), None)
+ baz = wf.add_transition(u'baz', (foo,), bar, ('managers',))
+ self.assertEqual(wf.transition_by_name('baz').eid, baz.eid)
+ self.assertEqual(len(baz.require_group), 1)
+ self.assertEqual(baz.require_group[0].name, 'managers')
+
+ def test_duplicated_state(self):
+ with self.admin_access.shell() as shell:
+ wf = add_wf(shell, 'Company')
+ wf.add_state(u'foo', initial=True)
+ shell.commit()
+ with self.assertRaises(ValidationError) as cm:
+ wf.add_state(u'foo')
+ self.assertEqual({'name': u'%(KEY-rtype)s is part of violated unicity constraint',
+ 'state_of': u'%(KEY-rtype)s is part of violated unicity constraint',
+ '': u'some relations violate a unicity constraint'},
+ cm.exception.errors)
+ shell.rollback()
+            # no problem if not in the same workflow
+ wf2 = add_wf(shell, 'Company')
+ foo = wf2.add_state(u'foo', initial=True)
+ shell.commit()
+ # gnark gnark
+ bar = wf.add_state(u'bar')
+ shell.commit()
+ with self.assertRaises(ValidationError) as cm:
+ bar.cw_set(name=u'foo')
+ shell.rollback()
+ self.assertEqual({'name': u'%(KEY-rtype)s is part of violated unicity constraint',
+ 'state_of': u'%(KEY-rtype)s is part of violated unicity constraint',
+ '': u'some relations violate a unicity constraint'},
+ cm.exception.errors)
+
+ def test_duplicated_transition(self):
+ with self.admin_access.shell() as shell:
+ wf = add_wf(shell, 'Company')
+ foo = wf.add_state(u'foo', initial=True)
+ bar = wf.add_state(u'bar')
+ wf.add_transition(u'baz', (foo,), bar, ('managers',))
+ with self.assertRaises(ValidationError) as cm:
+ wf.add_transition(u'baz', (bar,), foo)
+ self.assertEqual({'name': u'%(KEY-rtype)s is part of violated unicity constraint',
+ 'transition_of': u'%(KEY-rtype)s is part of violated unicity constraint',
+ '': u'some relations violate a unicity constraint'},
+ cm.exception.errors)
+ shell.rollback()
+            # no problem if not in the same workflow
+ wf2 = add_wf(shell, 'Company')
+ foo = wf2.add_state(u'foo', initial=True)
+ bar = wf2.add_state(u'bar')
+ wf2.add_transition(u'baz', (foo,), bar, ('managers',))
+ shell.commit()
+ # gnark gnark
+ biz = wf2.add_transition(u'biz', (bar,), foo)
+ shell.commit()
+ with self.assertRaises(ValidationError) as cm:
+ biz.cw_set(name=u'baz')
+ shell.rollback()
+ self.assertEqual({'name': u'%(KEY-rtype)s is part of violated unicity constraint',
+ 'transition_of': u'%(KEY-rtype)s is part of violated unicity constraint',
+ '': u'some relations violate a unicity constraint'},
+ cm.exception.errors)
+
+
+class WorkflowTC(CubicWebTC):
+
+ def setup_database(self):
+ rschema = self.schema['in_state']
+ for rdef in rschema.rdefs.values():
+ self.assertEqual(rdef.cardinality, '1*')
+ with self.admin_access.client_cnx() as cnx:
+ self.member_eid = self.create_user(cnx, 'member').eid
+ cnx.commit()
+
+ def test_workflow_base(self):
+ with self.admin_access.web_request() as req:
+ e = self.create_user(req, 'toto')
+ iworkflowable = e.cw_adapt_to('IWorkflowable')
+ self.assertEqual(iworkflowable.state, 'activated')
+ iworkflowable.change_state('deactivated', u'deactivate 1')
+ req.cnx.commit()
+ iworkflowable.change_state('activated', u'activate 1')
+ req.cnx.commit()
+ iworkflowable.change_state('deactivated', u'deactivate 2')
+ req.cnx.commit()
+ e.cw_clear_relation_cache('wf_info_for', 'object')
+ self.assertEqual([tr.comment for tr in e.reverse_wf_info_for],
+ ['deactivate 1', 'activate 1', 'deactivate 2'])
+ self.assertEqual(iworkflowable.latest_trinfo().comment, 'deactivate 2')
+
+ def test_possible_transitions(self):
+ with self.admin_access.web_request() as req:
+ user = req.execute('CWUser X').get_entity(0, 0)
+ iworkflowable = user.cw_adapt_to('IWorkflowable')
+ trs = list(iworkflowable.possible_transitions())
+ self.assertEqual(len(trs), 1)
+ self.assertEqual(trs[0].name, u'deactivate')
+ self.assertEqual(trs[0].destination(None).name, u'deactivated')
+ # test a std user get no possible transition
+ with self.new_access('member').web_request() as req:
+ # fetch the entity using the new session
+ trs = list(req.user.cw_adapt_to('IWorkflowable').possible_transitions())
+ self.assertEqual(len(trs), 0)
+
+ def _test_manager_deactivate(self, user):
+ iworkflowable = user.cw_adapt_to('IWorkflowable')
+ user.cw_clear_relation_cache('in_state', 'subject')
+ self.assertEqual(len(user.in_state), 1)
+ self.assertEqual(iworkflowable.state, 'deactivated')
+ trinfo = iworkflowable.latest_trinfo()
+ self.assertEqual(trinfo.previous_state.name, 'activated')
+ self.assertEqual(trinfo.new_state.name, 'deactivated')
+ self.assertEqual(trinfo.comment, 'deactivate user')
+ self.assertEqual(trinfo.comment_format, 'text/plain')
+ return trinfo
+
+ def test_change_state(self):
+ with self.admin_access.client_cnx() as cnx:
+ user = cnx.user
+ iworkflowable = user.cw_adapt_to('IWorkflowable')
+ iworkflowable.change_state('deactivated', comment=u'deactivate user')
+ trinfo = self._test_manager_deactivate(user)
+ self.assertEqual(trinfo.transition, None)
+
+ def test_set_in_state_bad_wf(self):
+ with self.admin_access.shell() as shell:
+ wf = add_wf(shell, 'CWUser')
+ s = wf.add_state(u'foo', initial=True)
+ shell.commit()
+ with self.admin_access.repo_cnx() as cnx:
+ with cnx.security_enabled(write=False):
+ with self.assertRaises(ValidationError) as cm:
+ cnx.execute('SET X in_state S WHERE X eid %(x)s, S eid %(s)s',
+ {'x': cnx.user.eid, 's': s.eid})
+ self.assertEqual(cm.exception.errors, {'in_state-subject': "state doesn't belong to entity's workflow. "
+ "You may want to set a custom workflow for this entity first."})
+
+ def test_fire_transition(self):
+ with self.admin_access.client_cnx() as cnx:
+ user = cnx.user
+ iworkflowable = user.cw_adapt_to('IWorkflowable')
+ iworkflowable.fire_transition('deactivate', comment=u'deactivate user')
+ user.cw_clear_all_caches()
+ self.assertEqual(iworkflowable.state, 'deactivated')
+ self._test_manager_deactivate(user)
+ trinfo = self._test_manager_deactivate(user)
+ self.assertEqual(trinfo.transition.name, 'deactivate')
+
+ def test_goback_transition(self):
+ with self.admin_access.web_request() as req:
+ wf = req.user.cw_adapt_to('IWorkflowable').current_workflow
+ asleep = wf.add_state('asleep')
+ wf.add_transition('rest', (wf.state_by_name('activated'),
+ wf.state_by_name('deactivated')),
+ asleep)
+ wf.add_transition('wake up', asleep)
+ user = self.create_user(req, 'stduser')
+ iworkflowable = user.cw_adapt_to('IWorkflowable')
+ iworkflowable.fire_transition('rest')
+ req.cnx.commit()
+ iworkflowable.fire_transition('wake up')
+ req.cnx.commit()
+ self.assertEqual(iworkflowable.state, 'activated')
+ iworkflowable.fire_transition('deactivate')
+ req.cnx.commit()
+ iworkflowable.fire_transition('rest')
+ req.cnx.commit()
+ iworkflowable.fire_transition('wake up')
+ req.cnx.commit()
+ user.cw_clear_all_caches()
+ self.assertEqual(iworkflowable.state, 'deactivated')
+
+ # XXX test managers can change state without matching transition
+
+ def _test_stduser_deactivate(self):
+ with self.admin_access.repo_cnx() as cnx:
+ self.create_user(cnx, 'tutu')
+ with self.new_access('tutu').web_request() as req:
+ iworkflowable = req.entity_from_eid(self.member_eid).cw_adapt_to('IWorkflowable')
+ with self.assertRaises(ValidationError) as cm:
+ iworkflowable.fire_transition('deactivate')
+ self.assertEqual(cm.exception.errors, {'by_transition-subject': "transition may not be fired"})
+ with self.new_access('member').web_request() as req:
+ iworkflowable = req.entity_from_eid(self.member_eid).cw_adapt_to('IWorkflowable')
+ iworkflowable.fire_transition('deactivate')
+ req.cnx.commit()
+ with self.assertRaises(ValidationError) as cm:
+ iworkflowable.fire_transition('activate')
+ self.assertEqual(cm.exception.errors, {'by_transition-subject': "transition may not be fired"})
+
+ def test_fire_transition_owned_by(self):
+ with self.admin_access.repo_cnx() as cnx:
+ cnx.execute('INSERT RQLExpression X: X exprtype "ERQLExpression", '
+ 'X expression "X owned_by U", T condition X '
+ 'WHERE T name "deactivate"')
+ cnx.commit()
+ self._test_stduser_deactivate()
+
+ def test_fire_transition_has_update_perm(self):
+ with self.admin_access.repo_cnx() as cnx:
+ cnx.execute('INSERT RQLExpression X: X exprtype "ERQLExpression", '
+ 'X expression "U has_update_permission X", T condition X '
+ 'WHERE T name "deactivate"')
+ cnx.commit()
+ self._test_stduser_deactivate()
+
+ def test_swf_base(self):
+ """subworkflow
+
+ +-----------+ tr1 +-----------+
+ | swfstate1 | ------>| swfstate2 |
+ +-----------+ +-----------+
+ | tr2 +-----------+
+ `------>| swfstate3 |
+ +-----------+
+
+ main workflow
+
+ +--------+ swftr1 +--------+
+ | state1 | -------[swfstate2]->| state2 |
+ +--------+ | +--------+
+ | +--------+
+ `-[swfstate3]-->| state3 |
+ +--------+
+ """
+ # sub-workflow
+ with self.admin_access.shell() as shell:
+ swf = add_wf(shell, 'CWGroup', name='subworkflow')
+ swfstate1 = swf.add_state(u'swfstate1', initial=True)
+ swfstate2 = swf.add_state(u'swfstate2')
+ swfstate3 = swf.add_state(u'swfstate3')
+ tr1 = swf.add_transition(u'tr1', (swfstate1,), swfstate2)
+ tr2 = swf.add_transition(u'tr2', (swfstate1,), swfstate3)
+ # main workflow
+ mwf = add_wf(shell, 'CWGroup', name='main workflow', default=True)
+ state1 = mwf.add_state(u'state1', initial=True)
+ state2 = mwf.add_state(u'state2')
+ state3 = mwf.add_state(u'state3')
+ swftr1 = mwf.add_wftransition(u'swftr1', swf, state1,
+ [(swfstate2, state2), (swfstate3, state3)])
+ swf.cw_clear_all_caches()
+ self.assertEqual(swftr1.destination(None).eid, swfstate1.eid)
+ # workflows built, begin test
+ with self.admin_access.web_request() as req:
+ group = req.create_entity('CWGroup', name=u'grp1')
+ req.cnx.commit()
+ iworkflowable = group.cw_adapt_to('IWorkflowable')
+ self.assertEqual(iworkflowable.current_state.eid, state1.eid)
+ self.assertEqual(iworkflowable.current_workflow.eid, mwf.eid)
+ self.assertEqual(iworkflowable.main_workflow.eid, mwf.eid)
+ self.assertEqual(iworkflowable.subworkflow_input_transition(), None)
+ iworkflowable.fire_transition('swftr1', u'go')
+ req.cnx.commit()
+ group.cw_clear_all_caches()
+ self.assertEqual(iworkflowable.current_state.eid, swfstate1.eid)
+ self.assertEqual(iworkflowable.current_workflow.eid, swf.eid)
+ self.assertEqual(iworkflowable.main_workflow.eid, mwf.eid)
+ self.assertEqual(iworkflowable.subworkflow_input_transition().eid, swftr1.eid)
+ iworkflowable.fire_transition('tr1', u'go')
+ req.cnx.commit()
+ group.cw_clear_all_caches()
+ self.assertEqual(iworkflowable.current_state.eid, state2.eid)
+ self.assertEqual(iworkflowable.current_workflow.eid, mwf.eid)
+ self.assertEqual(iworkflowable.main_workflow.eid, mwf.eid)
+ self.assertEqual(iworkflowable.subworkflow_input_transition(), None)
+            # forcing back to swfstate1 is impossible since we can no longer
+            # find the subworkflow input transition
+ with self.assertRaises(ValidationError) as cm:
+ iworkflowable.change_state(swfstate1, u'gadget')
+ self.assertEqual(cm.exception.errors, {'to_state-subject': "state doesn't belong to entity's workflow"})
+ req.cnx.rollback()
+ # force back to state1
+ iworkflowable.change_state('state1', u'gadget')
+ iworkflowable.fire_transition('swftr1', u'au')
+ group.cw_clear_all_caches()
+ iworkflowable.fire_transition('tr2', u'chapeau')
+ req.cnx.commit()
+ group.cw_clear_all_caches()
+ self.assertEqual(iworkflowable.current_state.eid, state3.eid)
+ self.assertEqual(iworkflowable.current_workflow.eid, mwf.eid)
+ self.assertEqual(iworkflowable.main_workflow.eid, mwf.eid)
+ self.assertListEqual(parse_hist(iworkflowable.workflow_history),
+ [('state1', 'swfstate1', 'swftr1', 'go'),
+ ('swfstate1', 'swfstate2', 'tr1', 'go'),
+ ('swfstate2', 'state2', 'swftr1', 'exiting from subworkflow subworkflow'),
+ ('state2', 'state1', None, 'gadget'),
+ ('state1', 'swfstate1', 'swftr1', 'au'),
+ ('swfstate1', 'swfstate3', 'tr2', 'chapeau'),
+ ('swfstate3', 'state3', 'swftr1', 'exiting from subworkflow subworkflow'),
+ ])
+
+ def test_swf_exit_consistency(self):
+ with self.admin_access.shell() as shell:
+ # sub-workflow
+ swf = add_wf(shell, 'CWGroup', name='subworkflow')
+ swfstate1 = swf.add_state(u'swfstate1', initial=True)
+ swfstate2 = swf.add_state(u'swfstate2')
+ tr1 = swf.add_transition(u'tr1', (swfstate1,), swfstate2)
+ # main workflow
+ mwf = add_wf(shell, 'CWGroup', name='main workflow', default=True)
+ state1 = mwf.add_state(u'state1', initial=True)
+ state2 = mwf.add_state(u'state2')
+ state3 = mwf.add_state(u'state3')
+ mwf.add_wftransition(u'swftr1', swf, state1,
+ [(swfstate2, state2), (swfstate2, state3)])
+ with self.assertRaises(ValidationError) as cm:
+ shell.commit()
+ self.assertEqual(cm.exception.errors, {'subworkflow_exit-subject': u"can't have multiple exits on the same state"})
+
+ def test_swf_fire_in_a_row(self):
+ with self.admin_access.shell() as shell:
+ # sub-workflow
+ subwf = add_wf(shell, 'CWGroup', name='subworkflow')
+ xsigning = subwf.add_state('xsigning', initial=True)
+ xaborted = subwf.add_state('xaborted')
+ xsigned = subwf.add_state('xsigned')
+ xabort = subwf.add_transition('xabort', (xsigning,), xaborted)
+ xsign = subwf.add_transition('xsign', (xsigning,), xsigning)
+ xcomplete = subwf.add_transition('xcomplete', (xsigning,), xsigned,
+ type=u'auto')
+ # main workflow
+ twf = add_wf(shell, 'CWGroup', name='mainwf', default=True)
+ created = twf.add_state(_('created'), initial=True)
+ identified = twf.add_state(_('identified'))
+ released = twf.add_state(_('released'))
+ closed = twf.add_state(_('closed'))
+ twf.add_wftransition(_('identify'), subwf, (created,),
+ [(xsigned, identified), (xaborted, created)])
+ twf.add_wftransition(_('release'), subwf, (identified,),
+ [(xsigned, released), (xaborted, identified)])
+ twf.add_wftransition(_('close'), subwf, (released,),
+ [(xsigned, closed), (xaborted, released)])
+ shell.commit()
+ with self.admin_access.repo_cnx() as cnx:
+ group = cnx.create_entity('CWGroup', name=u'grp1')
+ cnx.commit()
+ iworkflowable = group.cw_adapt_to('IWorkflowable')
+ for trans in ('identify', 'release', 'close'):
+ iworkflowable.fire_transition(trans)
+ cnx.commit()
+
+
+ def test_swf_magic_tr(self):
+ with self.admin_access.shell() as shell:
+ # sub-workflow
+ subwf = add_wf(shell, 'CWGroup', name='subworkflow')
+ xsigning = subwf.add_state('xsigning', initial=True)
+ xaborted = subwf.add_state('xaborted')
+ xsigned = subwf.add_state('xsigned')
+ xabort = subwf.add_transition('xabort', (xsigning,), xaborted)
+ xsign = subwf.add_transition('xsign', (xsigning,), xsigned)
+ # main workflow
+ twf = add_wf(shell, 'CWGroup', name='mainwf', default=True)
+ created = twf.add_state(_('created'), initial=True)
+ identified = twf.add_state(_('identified'))
+ released = twf.add_state(_('released'))
+ twf.add_wftransition(_('identify'), subwf, created,
+ [(xaborted, None), (xsigned, identified)])
+ twf.add_wftransition(_('release'), subwf, identified,
+ [(xaborted, None)])
+ shell.commit()
+ with self.admin_access.web_request() as req:
+ group = req.create_entity('CWGroup', name=u'grp1')
+ req.cnx.commit()
+ iworkflowable = group.cw_adapt_to('IWorkflowable')
+ for trans, nextstate in (('identify', 'xsigning'),
+ ('xabort', 'created'),
+ ('identify', 'xsigning'),
+ ('xsign', 'identified'),
+ ('release', 'xsigning'),
+ ('xabort', 'identified')
+ ):
+ iworkflowable.fire_transition(trans)
+ req.cnx.commit()
+ group.cw_clear_all_caches()
+ self.assertEqual(iworkflowable.state, nextstate)
+
+ def test_replace_state(self):
+ with self.admin_access.shell() as shell:
+ wf = add_wf(shell, 'CWGroup', name='groupwf', default=True)
+ s_new = wf.add_state('new', initial=True)
+ s_state1 = wf.add_state('state1')
+ wf.add_transition('tr', (s_new,), s_state1)
+ shell.commit()
+
+ with self.admin_access.repo_cnx() as cnx:
+ group = cnx.create_entity('CWGroup', name=u'grp1')
+ cnx.commit()
+
+ iwf = group.cw_adapt_to('IWorkflowable')
+ iwf.fire_transition('tr')
+ cnx.commit()
+ group.cw_clear_all_caches()
+
+ wf = cnx.entity_from_eid(wf.eid)
+ wf.add_state('state2')
+ with cnx.security_enabled(write=False):
+ wf.replace_state('state1', 'state2')
+ cnx.commit()
+
+ self.assertEqual(iwf.state, 'state2')
+ self.assertEqual(iwf.latest_trinfo().to_state[0].name, 'state2')
+
+
+class CustomWorkflowTC(CubicWebTC):
+
+ def setup_database(self):
+ with self.admin_access.repo_cnx() as cnx:
+ self.member_eid = self.create_user(cnx, 'member').eid
+
+ def test_custom_wf_replace_state_no_history(self):
+        """member in initial state with no previous history, state is simply
+ redirected when changing workflow
+ """
+ with self.admin_access.shell() as shell:
+ wf = add_wf(shell, 'CWUser')
+ wf.add_state('asleep', initial=True)
+ with self.admin_access.web_request() as req:
+ req.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s',
+ {'wf': wf.eid, 'x': self.member_eid})
+ member = req.entity_from_eid(self.member_eid)
+ iworkflowable = member.cw_adapt_to('IWorkflowable')
+ self.assertEqual(iworkflowable.state, 'activated') # no change before commit
+ req.cnx.commit()
+ member.cw_clear_all_caches()
+ self.assertEqual(iworkflowable.current_workflow.eid, wf.eid)
+ self.assertEqual(iworkflowable.state, 'asleep')
+ self.assertEqual(iworkflowable.workflow_history, ())
+
+ def test_custom_wf_replace_state_keep_history(self):
+        """member in initial state with some history, state is redirected and
+ state change is recorded to history
+ """
+ with self.admin_access.web_request() as req:
+ member = req.entity_from_eid(self.member_eid)
+ iworkflowable = member.cw_adapt_to('IWorkflowable')
+ iworkflowable.fire_transition('deactivate')
+ iworkflowable.fire_transition('activate')
+ req.cnx.commit()
+ with self.admin_access.shell() as shell:
+ wf = add_wf(shell, 'CWUser')
+ wf.add_state('asleep', initial=True)
+ shell.rqlexec('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s',
+ {'wf': wf.eid, 'x': self.member_eid})
+ with self.admin_access.web_request() as req:
+ member = req.entity_from_eid(self.member_eid)
+ iworkflowable = member.cw_adapt_to('IWorkflowable')
+ self.assertEqual(iworkflowable.current_workflow.eid, wf.eid)
+ self.assertEqual(iworkflowable.state, 'asleep')
+ self.assertEqual(parse_hist(iworkflowable.workflow_history),
+ [('activated', 'deactivated', 'deactivate', None),
+ ('deactivated', 'activated', 'activate', None),
+ ('activated', 'asleep', None, 'workflow changed to "CWUser"')])
+
+ def test_custom_wf_no_initial_state(self):
+ """try to set a custom workflow which has no initial state"""
+ with self.admin_access.shell() as shell:
+ wf = add_wf(shell, 'CWUser')
+ wf.add_state('asleep')
+ shell.rqlexec('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s',
+ {'wf': wf.eid, 'x': self.member_eid})
+ with self.assertRaises(ValidationError) as cm:
+ shell.commit()
+ self.assertEqual(cm.exception.errors, {'custom_workflow-subject': u'workflow has no initial state'})
+
+ def test_custom_wf_bad_etype(self):
+ """try to set a custom workflow which doesn't apply to entity type"""
+ with self.admin_access.shell() as shell:
+ wf = add_wf(shell, 'Company')
+ wf.add_state('asleep', initial=True)
+ shell.rqlexec('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s',
+ {'wf': wf.eid, 'x': self.member_eid})
+ with self.assertRaises(ValidationError) as cm:
+ shell.commit()
+ self.assertEqual(cm.exception.errors, {'custom_workflow-subject': u"workflow isn't a workflow for this type"})
+
+ def test_del_custom_wf(self):
+ """member in some state shared by the new workflow, nothing has to be
+ done
+ """
+ with self.admin_access.web_request() as req:
+ member = req.entity_from_eid(self.member_eid)
+ iworkflowable = member.cw_adapt_to('IWorkflowable')
+ iworkflowable.fire_transition('deactivate')
+ req.cnx.commit()
+ with self.admin_access.shell() as shell:
+ wf = add_wf(shell, 'CWUser')
+ wf.add_state('asleep', initial=True)
+ shell.rqlexec('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s',
+ {'wf': wf.eid, 'x': self.member_eid})
+ shell.commit()
+ with self.admin_access.web_request() as req:
+ req.execute('DELETE X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s',
+ {'wf': wf.eid, 'x': self.member_eid})
+ member = req.entity_from_eid(self.member_eid)
+ iworkflowable = member.cw_adapt_to('IWorkflowable')
+ self.assertEqual(iworkflowable.state, 'asleep')# no change before commit
+ req.cnx.commit()
+ member.cw_clear_all_caches()
+ self.assertEqual(iworkflowable.current_workflow.name, "default user workflow")
+ self.assertEqual(iworkflowable.state, 'activated')
+ self.assertEqual(parse_hist(iworkflowable.workflow_history),
+ [('activated', 'deactivated', 'deactivate', None),
+ ('deactivated', 'asleep', None, 'workflow changed to "CWUser"'),
+ ('asleep', 'activated', None, 'workflow changed to "default user workflow"'),])
+
+
+class AutoTransitionTC(CubicWebTC):
+
+ def setup_custom_wf(self):
+ with self.admin_access.shell() as shell:
+ wf = add_wf(shell, 'CWUser')
+ asleep = wf.add_state('asleep', initial=True)
+ dead = wf.add_state('dead')
+ wf.add_transition('rest', asleep, asleep)
+ wf.add_transition('sick', asleep, dead, type=u'auto',
+ conditions=({'expr': u'X surname "toto"',
+ 'mainvars': u'X'},))
+ return wf
+
+ def test_auto_transition_fired(self):
+ wf = self.setup_custom_wf()
+ with self.admin_access.web_request() as req:
+ user = self.create_user(req, 'member')
+ iworkflowable = user.cw_adapt_to('IWorkflowable')
+ req.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s',
+ {'wf': wf.eid, 'x': user.eid})
+ req.cnx.commit()
+ user.cw_clear_all_caches()
+ self.assertEqual(iworkflowable.state, 'asleep')
+ self.assertEqual([t.name for t in iworkflowable.possible_transitions()],
+ ['rest'])
+ iworkflowable.fire_transition('rest')
+ req.cnx.commit()
+ user.cw_clear_all_caches()
+ self.assertEqual(iworkflowable.state, 'asleep')
+ self.assertEqual([t.name for t in iworkflowable.possible_transitions()],
+ ['rest'])
+ self.assertEqual(parse_hist(iworkflowable.workflow_history),
+ [('asleep', 'asleep', 'rest', None)])
+ user.cw_set(surname=u'toto') # fulfill condition
+ req.cnx.commit()
+ iworkflowable.fire_transition('rest')
+ req.cnx.commit()
+ user.cw_clear_all_caches()
+ self.assertEqual(iworkflowable.state, 'dead')
+ self.assertEqual(parse_hist(iworkflowable.workflow_history),
+ [('asleep', 'asleep', 'rest', None),
+ ('asleep', 'asleep', 'rest', None),
+ ('asleep', 'dead', 'sick', None),])
+
+ def test_auto_transition_custom_initial_state_fired(self):
+ wf = self.setup_custom_wf()
+ with self.admin_access.web_request() as req:
+ user = self.create_user(req, 'member', surname=u'toto')
+ req.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s',
+ {'wf': wf.eid, 'x': user.eid})
+ req.cnx.commit()
+ user.cw_clear_all_caches()
+ iworkflowable = user.cw_adapt_to('IWorkflowable')
+ self.assertEqual(iworkflowable.state, 'dead')
+
+ def test_auto_transition_initial_state_fired(self):
+ with self.admin_access.web_request() as req:
+ wf = req.execute('Any WF WHERE ET default_workflow WF, '
+ 'ET name %(et)s', {'et': 'CWUser'}).get_entity(0, 0)
+ dead = wf.add_state('dead')
+ wf.add_transition('sick', wf.state_by_name('activated'), dead,
+ type=u'auto', conditions=({'expr': u'X surname "toto"',
+ 'mainvars': u'X'},))
+ req.cnx.commit()
+ with self.admin_access.web_request() as req:
+ user = self.create_user(req, 'member', surname=u'toto')
+ req.cnx.commit()
+ iworkflowable = user.cw_adapt_to('IWorkflowable')
+ self.assertEqual(iworkflowable.state, 'dead')
+
+
+class WorkflowHooksTC(CubicWebTC):
+
+ def setUp(self):
+ CubicWebTC.setUp(self)
+ with self.admin_access.web_request() as req:
+ self.wf = req.user.cw_adapt_to('IWorkflowable').current_workflow
+ self.s_activated = self.wf.state_by_name('activated').eid
+ self.s_deactivated = self.wf.state_by_name('deactivated').eid
+ self.s_dummy = self.wf.add_state(u'dummy').eid
+ self.wf.add_transition(u'dummy', (self.s_deactivated,), self.s_dummy)
+ ueid = self.create_user(req, 'stduser', commit=False).eid
+ # test initial state is set
+ rset = req.execute('Any N WHERE S name N, X in_state S, X eid %(x)s',
+ {'x' : ueid})
+ self.assertFalse(rset, rset.rows)
+ req.cnx.commit()
+ initialstate = req.execute('Any N WHERE S name N, X in_state S, X eid %(x)s',
+ {'x' : ueid})[0][0]
+ self.assertEqual(initialstate, u'activated')
+ # give access to users group on the user's wf transitions
+        # so we can test wf enforcing on euser (managers no longer have this
+        # enforcement)
+ req.execute('SET X require_group G '
+ 'WHERE G name "users", X transition_of WF, WF eid %(wf)s',
+ {'wf': self.wf.eid})
+ req.cnx.commit()
+
+    # XXX currently, we have to rely on hooks to set initial state, or to use execute
+ # def test_initial_state(self):
+ # cnx = self.login('stduser')
+ # cu = cnx.cursor()
+ # self.assertRaises(ValidationError, cu.execute,
+ # 'INSERT CWUser X: X login "badaboum", X upassword %(pwd)s, '
+ # 'X in_state S WHERE S name "deactivated"', {'pwd': 'oops'})
+ # cnx.close()
+ # # though managers can do whatever he want
+ # self.execute('INSERT CWUser X: X login "badaboum", X upassword %(pwd)s, '
+ # 'X in_state S, X in_group G WHERE S name "deactivated", G name "users"', {'pwd': 'oops'})
+ # self.commit()
+
+ # test that the workflow is correctly enforced
+
+ def _cleanup_msg(self, msg):
+ """remove the variable part of one specific error message"""
+ lmsg = msg.split()
+ lmsg.pop(1)
+ lmsg.pop()
+ return ' '.join(lmsg)
+
+ def test_transition_checking1(self):
+ with self.new_access('stduser').repo_cnx() as cnx:
+ user = cnx.user
+ iworkflowable = user.cw_adapt_to('IWorkflowable')
+ with self.assertRaises(ValidationError) as cm:
+ iworkflowable.fire_transition('activate')
+ self.assertEqual(self._cleanup_msg(cm.exception.errors['by_transition-subject']),
+ u"transition isn't allowed from")
+
+ def test_transition_checking2(self):
+ with self.new_access('stduser').repo_cnx() as cnx:
+ user = cnx.user
+ iworkflowable = user.cw_adapt_to('IWorkflowable')
+ with self.assertRaises(ValidationError) as cm:
+ iworkflowable.fire_transition('dummy')
+ self.assertEqual(self._cleanup_msg(cm.exception.errors['by_transition-subject']),
+ u"transition isn't allowed from")
+
+ def test_transition_checking3(self):
+ with self.new_access('stduser').repo_cnx() as cnx:
+ user = cnx.user
+ iworkflowable = user.cw_adapt_to('IWorkflowable')
+ iworkflowable.fire_transition('deactivate')
+ cnx.commit()
+ with self.assertRaises(ValidationError) as cm:
+ iworkflowable.fire_transition('deactivate')
+ self.assertEqual(self._cleanup_msg(cm.exception.errors['by_transition-subject']),
+ u"transition isn't allowed from")
+ cnx.rollback()
+ # get back now
+ iworkflowable.fire_transition('activate')
+ cnx.commit()
+
+
+if __name__ == '__main__':
+ from logilab.common.testlib import unittest_main
+ unittest_main()
diff -r 1400aee10df4 -r faf279e33298 cubicweb/entities/wfobjs.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/entities/wfobjs.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,589 @@
+# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+"""workflow handling:
+
+* entity types defining workflow (Workflow, State, Transition...)
+* workflow history (TrInfo)
+* adapter for workflowable entities (IWorkflowableAdapter)
+"""
+from __future__ import print_function
+
+__docformat__ = "restructuredtext en"
+
+from six import text_type, string_types
+
+from logilab.common.decorators import cached, clear_cache
+from logilab.common.deprecation import deprecated
+
+from cubicweb.entities import AnyEntity, fetch_config
+from cubicweb.view import EntityAdapter
+from cubicweb.predicates import relation_possible
+
+
+try:
+ from cubicweb import server
+except ImportError:
+ # We need to lookup DEBUG from there,
+ # however a pure dbapi client may not have it.
+ class server(object): pass
+ server.DEBUG = False
+
+
class WorkflowException(Exception):
    """Raised when a workflow operation cannot be performed (unknown state
    or transition, abstract type instantiation, ...)."""
+
class Workflow(AnyEntity):
    """customized class for Workflow entities

    A workflow groups states and transitions; workflowable entities evolve
    from state to state by firing transitions (see `IWorkflowableAdapter`).
    """
    __regid__ = 'Workflow'

    @property
    def initial(self):
        """return the initial state for this workflow, or None if unset"""
        return self.initial_state and self.initial_state[0] or None

    def is_default_workflow_of(self, etype):
        """return True if this workflow is the default workflow for the given
        entity type
        """
        return any(et for et in self.reverse_default_workflow
                   if et.name == etype)

    def iter_workflows(self, _done=None):
        """return an iterator on actual workflows, eg this workflow and its
        subworkflows

        `_done` is internal (cycle protection across recursive calls) and
        should not be supplied by callers.
        """
        # infinite loop safety belt
        if _done is None:
            _done = set()
        yield self
        _done.add(self.eid)
        for tr in self._cw.execute('Any T WHERE T is WorkflowTransition, '
                                   'T transition_of WF, WF eid %(wf)s',
                                   {'wf': self.eid}).entities():
            if tr.subwf.eid in _done:
                continue
            for subwf in tr.subwf.iter_workflows(_done):
                yield subwf

    # state / transitions accessors ############################################

    def state_by_name(self, statename):
        """return the State entity named `statename` in this workflow, or None"""
        rset = self._cw.execute('Any S, SN WHERE S name SN, S name %(n)s, '
                                'S state_of WF, WF eid %(wf)s',
                                {'n': statename, 'wf': self.eid})
        if rset:
            return rset.get_entity(0, 0)
        return None

    def state_by_eid(self, eid):
        """return the State entity with given `eid` in this workflow, or None"""
        rset = self._cw.execute('Any S, SN WHERE S name SN, S eid %(s)s, '
                                'S state_of WF, WF eid %(wf)s',
                                {'s': eid, 'wf': self.eid})
        if rset:
            return rset.get_entity(0, 0)
        return None

    def transition_by_name(self, trname):
        """return the transition entity named `trname` in this workflow, or None"""
        rset = self._cw.execute('Any T, TN WHERE T name TN, T name %(n)s, '
                                'T transition_of WF, WF eid %(wf)s',
                                {'n': text_type(trname), 'wf': self.eid})
        if rset:
            return rset.get_entity(0, 0)
        return None

    def transition_by_eid(self, eid):
        """return the transition entity with given `eid` in this workflow, or None"""
        rset = self._cw.execute('Any T, TN WHERE T name TN, T eid %(t)s, '
                                'T transition_of WF, WF eid %(wf)s',
                                {'t': eid, 'wf': self.eid})
        if rset:
            return rset.get_entity(0, 0)
        return None

    # wf construction methods ##################################################

    def add_state(self, name, initial=False, **kwargs):
        """add a state to this workflow

        When `initial` is true, the state also becomes the workflow's
        initial state (which must not have been defined yet).
        """
        state = self._cw.create_entity('State', name=text_type(name), **kwargs)
        self._cw.execute('SET S state_of WF WHERE S eid %(s)s, WF eid %(wf)s',
                         {'s': state.eid, 'wf': self.eid})
        if initial:
            assert not self.initial, "Initial state already defined as %s" % self.initial
            self._cw.execute('SET WF initial_state S '
                             'WHERE S eid %(s)s, WF eid %(wf)s',
                             {'s': state.eid, 'wf': self.eid})
        return state

    def _add_transition(self, trtype, name, fromstates,
                        requiredgroups=(), conditions=(), **kwargs):
        """factored out creation of a transition entity of type `trtype`
        ('Transition' or 'WorkflowTransition'), allowed from `fromstates`
        (state entities or eids, single or sequence)
        """
        tr = self._cw.create_entity(trtype, name=text_type(name), **kwargs)
        self._cw.execute('SET T transition_of WF '
                         'WHERE T eid %(t)s, WF eid %(wf)s',
                         {'t': tr.eid, 'wf': self.eid})
        assert fromstates, fromstates
        if not isinstance(fromstates, (tuple, list)):
            fromstates = (fromstates,)
        for state in fromstates:
            if hasattr(state, 'eid'):
                state = state.eid
            self._cw.execute('SET S allowed_transition T '
                             'WHERE S eid %(s)s, T eid %(t)s',
                             {'s': state, 't': tr.eid})
        tr.set_permissions(requiredgroups, conditions, reset=False)
        return tr

    def add_transition(self, name, fromstates, tostate=None,
                       requiredgroups=(), conditions=(), **kwargs):
        """add a transition to this workflow from some state(s) to another"""
        tr = self._add_transition('Transition', name, fromstates,
                                  requiredgroups, conditions, **kwargs)
        if tostate is not None:
            if hasattr(tostate, 'eid'):
                tostate = tostate.eid
            self._cw.execute('SET T destination_state S '
                             'WHERE S eid %(s)s, T eid %(t)s',
                             {'t': tr.eid, 's': tostate})
        return tr

    def add_wftransition(self, name, subworkflow, fromstates, exitpoints=(),
                         requiredgroups=(), conditions=(), **kwargs):
        """add a workflow transition to this workflow

        `subworkflow` (entity or eid) is entered when the transition fires;
        `exitpoints` is a sequence of (fromstate, tostate) couples, see
        `WorkflowTransition.add_exit_point`.
        """
        tr = self._add_transition('WorkflowTransition', name, fromstates,
                                  requiredgroups, conditions, **kwargs)
        if hasattr(subworkflow, 'eid'):
            subworkflow = subworkflow.eid
        assert self._cw.execute('SET T subworkflow WF WHERE WF eid %(wf)s,T eid %(t)s',
                                {'t': tr.eid, 'wf': subworkflow})
        for fromstate, tostate in exitpoints:
            tr.add_exit_point(fromstate, tostate)
        return tr

    def replace_state(self, todelstate, replacement):
        """migration convenience method: redirect every reference to
        `todelstate` (entity or name) to `replacement`, then delete it
        """
        if not hasattr(todelstate, 'eid'):
            todelstate = self.state_by_name(todelstate)
        if not hasattr(replacement, 'eid'):
            replacement = self.state_by_name(replacement)
        args = {'os': todelstate.eid, 'ns': replacement.eid}
        execute = self._cw.execute
        execute('SET X in_state NS WHERE X in_state OS, '
                'NS eid %(ns)s, OS eid %(os)s', args)
        execute('SET X from_state NS WHERE X from_state OS, '
                'OS eid %(os)s, NS eid %(ns)s', args)
        execute('SET X to_state NS WHERE X to_state OS, '
                'OS eid %(os)s, NS eid %(ns)s', args)
        todelstate.cw_delete()
+
+
class BaseTransition(AnyEntity):
    """customized class for abstract transition

    provides a specific may_be_fired method to check if the relation may be
    fired by the logged user
    """
    __regid__ = 'BaseTransition'
    fetch_attrs, cw_fetch_order = fetch_config(['name', 'type'])

    def __init__(self, *args, **kwargs):
        # BaseTransition is abstract: only Transition / WorkflowTransition
        # entities may be instantiated
        if self.cw_etype == 'BaseTransition':
            raise WorkflowException('should not be instantiated')
        super(BaseTransition, self).__init__(*args, **kwargs)

    @property
    def workflow(self):
        """the workflow this transition belongs to"""
        return self.transition_of[0]

    def has_input_state(self, state):
        """return True if `state` (entity or eid) is one of this transition's
        input states
        """
        if hasattr(state, 'eid'):
            state = state.eid
        return any(s for s in self.reverse_allowed_transition if s.eid == state)

    def may_be_fired(self, eid):
        """return true if the logged user may fire this transition

        `eid` is the eid of the object on which we may fire the transition
        """
        DBG = False
        if server.DEBUG & server.DBG_SEC:
            if 'transition' in server._SECURITY_CAPS:
                DBG = True
        user = self._cw.user
        # check user is at least in one of the required groups if any
        groups = frozenset(g.name for g in self.require_group)
        if groups:
            matches = user.matching_groups(groups)
            if matches:
                if DBG:
                    print('may_be_fired: %r may fire: user matches %s' % (self.name, groups))
                return matches
            if 'owners' in groups and user.owns(eid):
                if DBG:
                    print('may_be_fired: %r may fire: user is owner' % self.name)
                return True
        # check one of the rql expression conditions matches if any
        if self.condition:
            if DBG:
                # fixed: debug message used to read 'my_be_fired'
                print('may_be_fired: %r: %s' %
                      (self.name, [(rqlexpr.expression,
                                    rqlexpr.check_expression(self._cw, eid))
                                   for rqlexpr in self.condition]))
            for rqlexpr in self.condition:
                if rqlexpr.check_expression(self._cw, eid):
                    return True
        if self.condition or groups:
            return False
        return True

    def set_permissions(self, requiredgroups=(), conditions=(), reset=True):
        """set or add (if `reset` is False) groups and conditions for this
        transition

        `conditions` may hold plain RQL expression strings or dicts with an
        'expr' key and an optional 'mainvars' key (defaults to u'X').
        """
        if reset:
            self._cw.execute('DELETE T require_group G WHERE T eid %(x)s',
                             {'x': self.eid})
            self._cw.execute('DELETE T condition R WHERE T eid %(x)s',
                             {'x': self.eid})
        for gname in requiredgroups:
            rset = self._cw.execute('SET T require_group G '
                                    'WHERE T eid %(x)s, G name %(gn)s',
                                    {'x': self.eid, 'gn': text_type(gname)})
            assert rset, '%s is not a known group' % gname
        if isinstance(conditions, string_types):
            conditions = (conditions,)
        for expr in conditions:
            if isinstance(expr, string_types):
                kwargs = {'expr': text_type(expr)}
            else:
                assert isinstance(expr, dict)
                kwargs = expr
            kwargs['x'] = self.eid
            kwargs.setdefault('mainvars', u'X')
            self._cw.execute('INSERT RQLExpression X: X exprtype "ERQLExpression", '
                             'X expression %(expr)s, X mainvars %(mainvars)s, '
                             'T condition X WHERE T eid %(x)s', kwargs)
        # XXX clear caches?
+
+
class Transition(BaseTransition):
    """customized class for Transition entities"""
    __regid__ = 'Transition'

    def dc_long_title(self):
        """dublin core long title: transition name plus its translation"""
        return '%s (%s)' % (self.name, self._cw._(self.name))

    def destination(self, entity):
        """return the state reached by `entity` when firing this transition

        Without an explicit destination state, the entity goes back to the
        state recorded before its latest transition.
        """
        try:
            return self.destination_state[0]
        except IndexError:
            return entity.cw_adapt_to('IWorkflowable').latest_trinfo().previous_state

    def potential_destinations(self):
        """generate every state this transition may lead to"""
        try:
            yield self.destination_state[0]
        except IndexError:
            # no explicit destination: walk back through states from which
            # this transition's input states may be reached
            for incomingstate in self.reverse_allowed_transition:
                for tr in incomingstate.reverse_destination_state:
                    for previousstate in tr.reverse_allowed_transition:
                        yield previousstate
+
+
class WorkflowTransition(BaseTransition):
    """customized class for WorkflowTransition entities

    Firing such a transition makes the entity enter a sub-workflow; it comes
    back to the parent workflow through an exit point.
    """
    __regid__ = 'WorkflowTransition'

    @property
    def subwf(self):
        """the sub-workflow entered when this transition is fired"""
        return self.subworkflow[0]

    def destination(self, entity):
        """entering the sub-workflow starts at its initial state"""
        return self.subwf.initial

    def potential_destinations(self):
        """generate every state this transition may lead to"""
        yield self.subwf.initial

    def add_exit_point(self, fromstate, tostate):
        """record that reaching `fromstate` in the sub-workflow exits it;
        `tostate` is the parent workflow state to reach, or None to return to
        the state from which the sub-workflow was entered
        """
        if hasattr(fromstate, 'eid'):
            fromstate = fromstate.eid
        if tostate is None:
            self._cw.execute('INSERT SubWorkflowExitPoint X: T subworkflow_exit X, '
                             'X subworkflow_state FS WHERE T eid %(t)s, FS eid %(fs)s',
                             {'t': self.eid, 'fs': fromstate})
        else:
            if hasattr(tostate, 'eid'):
                tostate = tostate.eid
            self._cw.execute('INSERT SubWorkflowExitPoint X: T subworkflow_exit X, '
                             'X subworkflow_state FS, X destination_state TS '
                             'WHERE T eid %(t)s, FS eid %(fs)s, TS eid %(ts)s',
                             {'t': self.eid, 'fs': fromstate, 'ts': tostate})

    def get_exit_point(self, entity, stateeid):
        """if state is an exit point, return its associated destination state"""
        # accept a state entity as well as an eid
        if hasattr(stateeid, 'eid'):
            stateeid = stateeid.eid
        try:
            tostateeid = self.exit_points()[stateeid]
        except KeyError:
            return None
        if tostateeid is None:
            # go back to state from which we've entered the subworkflow
            return entity.cw_adapt_to('IWorkflowable').subworkflow_input_trinfo().previous_state
        return self._cw.entity_from_eid(tostateeid)

    @cached
    def exit_points(self):
        """cached mapping of sub-workflow state eid -> destination state eid
        (None when the exit point returns to the entering state)"""
        result = {}
        for ep in self.subworkflow_exit:
            result[ep.subwf_state.eid] = ep.destination and ep.destination.eid
        return result

    def cw_clear_all_caches(self):
        """also invalidate the cached `exit_points` mapping"""
        super(WorkflowTransition, self).cw_clear_all_caches()
        clear_cache(self, 'exit_points')
+
+
class SubWorkflowExitPoint(AnyEntity):
    """customized class for SubWorkflowExitPoint entities"""
    __regid__ = 'SubWorkflowExitPoint'

    @property
    def subwf_state(self):
        """the sub-workflow state which triggers this exit point"""
        return self.subworkflow_state[0]

    @property
    def destination(self):
        """destination state in the parent workflow, or None when the exit
        point goes back to the state the sub-workflow was entered from"""
        states = self.destination_state
        if states:
            return states[0]
        return None
+
+
class State(AnyEntity):
    """customized class for State entities"""
    __regid__ = 'State'
    fetch_attrs, cw_fetch_order = fetch_config(['name'])
    rest_attr = 'eid'

    def dc_long_title(self):
        """dublin core long title: state name plus its translation"""
        translated = self._cw._(self.name)
        return '%s (%s)' % (self.name, translated)

    @property
    def workflow(self):
        """the workflow this state belongs to

        take care, may be missing in multi-sources configuration
        """
        states_of = self.state_of
        return states_of[0] if states_of else None
+
+
class TrInfo(AnyEntity):
    """customized class for Transition information entities

    A TrInfo records one step of an entity's workflow history: the fired
    transition (if any), source and destination states and an optional
    comment.
    """
    __regid__ = 'TrInfo'
    fetch_attrs, cw_fetch_order = fetch_config(['creation_date', 'comment'],
                                               pclass=None) # don't want modification_date
    @property
    def for_entity(self):
        """the entity whose workflow history this record belongs to"""
        return self.wf_info_for[0]

    @property
    def previous_state(self):
        """the state the entity was in before this step"""
        return self.from_state[0]

    @property
    def new_state(self):
        """the state the entity reached with this step"""
        return self.to_state[0]

    @property
    def transition(self):
        """the fired transition, or None (e.g. on a forced state change)"""
        return self.by_transition and self.by_transition[0] or None
+
+
+
class IWorkflowableAdapter(EntityAdapter):
    """base adapter providing workflow helper methods for workflowable entities.
    """
    __regid__ = 'IWorkflowable'
    __select__ = relation_possible('in_state')

    @cached
    def cwetype_workflow(self):
        """return the default workflow for entities of this type"""
        # XXX CWEType method
        wfrset = self._cw.execute('Any WF WHERE ET default_workflow WF, '
                                  'ET name %(et)s', {'et': text_type(self.entity.cw_etype)})
        if wfrset:
            return wfrset.get_entity(0, 0)
        self.warning("can't find any workflow for %s", self.entity.cw_etype)
        return None

    @property
    def main_workflow(self):
        """return current workflow applied to this entity

        A custom workflow set on the entity takes precedence over the entity
        type's default workflow.
        """
        if self.entity.custom_workflow:
            return self.entity.custom_workflow[0]
        return self.cwetype_workflow()

    @property
    def current_workflow(self):
        """return current workflow applied to this entity

        May differ from `main_workflow` when the current state belongs to a
        sub-workflow.
        """
        return self.current_state and self.current_state.workflow or self.main_workflow

    @property
    def current_state(self):
        """return current state entity, or None when no state is set yet"""
        return self.entity.in_state and self.entity.in_state[0] or None

    @property
    def state(self):
        """return current state name, or None (with a warning) if unset"""
        try:
            return self.current_state.name
        except AttributeError:
            self.warning('entity %s has no state', self.entity)
            return None

    @property
    def printable_state(self):
        """return current state name translated to context's language"""
        state = self.current_state
        if state:
            return self._cw._(state.name)
        return u''

    @property
    def workflow_history(self):
        """return the workflow history for this entity (eg ordered list of
        TrInfo entities)
        """
        return self.entity.reverse_wf_info_for

    def latest_trinfo(self):
        """return the latest transition information for this entity"""
        try:
            return self.workflow_history[-1]
        except IndexError:
            return None

    def possible_transitions(self, type='normal'):
        """generates transition that MAY be fired for the given entity,
        expected to be in this state
        used only by the UI
        """
        if self.current_state is None or self.current_workflow is None:
            return
        rset = self._cw.execute(
            'Any T,TT, TN WHERE S allowed_transition T, S eid %(x)s, '
            'T type TT, T type %(type)s, '
            'T name TN, T transition_of WF, WF eid %(wfeid)s',
            {'x': self.current_state.eid, 'type': text_type(type),
             'wfeid': self.current_workflow.eid})
        # only yield transitions the logged user may actually fire
        for tr in rset.entities():
            if tr.may_be_fired(self.entity.eid):
                yield tr

    def subworkflow_input_trinfo(self):
        """return the TrInfo which has been recorded when this entity went
        into the current sub-workflow
        """
        if self.main_workflow.eid == self.current_workflow.eid:
            return # doesn't make sense
        subwfentries = []
        # replay the history, pairing sub-workflow entries with their exits
        for trinfo in self.workflow_history:
            if (trinfo.transition and
                trinfo.previous_state.workflow.eid != trinfo.new_state.workflow.eid):
                # entering or leaving a subworkflow
                if (subwfentries and
                    subwfentries[-1].new_state.workflow.eid == trinfo.previous_state.workflow.eid and
                    subwfentries[-1].previous_state.workflow.eid == trinfo.new_state.workflow.eid):
                    # leave
                    del subwfentries[-1]
                else:
                    # enter
                    subwfentries.append(trinfo)
        if not subwfentries:
            return None
        return subwfentries[-1]

    def subworkflow_input_transition(self):
        """return the transition through which the current sub-workflow was
        entered, or None
        """
        return getattr(self.subworkflow_input_trinfo(), 'transition', None)

    def _add_trinfo(self, comment, commentformat, treid=None, tseid=None):
        """create a new TrInfo for this entity; `treid` / `tseid` are the
        optional fired transition / forced destination state eids
        """
        kwargs = {}
        if comment is not None:
            kwargs['comment'] = comment
        if commentformat is not None:
            kwargs['comment_format'] = commentformat
        kwargs['wf_info_for'] = self.entity
        if treid is not None:
            kwargs['by_transition'] = self._cw.entity_from_eid(treid)
        if tseid is not None:
            kwargs['to_state'] = self._cw.entity_from_eid(tseid)
        return self._cw.create_entity('TrInfo', **kwargs)

    def _get_transition(self, tr):
        """resolve a transition name to a transition entity of the current
        workflow; transition entities are returned unchanged
        """
        assert self.current_workflow
        if isinstance(tr, string_types):
            _tr = self.current_workflow.transition_by_name(tr)
            assert _tr is not None, 'not a %s transition: %s' % (
                self.__regid__, tr)
            tr = _tr
        return tr

    def fire_transition(self, tr, comment=None, commentformat=None):
        """change the entity's state by firing given transition (name or entity)
        in entity's workflow
        """
        tr = self._get_transition(tr)
        return self._add_trinfo(comment, commentformat, tr.eid)

    def fire_transition_if_possible(self, tr, comment=None, commentformat=None):
        """change the entity's state by firing given transition (name or entity)
        in entity's workflow if this transition is possible
        """
        tr = self._get_transition(tr)
        if any(tr_ for tr_ in self.possible_transitions()
               if tr_.eid == tr.eid):
            self.fire_transition(tr, comment, commentformat)

    def change_state(self, statename, comment=None, commentformat=None, tr=None):
        """change the entity's state to the given state (name or entity) in
        entity's workflow. This method should only be used by managers to fix
        an entity's state when there is no matching transition, otherwise
        fire_transition should be used.
        """
        assert self.current_workflow
        if hasattr(statename, 'eid'):
            stateeid = statename.eid
        else:
            state = self.current_workflow.state_by_name(statename)
            if state is None:
                raise WorkflowException('not a %s state: %s' % (self.__regid__,
                                                                statename))
            stateeid = state.eid
        # XXX try to find matching transition?
        return self._add_trinfo(comment, commentformat, tr and tr.eid, stateeid)

    def set_initial_state(self, statename):
        """set a newly created entity's state to the given state (name or entity)
        in entity's workflow. This is useful if you don't want it to be the
        workflow's initial state.
        """
        assert self.current_workflow
        if hasattr(statename, 'eid'):
            stateeid = statename.eid
        else:
            state = self.current_workflow.state_by_name(statename)
            if state is None:
                raise WorkflowException('not a %s state: %s' % (self.__regid__,
                                                                statename))
            stateeid = state.eid
        self._cw.execute('SET X in_state S WHERE X eid %(x)s, S eid %(s)s',
                         {'x': self.entity.eid, 's': stateeid})
diff -r 1400aee10df4 -r faf279e33298 cubicweb/entity.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/entity.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,1426 @@
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+"""Base class for entity objects manipulated in clients"""
+
+__docformat__ = "restructuredtext en"
+
+from warnings import warn
+
+from six import text_type, string_types, integer_types
+from six.moves import range
+
+from logilab.common.decorators import cached
+from logilab.common.deprecation import deprecated
+from logilab.common.registry import yes
+from logilab.mtconverter import TransformData, xml_escape
+
+from rql.utils import rqlvar_maker
+from rql.stmts import Select
+from rql.nodes import (Not, VariableRef, Constant, make_relation,
+ Relation as RqlRelation)
+
+from cubicweb import Unauthorized, neg_role
+from cubicweb.utils import support_args
+from cubicweb.rset import ResultSet
+from cubicweb.appobject import AppObject
+from cubicweb.schema import (RQLVocabularyConstraint, RQLConstraint,
+ GeneratedConstraint)
+from cubicweb.rqlrewrite import RQLRewriter
+
+from cubicweb.uilib import soup2xhtml
+from cubicweb.mttransforms import ENGINE
+
+_marker = object()
+
def greater_card(rschema, subjtypes, objtypes, index):
    """Return the greatest cardinality character found at position `index`
    over every (subject, object) relation definition of `rschema`.

    As soon as a multi-valued cardinality ('+' or '*') shows up it is
    returned; otherwise the result is '1'.
    """
    all_cards = (rschema.rdef(subj, obj).cardinality[index]
                 for subj in subjtypes for obj in objtypes)
    for card in all_cards:
        if card in '+*':
            return card
    return '1'
+
def can_use_rest_path(value):
    """return True if value can be used at the end of a Rest URL path

    `None`, the empty string and values containing '?', '/' or '&' are
    refused: those characters would confuse URL parsing, notably when
    running behind Apache mod_proxy.
    """
    if value is None:
        return False
    text = text_type(value)
    if not text:
        return False
    return not any(forbidden in text for forbidden in (u'?', u'/', u'&'))
+
def rel_vars(rel):
    """Return the (subject variable, object variable) couple of an RQL
    relation node; either item is None when the corresponding child is not
    a variable reference (e.g. a constant)."""
    subj_ref = rel.children[0]
    obj_ref = rel.children[1].children[0]
    subj = subj_ref.variable if isinstance(subj_ref, VariableRef) else None
    obj = obj_ref.variable if isinstance(obj_ref, VariableRef) else None
    return (subj, obj)
+
def rel_matches(rel, rtype, role, varname, operator='='):
    """If `rel` is a `rtype` relation (with the given comparison operator)
    whose `role` side is the variable named `varname`, return the variable
    on the other side, else None."""
    if rel.r_type != rtype or rel.children[1].operator != operator:
        return None
    own_idx = 0 if role == 'subject' else 1
    variables = rel_vars(rel)
    if variables[own_idx].name != varname:
        return None
    return variables[1 - own_idx]
+
def build_cstr_with_linkto_infos(cstr, args, searchedvar, evar,
                                 lt_infos, eidvars):
    """restrict vocabulary as much as possible in entity creation,
    based on infos provided by __linkto form param.

    Example based on following schema:

    class works_in(RelationDefinition):
        subject = 'CWUser'
        object = 'Lab'
        cardinality = '1*'
        constraints = [RQLConstraint('S in_group G, O welcomes G')]

    class welcomes(RelationDefinition):
        subject = 'Lab'
        object = 'CWGroup'

    If you create a CWUser in the "scientists" CWGroup you can show
    only the labs that welcome them using :

    lt_infos = {('in_group', 'subject'): 321}

    You get following restriction : 'O welcomes G, G eid 321'

    Returns a GeneratedConstraint, or None when the rewritten constraint
    would still reference the edited entity variable and must be discarded.
    """
    st = cstr.snippet_rqlst.copy()
    # replace relations in ST by eid infos from linkto where possible
    for (info_rtype, info_role), eids in lt_infos.items():
        eid = eids[0] # NOTE: we currently assume a pruned lt_info with only 1 eid
        for rel in st.iget_nodes(RqlRelation):
            targetvar = rel_matches(rel, info_rtype, info_role, evar.name)
            if targetvar is not None:
                if targetvar.name in eidvars:
                    # variable already bound to an eid: drop the relation
                    rel.parent.remove(rel)
                else:
                    # replace the relation by '<targetvar> eid %(name)s'
                    eidrel = make_relation(
                        targetvar, 'eid', (targetvar.name, 'Substitute'),
                        Constant)
                    rel.parent.replace(rel, eidrel)
                    args[targetvar.name] = eid
                    eidvars.add(targetvar.name)
    # if modified ST still contains evar references we must discard the
    # constraint, otherwise evar is unknown in the final rql query which can
    # lead to a SQL table cartesian product and multiple occurrences of solutions
    evarname = evar.name
    for rel in st.iget_nodes(RqlRelation):
        for variable in rel_vars(rel):
            if variable and evarname == variable.name:
                return
    # else insert snippets into the global tree
    return GeneratedConstraint(st, cstr.mainvars - set(evarname))
+
def pruned_lt_info(eschema, lt_infos):
    """Filter `lt_infos` (__linkto information) down to the entries usable
    for vocabulary restriction.

    Only relations with exactly one linked eid and a '1' or '?' cardinality
    towards the linked entity are kept.
    """
    kept = {}
    for (lt_rtype, lt_role), eids in lt_infos.items():
        # a single linked eid is required
        if len(eids) != 1:
            continue
        card_idx = 0 if lt_role == 'subject' else 1
        lt_card = eschema.rdef(lt_rtype, lt_role).cardinality[card_idx]
        if lt_card in '?1':
            kept[(lt_rtype, lt_role)] = eids
    return kept
+
+
+class Entity(AppObject):
+ """an entity instance has e_schema automagically set on
+ the class and instances has access to their issuing cursor.
+
+ A property is set for each attribute and relation on each entity's type
+ class. Becare that among attributes, 'eid' is *NEITHER* stored in the
+ dict containment (which acts as a cache for other attributes dynamically
+ fetched)
+
+ :type e_schema: `cubicweb.schema.EntitySchema`
+ :ivar e_schema: the entity's schema
+
+ :type rest_attr: str
+ :cvar rest_attr: indicates which attribute should be used to build REST urls
+ If `None` is specified (the default), the first unique attribute will
+ be used ('eid' if none found)
+
+ :type cw_skip_copy_for: list
+ :cvar cw_skip_copy_for: a list of couples (rtype, role) for each relation
+ that should be skipped when copying this kind of entity. Note that some
+ relations such as composite relations or relations that have '?1' as
+ object cardinality are always skipped.
+ """
+ __registry__ = 'etypes'
+ __select__ = yes()
+
+ # class attributes that must be set in class definition
+ rest_attr = None
+ fetch_attrs = None
+ skip_copy_for = () # bw compat (< 3.14), use cw_skip_copy_for instead
+ cw_skip_copy_for = [('in_state', 'subject')]
+ # class attributes set automatically at registration time
+ e_schema = None
+
+ @classmethod
+ def __initialize__(cls, schema):
+ """initialize a specific entity class by adding descriptors to access
+ entity type's attributes and relations
+ """
+ etype = cls.__regid__
+ assert etype != 'Any', etype
+ cls.e_schema = eschema = schema.eschema(etype)
+ for rschema, _ in eschema.attribute_definitions():
+ if rschema.type == 'eid':
+ continue
+ setattr(cls, rschema.type, Attribute(rschema.type))
+ mixins = []
+ for rschema, _, role in eschema.relation_definitions():
+ if role == 'subject':
+ attr = rschema.type
+ else:
+ attr = 'reverse_%s' % rschema.type
+ setattr(cls, attr, Relation(rschema, role))
+
+ fetch_attrs = ('modification_date',)
+
    @classmethod
    def cw_fetch_order(cls, select, attr, var):
        """This class method may be used to control sort order when multiple
        entities of this type are fetched through ORM methods. Its arguments
        are:

        * `select`, the RQL syntax tree

        * `attr`, the attribute being watched

        * `var`, the variable through which this attribute's value may be
          accessed in the query

        When you want to do some sorting on the given attribute, you should
        modify the syntax tree accordingly. For instance:

        .. sourcecode:: python

          from rql import nodes

          class Version(AnyEntity):
              __regid__ = 'Version'

              fetch_attrs = ('num', 'description', 'in_state')

              @classmethod
              def cw_fetch_order(cls, select, attr, var):
                  if attr == 'num':
                      func = nodes.Function('version_sort_value')
                      func.append(nodes.variable_ref(var))
                      sterm = nodes.SortTerm(func, asc=False)
                      select.add_sort_term(sterm)

        The default implementation calls
        :meth:`~cubicweb.entity.Entity.cw_fetch_unrelated_order`
        """
        cls.cw_fetch_unrelated_order(select, attr, var)
+
+ @classmethod
+ def cw_fetch_unrelated_order(cls, select, attr, var):
+ """This class method may be used to control sort order when multiple entities of
+ this type are fetched to use in edition (e.g. propose them to create a
+ new relation on an edited entity).
+
+ See :meth:`~cubicweb.entity.Entity.cw_fetch_unrelated_order` for a
+ description of its arguments and usage.
+
+ By default entities will be listed on their modification date descending,
+ i.e. you'll get entities recently modified first.
+ """
+ if attr == 'modification_date':
+ select.add_sort_var(var, asc=False)
+
    @classmethod
    def fetch_rql(cls, user, restriction=None, fetchattrs=None, mainvar='X',
                  settype=True, ordermethod='fetch_order'):
        """return an RQL query string fetching instances of this entity type

        See `fetch_rqlst` for the meaning of the other arguments;
        `restriction` (a list of RQL restriction strings) is deprecated
        since 3.14.
        """
        st = cls.fetch_rqlst(user, mainvar=mainvar, fetchattrs=fetchattrs,
                             settype=settype, ordermethod=ordermethod)
        rql = st.as_string()
        if restriction:
            # cannot use RQLRewriter API to insert 'X rtype %(x)s' restriction
            warn('[3.14] fetch_rql: use of `restriction` parameter is '
                 'deprecated, please use fetch_rqlst and supply a syntax'
                 'tree with your restriction instead', DeprecationWarning)
            insert = ' WHERE ' + ','.join(restriction)
            if ' WHERE ' in rql:
                # prepend the extra restrictions to the existing WHERE clause
                select, where = rql.split(' WHERE ', 1)
                rql = select + insert + ',' + where
            else:
                rql += insert
        return rql
+
    @classmethod
    def fetch_rqlst(cls, user, select=None, mainvar='X', fetchattrs=None,
                    settype=True, ordermethod='fetch_order'):
        """build (or complete the given `select`) an RQL syntax tree fetching
        instances of this entity type along with their `fetch_attrs`
        attributes; `user` is used to check read permissions on the fetched
        attributes.
        """
        if select is None:
            select = Select()
            mainvar = select.get_variable(mainvar)
            select.add_selected(mainvar)
        elif isinstance(mainvar, string_types):
            assert mainvar in select.defined_vars
            mainvar = select.get_variable(mainvar)
        # eases string -> syntax tree test transition: please remove once stable
        select._varmaker = rqlvar_maker(defined=select.defined_vars,
                                        aliases=select.aliases, index=26)
        if settype:
            rel = select.add_type_restriction(mainvar, cls.__regid__)
            # should use 'is_instance_of' instead of 'is' so we retrieve
            # subclasses instances as well
            rel.r_type = 'is_instance_of'
        if fetchattrs is None:
            fetchattrs = cls.fetch_attrs
        cls._fetch_restrictions(mainvar, select, fetchattrs, user, ordermethod)
        return select
+
    @classmethod
    def _fetch_ambiguous_rtypes(cls, select, var, fetchattrs, subjtypes, schema):
        """find rtypes in `fetchattrs` that relate different subject etypes
        taken from (`subjtypes`) to different target etypes; these so called
        "ambiguous" relations, are added directly to the `select` syntax tree
        selection but removed from `fetchattrs` to avoid the fetch recursion
        because we have to choose only one targettype for the recursion and
        adding its own fetch attrs to the selection -when we recurse- would
        filter out the other possible target types from the result set
        """
        # iterate on a copy: fetchattrs is mutated inside the loop
        for attr in fetchattrs.copy():
            rschema = schema.rschema(attr)
            if rschema.final:
                # final relations (attributes) cannot be ambiguous
                continue
            ttypes = None
            for subjtype in subjtypes:
                cur_ttypes = set(rschema.objects(subjtype))
                if ttypes is None:
                    ttypes = cur_ttypes
                elif cur_ttypes != ttypes:
                    # we found an ambiguous relation: remove it from fetchattrs
                    fetchattrs.remove(attr)
                    # ... and add it to the selection
                    targetvar = select.make_variable()
                    select.add_selected(targetvar)
                    rel = make_relation(var, attr, (targetvar,), VariableRef)
                    select.add_restriction(rel)
                    break
+
    @classmethod
    def _fetch_restrictions(cls, mainvar, select, fetchattrs,
                            user, ordermethod='fetch_order', visited=None):
        """add selections / restrictions to `select` so the attributes listed
        in `fetchattrs` are prefetched, recursing into target entity types
        for non-final relations, then apply the `ordermethod` sort hook.

        `visited` is internal (entity types already processed, to avoid
        infinite recursion) and should not be supplied by callers.
        """
        eschema = cls.e_schema
        if visited is None:
            visited = set((eschema.type,))
        elif eschema.type in visited:
            # avoid infinite recursion
            return
        else:
            visited.add(eschema.type)
        for attr in sorted(fetchattrs):
            try:
                rschema = eschema.subjrels[attr]
            except KeyError:
                cls.warning('skipping fetch_attr %s defined in %s (not found in schema)',
                            attr, cls.__regid__)
                continue
            # XXX takefirst=True to remove warning triggered by ambiguous inlined relations
            rdef = eschema.rdef(attr, takefirst=True)
            if not user.matching_groups(rdef.get_groups('read')):
                # user can't read this attribute: don't fetch it
                continue
            if rschema.final or rdef.cardinality[0] in '?1':
                var = select.make_variable()
                select.add_selected(var)
                rel = make_relation(mainvar, attr, (var,), VariableRef)
                select.add_restriction(rel)
            else:
                cls.warning('bad relation %s specified in fetch attrs for %s',
                            attr, cls)
                continue
            if not rschema.final:
                # XXX we need outer join in case the relation is not mandatory
                # (card == '?') *or if the entity is being added*, since in
                # that case the relation may still be missing. As we miss this
                # later information here, systematically add it.
                rel.change_optional('right')
                targettypes = rschema.objects(eschema.type)
                vreg = user._cw.vreg # XXX user._cw.vreg iiiirk
                etypecls = vreg['etypes'].etype_class(targettypes[0])
                if len(targettypes) > 1:
                    # find fetch_attrs common to all destination types
                    fetchattrs = vreg['etypes'].fetch_attrs(targettypes)
                    # ... and handle ambiguous relations
                    cls._fetch_ambiguous_rtypes(select, var, fetchattrs,
                                                targettypes, vreg.schema)
                else:
                    fetchattrs = etypecls.fetch_attrs
                # recurse on the target type (ordermethod=None: sorting is
                # only applied at the top level)
                etypecls._fetch_restrictions(var, select, fetchattrs,
                                             user, None, visited=visited)
        if ordermethod is not None:
            # resolve the sort hook, supporting the pre-3.14 unprefixed name
            try:
                cmeth = getattr(cls, ordermethod)
                warn('[3.14] %s %s class method should be renamed to cw_%s'
                     % (cls.__regid__, ordermethod, ordermethod),
                     DeprecationWarning)
            except AttributeError:
                cmeth = getattr(cls, 'cw_' + ordermethod)
            if support_args(cmeth, 'select'):
                cmeth(select, attr, var)
            else:
                warn('[3.14] %s should now take (select, attr, var) and '
                     'modify the syntax tree when desired instead of '
                     'returning something' % cmeth, DeprecationWarning)
                orderterm = cmeth(attr, var.name)
                if orderterm is not None:
                    try:
                        var, order = orderterm.split()
                    except ValueError:
                        if '(' in orderterm:
                            cls.error('ignore %s until %s is upgraded',
                                      orderterm, cmeth)
                            orderterm = None
                        elif not ' ' in orderterm.strip():
                            var = orderterm
                            order = 'ASC'
                    if orderterm is not None:
                        select.add_sort_var(select.get_variable(var),
                                            order=='ASC')
+
+ @classmethod
+ @cached
+ def cw_rest_attr_info(cls):
+ """this class method return an attribute name to be used in URL for
+ entities of this type and a boolean flag telling if its value should be
+ checked for uniqness.
+
+ The attribute returned is, in order of priority:
+
+ * class's `rest_attr` class attribute
+ * an attribute defined as unique in the class'schema
+ * 'eid'
+ """
+ mainattr, needcheck = 'eid', True
+ if cls.rest_attr:
+ mainattr = cls.rest_attr
+ needcheck = not cls.e_schema.has_unique_values(mainattr)
+ else:
+ for rschema in cls.e_schema.subject_relations():
+ if (rschema.final
+ and rschema not in ('eid', 'cwuri')
+ and cls.e_schema.has_unique_values(rschema)
+ and cls.e_schema.rdef(rschema.type).cardinality[0] == '1'):
+ mainattr = str(rschema)
+ needcheck = False
+ break
+ if mainattr == 'eid':
+ needcheck = False
+ return mainattr, needcheck
+
    @classmethod
    def _cw_build_entity_query(cls, kwargs):
        """Turn `kwargs` (attribute / relation values for a new entity) into
        the building blocks of an INSERT RQL query.

        Return a 4-uple `(rql, qargs, pendingrels, attrcache)` where:

        * `rql` is the relations/restrictions part of the INSERT query
          (without the 'INSERT <ETYPE> X' prefix);
        * `qargs` is the query arguments dict;
        * `pendingrels` lists (attr, role, value) for relations that must be
          handled with a separate query after creation (several targets, or
          an explicit None value);
        * `attrcache` maps final attribute names to values for cache warm-up.
        """
        relations = []
        restrictions = set()
        pendingrels = []
        eschema = cls.e_schema
        qargs = {}
        attrcache = {}
        for attr, value in kwargs.items():
            # a 'reverse_<rtype>' key means the new entity plays the object role
            if attr.startswith('reverse_'):
                attr = attr[len('reverse_'):]
                role = 'object'
            else:
                role = 'subject'
            assert eschema.has_relation(attr, role), '%s %s not found on %s' % (attr, role, eschema)
            rschema = eschema.subjrels[attr] if role == 'subject' else eschema.objrels[attr]
            if not rschema.final and isinstance(value, (tuple, list, set, frozenset)):
                if len(value) == 0:
                    continue  # avoid crash with empty IN clause
                elif len(value) == 1:
                    # single element: treat as a scalar relation below
                    value = next(iter(value))
                else:
                    # prepare IN clause
                    pendingrels.append( (attr, role, value) )
                    continue
            if rschema.final:  # attribute
                relations.append('X %s %%(%s)s' % (attr, attr))
                attrcache[attr] = value
            elif value is None:
                # deletion of any existing relation, deferred after creation
                pendingrels.append( (attr, role, value) )
            else:
                rvar = attr.upper()
                if role == 'object':
                    relations.append('%s %s X' % (rvar, attr))
                else:
                    relations.append('X %s %s' % (attr, rvar))
                restriction = '%s eid %%(%s)s' % (rvar, attr)
                if not restriction in restrictions:
                    restrictions.add(restriction)
                if hasattr(value, 'eid'):
                    value = value.eid
                qargs[attr] = value
        rql = u''
        if relations:
            rql += ', '.join(relations)
        if restrictions:
            rql += ' WHERE %s' % ', '.join(restrictions)
        return rql, qargs, pendingrels, attrcache
+
+ @classmethod
+ def _cw_handle_pending_relations(cls, eid, pendingrels, execute):
+ for attr, role, values in pendingrels:
+ if role == 'object':
+ restr = 'Y %s X' % attr
+ else:
+ restr = 'X %s Y' % attr
+ if values is None:
+ execute('DELETE %s WHERE X eid %%(x)s' % restr, {'x': eid})
+ continue
+ execute('SET %s WHERE X eid %%(x)s, Y eid IN (%s)' % (
+ restr, ','.join(str(getattr(r, 'eid', r)) for r in values)),
+ {'x': eid}, build_descr=False)
+
+ @classmethod
+ def cw_instantiate(cls, execute, **kwargs):
+ """add a new entity of this given type
+
+ Example (in a shell session):
+
+ >>> companycls = vreg['etypes'].etype_class('Company')
+ >>> personcls = vreg['etypes'].etype_class('Person')
+ >>> c = companycls.cw_instantiate(session.execute, name=u'Logilab')
+ >>> p = personcls.cw_instantiate(session.execute, firstname=u'John', lastname=u'Doe',
+ ... works_for=c)
+
+ You can also set relations where the entity has 'object' role by
+ prefixing the relation name by 'reverse_'. Also, relation values may be
+ an entity or eid, a list of entities or eids.
+ """
+ rql, qargs, pendingrels, attrcache = cls._cw_build_entity_query(kwargs)
+ if rql:
+ rql = 'INSERT %s X: %s' % (cls.__regid__, rql)
+ else:
+ rql = 'INSERT %s X' % (cls.__regid__)
+ try:
+ created = execute(rql, qargs).get_entity(0, 0)
+ except IndexError:
+ raise Exception('could not create a %r with %r (%r)' %
+ (cls.__regid__, rql, qargs))
+ created._cw_update_attr_cache(attrcache)
+ cls._cw_handle_pending_relations(created.eid, pendingrels, execute)
+ return created
+
+ def __init__(self, req, rset=None, row=None, col=0):
+ AppObject.__init__(self, req, rset=rset, row=row, col=col)
+ self._cw_related_cache = {}
+ self._cw_adapters_cache = {}
+ if rset is not None:
+ self.eid = rset[row][col]
+ else:
+ self.eid = None
+ self._cw_is_saved = True
+ self.cw_attr_cache = {}
+
+ def __repr__(self):
+ return '' % (
+ self.e_schema, self.eid, list(self.cw_attr_cache), id(self))
+
    def __lt__(self, other):
        # entities have no natural ordering; returning NotImplemented lets
        # Python try the reflected operation or raise TypeError
        return NotImplemented
+
+ def __eq__(self, other):
+ if isinstance(self.eid, integer_types):
+ return self.eid == other.eid
+ return self is other
+
+ def __hash__(self):
+ if isinstance(self.eid, integer_types):
+ return self.eid
+ return super(Entity, self).__hash__()
+
+ def _cw_update_attr_cache(self, attrcache):
+ trdata = self._cw.transaction_data
+ uncached_attrs = trdata.get('%s.storage-special-process-attrs' % self.eid, set())
+ uncached_attrs.update(trdata.get('%s.dont-cache-attrs' % self.eid, set()))
+ for attr in uncached_attrs:
+ attrcache.pop(attr, None)
+ self.cw_attr_cache.pop(attr, None)
+ self.cw_attr_cache.update(attrcache)
+
+ def _cw_dont_cache_attribute(self, attr, repo_side=False):
+ """Called when some attribute has been transformed by a *storage*,
+ hence the original value should not be cached **by anyone**.
+
+ For example we have a special "fs_importing" mode in BFSS
+ where a file path is given as attribute value and stored as is
+ in the data base. Later access to the attribute will provide
+ the content of the file at the specified path. We do not want
+ the "filepath" value to be cached.
+
+ """
+ trdata = self._cw.transaction_data
+ trdata.setdefault('%s.dont-cache-attrs' % self.eid, set()).add(attr)
+ if repo_side:
+ trdata.setdefault('%s.storage-special-process-attrs' % self.eid, set()).add(attr)
+
+ def __json_encode__(self):
+ """custom json dumps hook to dump the entity's eid
+ which is not part of dict structure itself
+ """
+ dumpable = self.cw_attr_cache.copy()
+ dumpable['eid'] = self.eid
+ return dumpable
+
+ def cw_adapt_to(self, interface):
+ """return an adapter the entity to the given interface name.
+
+ return None if it can not be adapted.
+ """
+ cache = self._cw_adapters_cache
+ try:
+ return cache[interface]
+ except KeyError:
+ adapter = self._cw.vreg['adapters'].select_or_none(
+ interface, self._cw, entity=self)
+ cache[interface] = adapter
+ return adapter
+
+ def has_eid(self): # XXX cw_has_eid
+ """return True if the entity has an attributed eid (False
+ meaning that the entity has to be created
+ """
+ try:
+ int(self.eid)
+ return True
+ except (ValueError, TypeError):
+ return False
+
+ def cw_is_saved(self):
+ """during entity creation, there is some time during which the entity
+ has an eid attributed though it's not saved (eg during
+ 'before_add_entity' hooks). You can use this method to ensure the entity
+ has an eid *and* is saved in its source.
+ """
+ return self.has_eid() and self._cw_is_saved
+
+ @cached
+ def cw_metainformation(self):
+ metas = self._cw.entity_metas(self.eid)
+ metas['source'] = self._cw.source_defs()[metas['source']]
+ return metas
+
    def cw_check_perm(self, action):
        """Check `action` permission on this entity; delegates to the schema's
        check_perm (which presumably raises on denial -- see e_schema)."""
        self.e_schema.check_perm(self._cw, action, eid=self.eid)
+
    def cw_has_perm(self, action):
        """Return whether the current user may perform `action` on this
        entity; delegates to the schema's has_perm."""
        return self.e_schema.has_perm(self._cw, action, eid=self.eid)
+
+ def view(self, __vid, __registry='views', w=None, initargs=None, **kwargs): # XXX cw_view
+ """shortcut to apply a view on this entity"""
+ if initargs is None:
+ initargs = kwargs
+ else:
+ initargs.update(kwargs)
+ view = self._cw.vreg[__registry].select(__vid, self._cw, rset=self.cw_rset,
+ row=self.cw_row, col=self.cw_col,
+ **initargs)
+ return view.render(row=self.cw_row, col=self.cw_col, w=w, **kwargs)
+
    def absolute_url(self, *args, **kwargs):  # XXX cw_url
        """return an absolute url to view this entity

        The optional single positional argument is the url "method" (e.g. a
        view id); remaining keyword arguments are forwarded to build_url.
        """
        # use *args since we don't want first argument to be "anonymous" to
        # avoid potential clash with kwargs
        if args:
            assert len(args) == 1, 'only 0 or 1 non-named-argument expected'
            method = args[0]
        else:
            method = None
        # in linksearch mode, we don't want external urls else selecting
        # the object for use in the relation is tricky
        # XXX search_state is web specific
        use_ext_id = False
        if 'base_url' not in kwargs and \
           getattr(self._cw, 'search_state', ('normal',))[0] == 'normal':
            sourcemeta = self.cw_metainformation()['source']
            if sourcemeta.get('use-cwuri-as-url'):
                return self.cwuri  # XXX consider kwargs?
            if sourcemeta.get('base-url'):
                # entity from an external source exposing its own base url
                kwargs['base_url'] = sourcemeta['base-url']
                use_ext_id = True
        if method in (None, 'view'):
            kwargs['_restpath'] = self.rest_path(use_ext_id)
        else:
            kwargs['rql'] = 'Any X WHERE X eid %s' % self.eid
        return self._cw.build_url(method, **kwargs)
+
    def rest_path(self, use_ext_eid=False):  # XXX cw_rest_path
        """returns a REST-like (relative) path for this entity

        The path is '<etype>/<value>' where value comes from the attribute
        returned by :meth:`cw_rest_attr_info`, falling back to the eid alone
        (or the external source id when `use_ext_eid` is true) whenever that
        value is unusable or ambiguous.
        """
        mainattr, needcheck = self.cw_rest_attr_info()
        etype = str(self.e_schema)
        path = etype.lower()
        fallback = False  # XXX apparently unused
        if mainattr != 'eid':
            value = getattr(self, mainattr)
            if not can_use_rest_path(value):
                mainattr = 'eid'
                path = None
            elif needcheck:
                # make sure url is not ambiguous
                try:
                    # memoized count from a previous call (name-mangled attribute)
                    nbresults = self.__unique
                except AttributeError:
                    rql = 'Any COUNT(X) WHERE X is %s, X %s %%(value)s' % (
                        etype, mainattr)
                    nbresults = self.__unique = self._cw.execute(rql, {'value' : value})[0][0]
                if nbresults != 1:  # ambiguity?
                    mainattr = 'eid'
                    path = None
        if mainattr == 'eid':
            if use_ext_eid:
                value = self.cw_metainformation()['extid']
            else:
                value = self.eid
        if path is None:
            # fallback url: the bare value is used as cw entities uri;
            # presumably preferred over an '<etype>/eid/<eid>' form (original
            # comment was garbled -- TODO confirm against upstream)
            return text_type(value)
        return u'%s/%s' % (path, self._cw.url_quote(value))
+
+ def cw_attr_metadata(self, attr, metadata):
+ """return a metadata for an attribute (None if unspecified)"""
+ value = getattr(self, '%s_%s' % (attr, metadata), None)
+ if value is None and metadata == 'encoding':
+ value = self._cw.vreg.property_value('ui.encoding')
+ return value
+
    def printable_value(self, attr, value=_marker, attrtype=None,
                        format='text/html', displaytime=True):  # XXX cw_printable_value
        """return a displayable value (i.e. unicode string) which may contains
        html tags

        When `value` is not given it is fetched from the entity.  String and
        Bytes attributes carrying a '<attr>_format' metadata are converted to
        `format` through the MTC transformation engine; everything else goes
        through the request's printable_value, xml-escaped for html output.
        """
        attr = str(attr)
        if value is _marker:
            value = getattr(self, attr)
        if isinstance(value, string_types):
            value = value.strip()
        if value is None or value == '':  # don't use "not", 0 is an acceptable value
            return u''
        if attrtype is None:
            attrtype = self.e_schema.destination(attr)
        props = self.e_schema.rdef(attr)
        if attrtype == 'String':
            # internationalized *and* formatted string such as schema
            # description...
            if props.internationalizable:
                value = self._cw._(value)
            attrformat = self.cw_attr_metadata(attr, 'format')
            if attrformat:
                return self._cw_mtc_transform(value, attrformat, format,
                                              self._cw.encoding)
        elif attrtype == 'Bytes':
            attrformat = self.cw_attr_metadata(attr, 'format')
            if attrformat:
                encoding = self.cw_attr_metadata(attr, 'encoding')
                return self._cw_mtc_transform(value.getvalue(), attrformat, format,
                                              encoding)
            # binary data without a known format can't be displayed
            return u''
        value = self._cw.printable_value(attrtype, value, props,
                                         displaytime=displaytime)
        if format == 'text/html':
            value = xml_escape(value)
        return value
+
+ def _cw_mtc_transform(self, data, format, target_format, encoding,
+ _engine=ENGINE):
+ trdata = TransformData(data, format, encoding, appobject=self)
+ data = _engine.convert(trdata, target_format).decode()
+ if target_format == 'text/html':
+ data = soup2xhtml(data, self._cw.encoding)
+ return data
+
+ # entity cloning ##########################################################
+
    def copy_relations(self, ceid):  # XXX cw_copy_relations
        """copy relations of the object with the given eid on this
        object (this method is called on the newly created copy, and
        ceid designates the original entity).

        By default meta and composite relations are skipped.
        Overrides this if you want another behaviour
        """
        assert self.has_eid()
        execute = self._cw.execute
        skip_copy_for = {'subject': set(), 'object': set()}
        # transitional support for the deprecated flat `skip_copy_for` list
        for rtype in self.skip_copy_for:
            skip_copy_for['subject'].add(rtype)
            warn('[3.14] skip_copy_for on entity classes (%s) is deprecated, '
                 'use cw_skip_for instead with list of couples (rtype, role)' % self.cw_etype,
                 DeprecationWarning)
        for rtype, role in self.cw_skip_copy_for:
            assert role in ('subject', 'object'), role
            skip_copy_for[role].add(rtype)
        for rschema in self.e_schema.subject_relations():
            if rschema.type in skip_copy_for['subject']:
                continue
            if rschema.final or rschema.meta or rschema.rule:
                continue
            # skip already defined relations
            if getattr(self, rschema.type):
                continue
            # XXX takefirst=True to remove warning triggered by ambiguous relations
            rdef = self.e_schema.rdef(rschema, takefirst=True)
            # skip composite relation
            if rdef.composite:
                continue
            # skip relation with card in ?1 else we either change the copied
            # object (inlined relation) or inserting some inconsistency
            if rdef.cardinality[1] in '?1':
                continue
            rql = 'SET X %s V WHERE X eid %%(x)s, Y eid %%(y)s, Y %s V' % (
                rschema.type, rschema.type)
            execute(rql, {'x': self.eid, 'y': ceid})
            self.cw_clear_relation_cache(rschema.type, 'subject')
        # same thing for relations where this entity is the object
        for rschema in self.e_schema.object_relations():
            if rschema.meta or rschema.rule:
                continue
            # skip already defined relations
            if self.related(rschema.type, 'object'):
                continue
            if rschema.type in skip_copy_for['object']:
                continue
            # XXX takefirst=True to remove warning triggered by ambiguous relations
            rdef = self.e_schema.rdef(rschema, 'object', takefirst=True)
            # skip composite relation
            if rdef.composite:
                continue
            # skip relation with card in ?1 else we either change the copied
            # object (inlined relation) or inserting some inconsistency
            if rdef.cardinality[0] in '?1':
                continue
            rql = 'SET V %s X WHERE X eid %%(x)s, Y eid %%(y)s, V %s Y' % (
                rschema.type, rschema.type)
            execute(rql, {'x': self.eid, 'y': ceid})
            self.cw_clear_relation_cache(rschema.type, 'object')
+
+ # data fetching methods ###################################################
+
+ @cached
+ def as_rset(self): # XXX .cw_as_rset
+ """returns a resultset containing `self` information"""
+ rset = ResultSet([(self.eid,)], 'Any X WHERE X eid %(x)s',
+ {'x': self.eid}, [(self.cw_etype,)])
+ rset.req = self._cw
+ return rset
+
    def _cw_to_complete_relations(self):
        """Yield (rschema, 'subject') for inlined relations whose target may
        be prefetched when calling .complete().
        """
        for rschema in self.e_schema.subject_relations():
            if rschema.final:
                continue
            targets = rschema.objects(self.e_schema)
            if rschema.inlined:
                matching_groups = self._cw.user.matching_groups
                # NOTE(review): the second term only checks that the relation
                # definition *has* read groups, not that the user belongs to
                # them (no matching_groups() wrapping) -- looks suspicious,
                # confirm against upstream before changing
                if all(matching_groups(e.get_groups('read')) and
                       rschema.rdef(self.e_schema, e).get_groups('read')
                       for e in targets):
                    yield rschema, 'subject'
+
+ def _cw_to_complete_attributes(self, skip_bytes=True, skip_pwd=True):
+ for rschema, attrschema in self.e_schema.attribute_definitions():
+ # skip binary data by default
+ if skip_bytes and attrschema.type == 'Bytes':
+ continue
+ attr = rschema.type
+ if attr == 'eid':
+ continue
+ # password retrieval is blocked at the repository server level
+ rdef = rschema.rdef(self.e_schema, attrschema)
+ if not self._cw.user.matching_groups(rdef.get_groups('read')) \
+ or (attrschema.type == 'Password' and skip_pwd):
+ self.cw_attr_cache[attr] = None
+ continue
+ yield attr
+
    # whether .complete() has already fetched everything for this entity
    _cw_completed = False
    def complete(self, attributes=None, skip_bytes=True, skip_pwd=True):  # XXX cw_complete
        """complete this entity by adding missing attributes (i.e. query the
        repository to fill the entity)

        :type skip_bytes: bool
        :param skip_bytes:
          if true, attribute of type Bytes won't be considered
        """
        assert self.has_eid()
        if self._cw_completed:
            return
        if attributes is None:
            self._cw_completed = True
        varmaker = rqlvar_maker()
        V = next(varmaker)
        rql = ['WHERE %s eid %%(x)s' % V]
        selected = []
        for attr in (attributes or self._cw_to_complete_attributes(skip_bytes, skip_pwd)):
            # if attribute already in entity, nothing to do
            if attr in self.cw_attr_cache:
                continue
            # case where attribute must be completed, but is not yet in entity
            var = next(varmaker)
            rql.append('%s %s %s' % (V, attr, var))
            selected.append((attr, var))
        # +1 since this doesn't include the main variable
        lastattr = len(selected) + 1
        # don't fetch extra relation if attributes specified or if the entity is
        # coming from an external source (may lead to error)
        if attributes is None and self.cw_metainformation()['source']['uri'] == 'system':
            # fetch additional relations (restricted to 0..1 relations)
            for rschema, role in self._cw_to_complete_relations():
                rtype = rschema.type
                if self.cw_relation_cached(rtype, role):
                    continue
                # at this point we suppose that:
                # * this is a inlined relation
                # * entity (self) is the subject
                # * user has read perm on the relation and on the target entity
                assert rschema.inlined
                assert role == 'subject'
                var = next(varmaker)
                # keep outer join anyway, we don't want .complete to crash on
                # missing mandatory relation (see #1058267)
                rql.append('%s %s %s?' % (V, rtype, var))
                selected.append(((rtype, role), var))
        if selected:
            # select V, we need it as the left most selected variable
            # if some outer join are included to fetch inlined relations
            rql = 'Any %s,%s %s' % (V, ','.join(var for attr, var in selected),
                                    ','.join(rql))
            try:
                rset = self._cw.execute(rql, {'x': self.eid}, build_descr=False)[0]
            except IndexError:
                raise Exception('unable to fetch attributes for entity with eid %s'
                                % self.eid)
            # handle attributes: columns 1..lastattr-1 of the single result row
            for i in range(1, lastattr):
                self.cw_attr_cache[str(selected[i-1][0])] = rset[i]
            # handle relations: remaining columns hold target eids (or None)
            for i in range(lastattr, len(rset)):
                rtype, role = selected[i-1][0]
                value = rset[i]
                if value is None:
                    # missing optional relation: cache an empty result set
                    rrset = ResultSet([], rql, {'x': self.eid})
                    rrset.req = self._cw
                else:
                    rrset = self._cw.eid_rset(value)
                self.cw_set_relation_cache(rtype, role, rrset)
+
    def cw_attr_value(self, name):
        """get value for the attribute relation `name`, querying the
        repository to get the value if necessary.

        :type name: str
        :param name: name of the attribute to get
        """
        try:
            return self.cw_attr_cache[name]
        except KeyError:
            if not self.cw_is_saved():
                # not yet stored: no value can be fetched from the repository
                return None
            rql = "Any A WHERE X eid %%(x)s, X %s A" % name
            try:
                rset = self._cw.execute(rql, {'x': self.eid})
            except Unauthorized:
                self.cw_attr_cache[name] = value = None
            else:
                assert rset.rowcount <= 1, (self, rql, rset.rowcount)
                try:
                    self.cw_attr_cache[name] = value = rset.rows[0][0]
                except IndexError:
                    # probably a multisource error
                    self.critical("can't get value for attribute %s of entity with eid %s",
                                  name, self.eid)
                    if self.e_schema.destination(name) == 'String':
                        self.cw_attr_cache[name] = value = self._cw._('unaccessible')
                    else:
                        self.cw_attr_cache[name] = value = None
            return value
+
    def related(self, rtype, role='subject', limit=None, entities=False,  # XXX .cw_related
                safe=False, targettypes=None):
        """returns a resultset of related entities

        :param rtype:
          the name of the relation, aka relation type
        :param role:
          the role played by 'self' in the relation ('subject' or 'object')
        :param limit:
          resultset's maximum size
        :param entities:
          if True, the entities are returned; if False, a result set is returned
        :param safe:
          if True, an empty rset/list of entities will be returned in case of
          :exc:`Unauthorized`, else (the default), the exception is propagated
        :param targettypes:
          a tuple of target entity types to restrict the query
        """
        rtype = str(rtype)
        # Caching restricted/limited results is best avoided.
        cacheable = limit is None and targettypes is None
        if cacheable:
            cache_key = '%s_%s' % (rtype, role)
            if cache_key in self._cw_related_cache:
                # cache holds a (rset, entities) 2-uple, indexed by the
                # boolean `entities` flag
                return self._cw_related_cache[cache_key][entities]
        if not self.has_eid():
            # unsaved entity: nothing can be related to it in the database
            if entities:
                return []
            return self._cw.empty_rset()
        rql = self.cw_related_rql(rtype, role, limit=limit, targettypes=targettypes)
        try:
            rset = self._cw.execute(rql, {'x': self.eid})
        except Unauthorized:
            if not safe:
                raise
            rset = self._cw.empty_rset()
        if entities:
            if cacheable:
                # store in the cache then recurse, so the cached entities
                # tuple is what gets returned
                self.cw_set_relation_cache(rtype, role, rset)
                return self.related(rtype, role, entities=entities)
            return list(rset.entities())
        else:
            return rset
+
+ def cw_related_rql(self, rtype, role='subject', targettypes=None, limit=None):
+ return self.cw_related_rqlst(
+ rtype, role=role, targettypes=targettypes, limit=limit).as_string()
+
    def cw_related_rqlst(self, rtype, role='subject', targettypes=None,
                         limit=None, sort_terms=None):
        """Return the select node of the RQL query of entities related through
        `rtype` with this entity as `role`, possibly filtered by
        `targettypes`.

        The RQL query can be given a `limit` and sort terms with `sort_terms`
        arguments being a sequence of ``(<attr name>, <ascending?>)``
        (e.g. ``[('name', True), ('modification_date', False)]`` would lead to
        a sorting by ``name``, ascending and then by ``modification_date``,
        descending. If `sort_terms` is not specified the default sorting is by
        ``modification_date``, descending.
        """
        vreg = self._cw.vreg
        rschema = vreg.schema[rtype]
        select = Select()
        # X is the searched related entity, E is this entity
        mainvar, evar = select.get_variable('X'), select.get_variable('E')
        select.add_selected(mainvar)
        if limit is not None:
            select.set_limit(limit)
        select.add_eid_restriction(evar, 'x', 'Substitute')
        if role == 'subject':
            rel = make_relation(evar, rtype, (mainvar,), VariableRef)
            select.add_restriction(rel)
            if targettypes is None:
                targettypes = rschema.objects(self.e_schema)
            else:
                select.add_constant_restriction(mainvar, 'is',
                                                targettypes, 'etype')
            gcard = greater_card(rschema, (self.e_schema,), targettypes, 0)
        else:
            rel = make_relation(mainvar, rtype, (evar,), VariableRef)
            select.add_restriction(rel)
            if targettypes is None:
                targettypes = rschema.subjects(self.e_schema)
            else:
                select.add_constant_restriction(mainvar, 'is', targettypes,
                                                'etype')
            gcard = greater_card(rschema, targettypes, (self.e_schema,), 1)
        etypecls = vreg['etypes'].etype_class(targettypes[0])
        if len(targettypes) > 1:
            # several target types: only fetch their common attributes and
            # deal with relations that are ambiguous among them
            fetchattrs = vreg['etypes'].fetch_attrs(targettypes)
            self._fetch_ambiguous_rtypes(select, mainvar, fetchattrs,
                                         targettypes, vreg.schema)
        else:
            fetchattrs = etypecls.fetch_attrs
        etypecls.fetch_rqlst(self._cw.user, select, mainvar, fetchattrs,
                             settype=False)
        # optimisation: remove ORDERBY if cardinality is 1 or ? (though
        # greater_card return 1 for those both cases)
        if gcard == '1':
            select.remove_sort_terms()
        elif not select.orderby:
            # Build a mapping (rtype, node) for relations usable for sorting.
            sorting_relations = {}
            for r in select.where.get_nodes(RqlRelation):
                lhs, rhs = r.children
                if lhs.variable != mainvar:
                    continue
                if r.operator() != '=':
                    continue
                rhs_term = rhs.children[0]
                if not isinstance(rhs_term, VariableRef):
                    continue
                sorting_relations[r.r_type] = r
            sort_terms = sort_terms or [('modification_date', False)]
            for term, order in sort_terms:
                # Reuse an already-retrieved relation for sorting when
                # possible instead of adding another variable. This should
                # not be problematic, but it is with sqlserver, see ticket
                # #694445.
                rel = sorting_relations.get(term)
                if rel is None:
                    mdvar = select.make_variable()
                    rel = make_relation(mainvar, term, (mdvar,), VariableRef)
                    select.add_restriction(rel)
                var = rel.children[1].children[0].variable
                select.add_sort_var(var, asc=order)
        return select
+
+ # generic vocabulary methods ##############################################
+
+ def cw_linkable_rql(self, rtype, targettype, role, ordermethod=None,
+ vocabconstraints=True, lt_infos={}, limit=None):
+ """build a rql to fetch targettype entities either related or unrelated
+ to this entity using (rtype, role) relation.
+
+ Consider relation permissions so that returned entities may be actually
+ linked by `rtype`.
+
+ `lt_infos` are supplementary informations, usually coming from __linkto
+ parameter, that can help further restricting the results in case current
+ entity is not yet created. It is a dict describing entities the current
+ entity will be linked to, which keys are (rtype, role) tuples and values
+ are a list of eids.
+ """
+ return self._cw_compute_linkable_rql(rtype, targettype, role, ordermethod=None,
+ vocabconstraints=vocabconstraints,
+ lt_infos=lt_infos, limit=limit,
+ unrelated_only=False)
+
+ def cw_unrelated_rql(self, rtype, targettype, role, ordermethod=None,
+ vocabconstraints=True, lt_infos={}, limit=None):
+ """build a rql to fetch `targettype` entities unrelated to this entity
+ using (rtype, role) relation.
+
+ Consider relation permissions so that returned entities may be actually
+ linked by `rtype`.
+
+ `lt_infos` are supplementary informations, usually coming from __linkto
+ parameter, that can help further restricting the results in case current
+ entity is not yet created. It is a dict describing entities the current
+ entity will be linked to, which keys are (rtype, role) tuples and values
+ are a list of eids.
+ """
+ return self._cw_compute_linkable_rql(rtype, targettype, role, ordermethod=None,
+ vocabconstraints=vocabconstraints,
+ lt_infos=lt_infos, limit=limit,
+ unrelated_only=True)
+
    def _cw_compute_linkable_rql(self, rtype, targettype, role, ordermethod=None,
                                 vocabconstraints=True, lt_infos={}, limit=None,
                                 unrelated_only=False):
        """build a rql to fetch `targettype` entities that may be related to
        this entity using the (rtype, role) relation.

        By default (unrelated_only=False), this includes the already linked
        entities as well as the unrelated ones. If `unrelated_only` is True, the
        rql filters out the already related entities.

        Return a 2-uple (rql string, query args dict).
        """
        ordermethod = ordermethod or 'fetch_unrelated_order'
        rschema = self._cw.vreg.schema.rschema(rtype)
        rdef = rschema.role_rdef(self.e_schema, targettype, role)
        rewriter = RQLRewriter(self._cw)
        select = Select()
        # initialize some variables according to the `role` of `self` in the
        # relation (variable names must respect constraints conventions):
        # * variable for myself (`evar`)
        # * variable for searched entities (`searchedvar`)
        if role == 'subject':
            evar = subjvar = select.get_variable('S')
            searchedvar = objvar = select.get_variable('O')
        else:
            searchedvar = subjvar = select.get_variable('S')
            evar = objvar = select.get_variable('O')
        select.add_selected(searchedvar)
        if limit is not None:
            select.set_limit(limit)
        # initialize some variables according to `self` existence
        if rdef.role_cardinality(neg_role(role)) in '?1':
            # if cardinality in '1?', we want a target entity which isn't
            # already linked using this relation
            variable = select.make_variable()
            if role == 'subject':
                rel = make_relation(variable, rtype, (searchedvar,), VariableRef)
            else:
                rel = make_relation(searchedvar, rtype, (variable,), VariableRef)
            select.add_restriction(Not(rel))
        elif self.has_eid() and unrelated_only:
            # elif we have an eid, we don't want a target entity which is
            # already linked to ourself through this relation
            rel = make_relation(subjvar, rtype, (objvar,), VariableRef)
            select.add_restriction(Not(rel))
        if self.has_eid():
            rel = make_relation(evar, 'eid', ('x', 'Substitute'), Constant)
            select.add_restriction(rel)
            args = {'x': self.eid}
            if role == 'subject':
                sec_check_args = {'fromeid': self.eid}
            else:
                sec_check_args = {'toeid': self.eid}
            existant = None  # instead of 'SO', improve perfs
        else:
            args = {}
            sec_check_args = {}
            existant = searchedvar.name
            # undefine unused evar, or the type resolver will consider it
            select.undefine_variable(evar)
        # retrieve entity class for targettype to compute base rql
        etypecls = self._cw.vreg['etypes'].etype_class(targettype)
        etypecls.fetch_rqlst(self._cw.user, select, searchedvar,
                             ordermethod=ordermethod)
        # from now on, we need variable type resolving
        self._cw.vreg.solutions(self._cw, select, args)
        # insert RQL expressions for schema constraints into the rql syntax tree
        if vocabconstraints:
            # RQLConstraint is a subclass for RQLVocabularyConstraint, so they
            # will be included as well
            cstrcls = (RQLVocabularyConstraint, RQLConstraint)
        else:
            cstrcls = RQLConstraint
        lt_infos = pruned_lt_info(self.e_schema, lt_infos or {})
        # if there are still lt_infos, use set to keep track of added eid
        # relations (adding twice the same eid relation is incorrect RQL)
        eidvars = set()
        for cstr in rdef.constraints:
            # consider constraint.mainvars to check if constraint apply
            if isinstance(cstr, cstrcls) and searchedvar.name in cstr.mainvars:
                if not self.has_eid():
                    if lt_infos:
                        # we can perhaps further restrict with linkto infos using
                        # a custom constraint built from cstr and lt_infos
                        cstr = build_cstr_with_linkto_infos(
                            cstr, args, searchedvar, evar, lt_infos, eidvars)
                        if cstr is None:
                            continue  # could not build constraint -> discard
                    elif evar.name in cstr.mainvars:
                        continue
                # compute a varmap suitable to RQLRewriter.rewrite argument
                varmap = dict((v, v) for v in (searchedvar.name, evar.name)
                              if v in select.defined_vars and v in cstr.mainvars)
                # rewrite constraint by constraint since we want a AND between
                # expressions.
                rewriter.rewrite(select, [(varmap, (cstr,))], args, existant)
        # insert security RQL expressions granting the permission to 'add' the
        # relation into the rql syntax tree, if necessary
        rqlexprs = rdef.get_rqlexprs('add')
        if not self.has_eid():
            rqlexprs = [rqlexpr for rqlexpr in rqlexprs
                        if searchedvar.name in rqlexpr.mainvars]
        if rqlexprs and not rdef.has_perm(self._cw, 'add', **sec_check_args):
            # compute a varmap suitable to RQLRewriter.rewrite argument
            varmap = dict((v, v) for v in (searchedvar.name, evar.name)
                          if v in select.defined_vars)
            # rewrite all expressions at once since we want a OR between them.
            rewriter.rewrite(select, [(varmap, rqlexprs)], args, existant)
        # ensure we have an order defined
        if not select.orderby:
            select.add_sort_var(select.defined_vars[searchedvar.name])
        # we're done, turn the rql syntax tree as a string
        rql = select.as_string()
        return rql, args
+
+ def unrelated(self, rtype, targettype, role='subject', limit=None,
+ ordermethod=None, lt_infos={}): # XXX .cw_unrelated
+ """return a result set of target type objects that may be related
+ by a given relation, with self as subject or object
+ """
+ try:
+ rql, args = self.cw_unrelated_rql(rtype, targettype, role, limit=limit,
+ ordermethod=ordermethod, lt_infos=lt_infos)
+ except Unauthorized:
+ return self._cw.empty_rset()
+ return self._cw.execute(rql, args)
+
+ # relations cache handling #################################################
+
+ def cw_relation_cached(self, rtype, role):
+ """return None if the given relation isn't already cached on the
+ instance, else the content of the cache (a 2-uple (rset, entities)).
+ """
+ return self._cw_related_cache.get('%s_%s' % (rtype, role))
+
+ def cw_set_relation_cache(self, rtype, role, rset):
+ """set cached values for the given relation"""
+ if rset:
+ related = list(rset.entities(0))
+ rschema = self._cw.vreg.schema.rschema(rtype)
+ if role == 'subject':
+ rcard = rschema.rdef(self.e_schema, related[0].e_schema).cardinality[1]
+ target = 'object'
+ else:
+ rcard = rschema.rdef(related[0].e_schema, self.e_schema).cardinality[0]
+ target = 'subject'
+ if rcard in '?1':
+ for rentity in related:
+ rentity._cw_related_cache['%s_%s' % (rtype, target)] = (
+ self.as_rset(), (self,))
+ else:
+ related = ()
+ self._cw_related_cache['%s_%s' % (rtype, role)] = (rset, related)
+
+ def cw_clear_relation_cache(self, rtype=None, role=None):
+ """clear cached values for the given relation or the entire cache if
+ no relation is given
+ """
+ if rtype is None:
+ self._cw_related_cache.clear()
+ self._cw_adapters_cache.clear()
+ else:
+ assert role
+ self._cw_related_cache.pop('%s_%s' % (rtype, role), None)
+
+ def cw_clear_all_caches(self):
+ """flush all caches on this entity. Further attributes/relations access
+ will triggers new database queries to get back values.
+
+ If you use custom caches on your entity class (take care to @cached!),
+ you should override this method to clear them as well.
+ """
+ # clear attributes cache
+ self._cw_completed = False
+ self.cw_attr_cache.clear()
+ # clear relations cache
+ self.cw_clear_relation_cache()
+ # rest path unique cache
+ try:
+ del self.__unique
+ except AttributeError:
+ pass
+
+ # raw edition utilities ###################################################
+
+ def cw_set(self, **kwargs):
+ """update this entity using given attributes / relation, working in the
+ same fashion as :meth:`cw_instantiate`.
+
+ Example (in a shell session):
+
+ >>> c = rql('Any X WHERE X is Company').get_entity(0, 0)
+ >>> p = rql('Any X WHERE X is Person').get_entity(0, 0)
+ >>> c.cw_set(name=u'Logilab')
+ >>> p.cw_set(firstname=u'John', lastname=u'Doe', works_for=c)
+
+ You can also set relations where the entity has 'object' role by
+ prefixing the relation name by 'reverse_'. Also, relation values may be
+ an entity or eid, a list of entities or eids, or None (meaning that all
+ relations of the given type from or to this object should be deleted).
+ """
+ assert kwargs
+ assert self.cw_is_saved(), "should not call set_attributes while entity "\
+ "hasn't been saved yet"
+ rql, qargs, pendingrels, attrcache = self._cw_build_entity_query(kwargs)
+ if rql:
+ rql = 'SET ' + rql
+ qargs['x'] = self.eid
+ if ' WHERE ' in rql:
+ rql += ', X eid %(x)s'
+ else:
+ rql += ' WHERE X eid %(x)s'
+ self._cw.execute(rql, qargs)
+ # update current local object _after_ the rql query to avoid
+ # interferences between the query execution itself and the cw_edited /
+ # skip_security machinery
+ self._cw_update_attr_cache(attrcache)
+ self._cw_handle_pending_relations(self.eid, pendingrels, self._cw.execute)
+ # XXX update relation cache
+
+ def cw_delete(self, **kwargs):
+ assert self.has_eid(), self.eid
+ self._cw.execute('DELETE %s X WHERE X eid %%(x)s' % self.e_schema,
+ {'x': self.eid}, **kwargs)
+
+ # server side utilities ####################################################
+
+ def _cw_clear_local_perm_cache(self, action):
+ for rqlexpr in self.e_schema.get_rqlexprs(action):
+ self._cw.local_perm_cache.pop((rqlexpr.eid, (('x', self.eid),)), None)
+
+ # deprecated stuff #########################################################
+
+ @deprecated('[3.16] use cw_set() instead of set_attributes()')
+ def set_attributes(self, **kwargs): # XXX cw_set_attributes
+ if kwargs:
+ self.cw_set(**kwargs)
+
+ @deprecated('[3.16] use cw_set() instead of set_relations()')
+ def set_relations(self, **kwargs): # XXX cw_set_relations
+ """add relations to the given object. To set a relation where this entity
+ is the object of the relation, use 'reverse_' as argument name.
+
+ Values may be an entity or eid, a list of entities or eids, or None
+ (meaning that all relations of the given type from or to this object
+ should be deleted).
+ """
+ if kwargs:
+ self.cw_set(**kwargs)
+
+ @deprecated('[3.13] use entity.cw_clear_all_caches()')
+ def clear_all_caches(self):
+ return self.cw_clear_all_caches()
+
+
+# attribute and relation descriptors ##########################################
+
+class Attribute(object):
+ """descriptor that controls schema attribute access"""
+
+ def __init__(self, attrname):
+ assert attrname != 'eid'
+ self._attrname = attrname
+
+ def __get__(self, eobj, eclass):
+ if eobj is None:
+ return self
+ return eobj.cw_attr_value(self._attrname)
+
+ @deprecated('[3.10] assign to entity.cw_attr_cache[attr] or entity.cw_edited[attr]')
+ def __set__(self, eobj, value):
+ if hasattr(eobj, 'cw_edited') and not eobj.cw_edited.saved:
+ eobj.cw_edited[self._attrname] = value
+ else:
+ eobj.cw_attr_cache[self._attrname] = value
+
+
+class Relation(object):
+ """descriptor that controls schema relation access"""
+
+ def __init__(self, rschema, role):
+ self._rtype = rschema.type
+ self._role = role
+
+ def __get__(self, eobj, eclass):
+ if eobj is None:
+ raise AttributeError('%s can only be accessed from instances'
+ % self._rtype)
+ return eobj.related(self._rtype, self._role, entities=True)
+
+ def __set__(self, eobj, value):
+ raise NotImplementedError
+
+
+from logging import getLogger
+from cubicweb import set_log_methods
+set_log_methods(Entity, getLogger('cubicweb.entity'))
diff -r 1400aee10df4 -r faf279e33298 cubicweb/etwist/__init__.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/etwist/__init__.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,20 @@
+# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+""" CW - nevow/twisted client
+
+"""
diff -r 1400aee10df4 -r faf279e33298 cubicweb/etwist/http.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/etwist/http.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,43 @@
+"""twisted server for CubicWeb web instances
+
+:organization: Logilab
+:copyright: 2001-2011 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
+"""
+
+__docformat__ = "restructuredtext en"
+
+class HTTPResponse(object):
+ """An object representing an HTTP Response to be sent to the client.
+ """
+ def __init__(self, twisted_request, code=None, headers=None, stream=None):
+ self._headers_out = headers
+ self._twreq = twisted_request
+ self._stream = stream
+ self._code = code
+
+ self._init_headers()
+ self._finalize()
+
+ def _init_headers(self):
+ if self._headers_out is None:
+ return
+ # initialize headers
+ for k, values in self._headers_out.getAllRawHeaders():
+ self._twreq.responseHeaders.setRawHeaders(k, values)
+ # add content-length if not present
+ if (self._headers_out.getHeader('content-length') is None
+ and self._stream is not None):
+ self._twreq.setHeader('content-length', len(self._stream))
+
+ def _finalize(self):
+ # we must set code before writing anything, else it's too late
+ if self._code is not None:
+ self._twreq.setResponseCode(self._code)
+ if self._stream is not None:
+ self._twreq.write(str(self._stream))
+ self._twreq.finish()
+
+ def __repr__(self):
+ return "<%s.%s code=%d>" % (self.__module__, self.__class__.__name__, self._code)
diff -r 1400aee10df4 -r faf279e33298 cubicweb/etwist/request.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/etwist/request.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,59 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""Twisted request handler for CubicWeb"""
+
+__docformat__ = "restructuredtext en"
+
+
+from cubicweb.web.request import CubicWebRequestBase
+
+
+class CubicWebTwistedRequestAdapter(CubicWebRequestBase):
+ """ from twisted .req to cubicweb .form
+ req.files are put into .form[]
+ """
+ def __init__(self, req, vreg, https):
+ self._twreq = req
+ super(CubicWebTwistedRequestAdapter, self).__init__(
+ vreg, https, req.args, headers=req.received_headers)
+ for key, name_stream_list in req.files.items():
+ for name, stream in name_stream_list:
+ if name is not None:
+ name = unicode(name, self.encoding)
+ self.form.setdefault(key, []).append((name, stream))
+ # 3.16.4 backward compat
+ if len(self.form[key]) == 1:
+ self.form[key] = self.form[key][0]
+ self.content = self._twreq.content # stream
+
+ def http_method(self):
+ """returns 'POST', 'GET', 'HEAD', etc."""
+ return self._twreq.method
+
+ def relative_path(self, includeparams=True):
+ """return the normalized path of the request (ie at least relative to
+ the instance's root, but some other normalization may be needed so that
+ the returned path may be used to compare to generated urls
+
+ :param includeparams:
+ boolean indicating if GET form parameters should be kept in the path
+ """
+ path = self._twreq.uri[1:] # remove the root '/'
+ if not includeparams:
+ path = path.split('?', 1)[0]
+ return path
diff -r 1400aee10df4 -r faf279e33298 cubicweb/etwist/server.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/etwist/server.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,298 @@
+# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""twisted server for CubicWeb web instances"""
+__docformat__ = "restructuredtext en"
+
+import sys
+import select
+import traceback
+import threading
+from cgi import FieldStorage, parse_header
+
+from six.moves.urllib.parse import urlsplit, urlunsplit
+
+from cubicweb.statsd_logger import statsd_timeit
+
+from twisted.internet import reactor, task, threads
+from twisted.web import http, server
+from twisted.web import resource
+from twisted.web.server import NOT_DONE_YET
+
+
+from logilab.mtconverter import xml_escape
+from logilab.common.decorators import monkeypatch
+
+from cubicweb import ConfigurationError, CW_EVENT_MANAGER
+from cubicweb.utils import json_dumps
+from cubicweb.web import DirectResponse
+from cubicweb.web.application import CubicWebPublisher
+from cubicweb.etwist.request import CubicWebTwistedRequestAdapter
+from cubicweb.etwist.http import HTTPResponse
+
+def start_task(interval, func):
+ lc = task.LoopingCall(func)
+ # wait until interval has expired to actually start the task, else we have
+ # to wait all tasks to be finished for the server to be actually started
+ lc.start(interval, now=False)
+
+
+class CubicWebRootResource(resource.Resource):
+ def __init__(self, config, repo):
+ resource.Resource.__init__(self)
+ self.config = config
+ # instantiate publisher here and not in init_publisher to get some
+ # checks done before daemonization (eg versions consistency)
+ self.appli = CubicWebPublisher(repo, config)
+ self.base_url = config['base-url']
+ self.https_url = config['https-url']
+ global MAX_POST_LENGTH
+ MAX_POST_LENGTH = config['max-post-length']
+
+ def init_publisher(self):
+ config = self.config
+ # when we have an in-memory repository, clean unused sessions every XX
+ # seconds and properly shutdown the server
+ if config['repository-uri'] == 'inmemory://':
+ if config.mode != 'test':
+ reactor.addSystemEventTrigger('before', 'shutdown',
+ self.shutdown_event)
+ self.appli.repo.start_looping_tasks()
+ self.set_url_rewriter()
+ CW_EVENT_MANAGER.bind('after-registry-reload', self.set_url_rewriter)
+
+ def start_service(self):
+ start_task(self.appli.session_handler.clean_sessions_interval,
+ self.appli.session_handler.clean_sessions)
+
+ def set_url_rewriter(self):
+ self.url_rewriter = self.appli.vreg['components'].select_or_none('urlrewriter')
+
+ def shutdown_event(self):
+ """callback fired when the server is shutting down to properly
+ clean opened sessions
+ """
+ self.appli.repo.shutdown()
+
+ def getChild(self, path, request):
+ """Indicate which resource to use to process down the URL's path"""
+ return self
+
+ def render(self, request):
+ """Render a page from the root resource"""
+ # reload modified files in debug mode
+ if self.config.debugmode:
+ self.config.uiprops.reload_if_needed()
+ if self.https_url:
+ self.config.https_uiprops.reload_if_needed()
+ self.appli.vreg.reload_if_needed()
+ if self.config['profile']: # default profiler don't trace threads
+ return self.render_request(request)
+ else:
+ deferred = threads.deferToThread(self.render_request, request)
+ return NOT_DONE_YET
+
+ @statsd_timeit
+ def render_request(self, request):
+ try:
+ # processing HUGE files (hundred of megabytes) in http.processReceived
+ # blocks other HTTP requests processing
+ # due to the clumsy & slow parsing algorithm of cgi.FieldStorage
+ # so we deferred that part to the cubicweb thread
+ request.process_multipart()
+ return self._render_request(request)
+ except Exception:
+ trace = traceback.format_exc()
+            return HTTPResponse(stream='<div class="pycber">%s</div>' % xml_escape(trace),
+                                code=500, twisted_request=request)
+
+ def _render_request(self, request):
+ origpath = request.path
+ host = request.host
+ # dual http/https access handling: expect a rewrite rule to prepend
+ # 'https' to the path to detect https access
+ https = False
+ if origpath.split('/', 2)[1] == 'https':
+ origpath = origpath[6:]
+ request.uri = request.uri[6:]
+ https = True
+ if self.url_rewriter is not None:
+ # XXX should occur before authentication?
+ path = self.url_rewriter.rewrite(host, origpath, request)
+ request.uri.replace(origpath, path, 1)
+ else:
+ path = origpath
+ req = CubicWebTwistedRequestAdapter(request, self.appli.vreg, https)
+ try:
+ ### Try to generate the actual request content
+ content = self.appli.handle_request(req, path)
+ except DirectResponse as ex:
+ return ex.response
+ # at last: create twisted object
+ return HTTPResponse(code = req.status_out,
+ headers = req.headers_out,
+ stream = content,
+ twisted_request=req._twreq)
+
+ # these are overridden by set_log_methods below
+ # only defining here to prevent pylint from complaining
+ @classmethod
+ def debug(cls, msg, *a, **kw):
+ pass
+ info = warning = error = critical = exception = debug
+
+
+JSON_PATHS = set(('json',))
+FRAME_POST_PATHS = set(('validateform',))
+
+orig_gotLength = http.Request.gotLength
+@monkeypatch(http.Request)
+def gotLength(self, length):
+ orig_gotLength(self, length)
+ if length > MAX_POST_LENGTH: # length is 0 on GET
+ path = self.channel._path.split('?', 1)[0].rstrip('/').rsplit('/', 1)[-1]
+ self.clientproto = 'HTTP/1.1' # not yet initialized
+ self.channel.persistent = 0 # force connection close on cleanup
+ self.setResponseCode(http.REQUEST_ENTITY_TOO_LARGE)
+ if path in JSON_PATHS: # XXX better json path detection
+ self.setHeader('content-type',"application/json")
+ body = json_dumps({'reason': 'request max size exceeded'})
+ elif path in FRAME_POST_PATHS: # XXX better frame post path detection
+ self.setHeader('content-type',"text/html")
+            body = ('<script type="text/javascript">'
+                    'window.parent.handleFormValidationResponse(null, null, null, %s, null);'
+                    '</script>' % json_dumps( (False, 'request max size exceeded', None) ))
+ else:
+ self.setHeader('content-type',"text/html")
+            body = ("<html><head><title>Processing Failed</title></head><body>"
+                    "<b>request max size exceeded</b></body></html>")
+ self.setHeader('content-length', str(len(body)))
+ self.write(body)
+ # see request.finish(). Done here since we get error due to not full
+ # initialized request
+ self.finished = 1
+ if not self.queued:
+ self._cleanup()
+ for d in self.notifications:
+ d.callback(None)
+ self.notifications = []
+
+@monkeypatch(http.Request)
+def requestReceived(self, command, path, version):
+ """Called by channel when all data has been received.
+
+ This method is not intended for users.
+ """
+ self.content.seek(0, 0)
+ self.args = {}
+ self.files = {}
+ self.stack = []
+ self.method, self.uri = command, path
+ self.clientproto = version
+ x = self.uri.split('?', 1)
+ if len(x) == 1:
+ self.path = self.uri
+ else:
+ self.path, argstring = x
+ self.args = http.parse_qs(argstring, 1)
+ # cache the client and server information, we'll need this later to be
+ # serialized and sent with the request so CGIs will work remotely
+ self.client = self.channel.transport.getPeer()
+ self.host = self.channel.transport.getHost()
+ # Argument processing
+ ctype = self.getHeader('content-type')
+ self._do_process_multipart = False
+ if self.method == "POST" and ctype:
+ key, pdict = parse_header(ctype)
+ if key == 'application/x-www-form-urlencoded':
+ self.args.update(http.parse_qs(self.content.read(), 1))
+ self.content.seek(0)
+ elif key == 'multipart/form-data':
+ # defer this as it can be extremely time consumming
+ # with big files
+ self._do_process_multipart = True
+ self.process()
+
+@monkeypatch(http.Request)
+def process_multipart(self):
+ if not self._do_process_multipart:
+ return
+ form = FieldStorage(self.content, self.received_headers,
+ environ={'REQUEST_METHOD': 'POST'},
+ keep_blank_values=1,
+ strict_parsing=1)
+ for key in form:
+ values = form[key]
+ if not isinstance(values, list):
+ values = [values]
+ for value in values:
+ if value.filename:
+ if value.done != -1: # -1 is transfer has been interrupted
+ self.files.setdefault(key, []).append((value.filename, value.file))
+ else:
+ self.files.setdefault(key, []).append((None, None))
+ else:
+ self.args.setdefault(key, []).append(value.value)
+
+from logging import getLogger
+from cubicweb import set_log_methods
+LOGGER = getLogger('cubicweb.twisted')
+set_log_methods(CubicWebRootResource, LOGGER)
+
+def run(config, debug=None, repo=None):
+ # repo may by passed during test.
+ #
+ # Test has already created a repo object so we should not create a new one.
+ # Explicitly passing the repo object avoid relying on the fragile
+ # config.repository() cache. We could imagine making repo a mandatory
+ # argument and receives it from the starting command directly.
+ if debug is not None:
+ config.debugmode = debug
+ config.check_writeable_uid_directory(config.appdatahome)
+ # create the site
+ if repo is None:
+ repo = config.repository()
+ root_resource = CubicWebRootResource(config, repo)
+ website = server.Site(root_resource)
+ # serve it via standard HTTP on port set in the configuration
+ port = config['port'] or 8080
+ interface = config['interface']
+ reactor.suggestThreadPoolSize(config['webserver-threadpool-size'])
+ reactor.listenTCP(port, website, interface=interface)
+ if not config.debugmode:
+ if sys.platform == 'win32':
+ raise ConfigurationError("Under windows, you must use the service management "
+ "commands (e.g : 'net start my_instance)'")
+ from logilab.common.daemon import daemonize
+ LOGGER.info('instance started in the background on %s', root_resource.base_url)
+ whichproc = daemonize(config['pid-file'], umask=config['umask'])
+ if whichproc: # 1 = orig process, 2 = first fork, None = second fork (eg daemon process)
+ return whichproc # parent process
+ root_resource.init_publisher() # before changing uid
+ if config['uid'] is not None:
+ from logilab.common.daemon import setugid
+ setugid(config['uid'])
+ root_resource.start_service()
+ LOGGER.info('instance started on %s', root_resource.base_url)
+ # avoid annoying warnign if not in Main Thread
+ signals = threading.currentThread().getName() == 'MainThread'
+ if config['profile']:
+ import cProfile
+ cProfile.runctx('reactor.run(installSignalHandlers=%s)' % signals,
+ globals(), locals(), config['profile'])
+ else:
+ reactor.run(installSignalHandlers=signals)
diff -r 1400aee10df4 -r faf279e33298 cubicweb/etwist/service.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/etwist/service.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,99 @@
+# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+from __future__ import print_function
+
+import os
+import sys
+
+try:
+ import win32serviceutil
+ import win32service
+except ImportError:
+ print('Win32 extensions for Python are likely not installed.')
+ sys.exit(3)
+
+from os.path import join
+
+from cubicweb.etwist.server import (CubicWebRootResource, reactor, server)
+
+from logilab.common.shellutils import rm
+
+import logging
+from logging import getLogger, handlers
+from cubicweb import set_log_methods
+from cubicweb.cwconfig import CubicWebConfiguration as cwcfg
+
+def _check_env(env):
+ env_vars = ('CW_INSTANCES_DIR', 'CW_INSTANCES_DATA_DIR', 'CW_RUNTIME_DIR')
+ for var in env_vars:
+ if var not in env:
+ raise Exception('The environment variables %s must be set.' % \
+ ', '.join(env_vars))
+ if not env.get('USERNAME'):
+ env['USERNAME'] = 'cubicweb'
+
+class CWService(object, win32serviceutil.ServiceFramework):
+ _svc_name_ = None
+ _svc_display_name_ = None
+ instance = None
+
+ def __init__(self, *args, **kwargs):
+ win32serviceutil.ServiceFramework.__init__(self, *args, **kwargs)
+ cwcfg.load_cwctl_plugins()
+ logger = getLogger('cubicweb')
+ set_log_methods(CubicWebRootResource, logger)
+
+ def SvcStop(self):
+ self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING)
+ logger = getLogger('cubicweb.twisted')
+ logger.info('stopping %s service' % self.instance)
+ reactor.stop()
+ self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING)
+
+ def SvcDoRun(self):
+ self.ReportServiceStatus(win32service.SERVICE_START_PENDING)
+ logger = getLogger('cubicweb.twisted')
+ handler = handlers.NTEventLogHandler('cubicweb')
+ handler.setLevel(logging.INFO)
+ logger.addHandler(handler)
+ logger.info('starting %s service' % self.instance)
+ try:
+ _check_env(os.environ)
+ # create the site
+ config = cwcfg.config_for(self.instance)
+ config.init_log(force=True)
+ config.debugmode = False
+ logger.info('starting cubicweb instance %s ', self.instance)
+ config.info('clear ui caches')
+ for cachedir in ('uicache', 'uicachehttps'):
+ rm(join(config.appdatahome, cachedir, '*'))
+ root_resource = CubicWebRootResource(config, config.repository())
+ website = server.Site(root_resource)
+ # serve it via standard HTTP on port set in the configuration
+ port = config['port'] or 8080
+ logger.info('listening on port %s' % port)
+ reactor.listenTCP(port, website)
+ root_resource.init_publisher()
+ root_resource.start_service()
+ logger.info('instance started on %s', root_resource.base_url)
+ self.ReportServiceStatus(win32service.SERVICE_RUNNING)
+ reactor.run()
+ except Exception as e:
+ logger.error('service %s stopped (cause: %s)' % (self.instance, e))
+ logger.exception('what happened ...')
+ self.ReportServiceStatus(win32service.SERVICE_STOPPED)
diff -r 1400aee10df4 -r faf279e33298 cubicweb/etwist/test/data/views.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/etwist/test/data/views.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,29 @@
+# copyright 2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""only for unit tests !"""
+
+from cubicweb.view import View
+from cubicweb.predicates import match_http_method
+
+class PutView(View):
+ __regid__ = 'put'
+ __select__ = match_http_method('PUT') | match_http_method('POST')
+ binary = True
+
+ def call(self):
+ self.w(self._cw.content.read())
diff -r 1400aee10df4 -r faf279e33298 cubicweb/etwist/test/unittest_server.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/etwist/test/unittest_server.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,38 @@
+# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+
+import os, os.path as osp, glob
+import urllib
+
+from cubicweb.devtools.httptest import CubicWebServerTC
+
+
+class ETwistHTTPTC(CubicWebServerTC):
+ def test_put_content(self):
+ data = {'hip': 'hop'}
+ headers = {'Content-Type': 'application/x-www-form-urlencoded'}
+ body = urllib.urlencode(data)
+ response = self.web_request('?vid=put', method='PUT', body=body)
+ self.assertEqual(body, response.body)
+ response = self.web_request('?vid=put', method='POST', body=body,
+ headers=headers)
+ self.assertEqual(body, response.body)
+
+if __name__ == '__main__':
+ from logilab.common.testlib import unittest_main
+ unittest_main()
diff -r 1400aee10df4 -r faf279e33298 cubicweb/etwist/twconfig.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/etwist/twconfig.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,115 @@
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""twisted server configurations:
+
+* the "all-in-one" configuration to get a web instance running in a twisted
+ web server integrating a repository server in the same process (only available
+ if the repository part of the software is installed
+"""
+__docformat__ = "restructuredtext en"
+
+from os.path import join
+
+from logilab.common.configuration import Method, merge_options
+
+from cubicweb.cwconfig import CONFIGURATIONS
+from cubicweb.web.webconfig import WebConfiguration
+
+
+class WebConfigurationBase(WebConfiguration):
+ """web instance (in a twisted web server) client of a RQL server"""
+
+ options = merge_options((
+ # ctl configuration
+ ('port',
+ {'type' : 'int',
+ 'default': None,
+ 'help': 'http server port number (default to 8080)',
+ 'group': 'web', 'level': 0,
+ }),
+ ('interface',
+ {'type' : 'string',
+ 'default': "",
+ 'help': 'http server address on which to listen (default to everywhere)',
+ 'group': 'web', 'level': 1,
+ }),
+ ('max-post-length',
+ {'type' : 'bytes',
+ 'default': '100MB',
+ 'help': 'maximum length of HTTP request. Default to 100 MB.',
+ 'group': 'web', 'level': 1,
+ }),
+ ('profile',
+ {'type' : 'string',
+ 'default': None,
+ 'help': 'profile code and use the specified file to store stats if this option is set',
+ 'group': 'web', 'level': 3,
+ }),
+ ('host',
+ {'type' : 'string',
+ 'default': None,
+ 'help': 'host name if not correctly detectable through gethostname',
+ 'group': 'main', 'level': 1,
+ }),
+ ('pid-file',
+ {'type' : 'string',
+ 'default': Method('default_pid_file'),
+ 'help': 'repository\'s pid file',
+ 'group': 'main', 'level': 2,
+ }),
+ ('uid',
+ {'type' : 'string',
+ 'default': None,
+ 'help': 'if this option is set, use the specified user to start \
+the repository rather than the user running the command',
+ 'group': 'main', 'level': WebConfiguration.mode == 'system'
+ }),
+ ('webserver-threadpool-size',
+ {'type': 'int',
+ 'default': 4,
+ 'help': "size of twisted's reactor threadpool. It should probably be not too \
+much greater than connection-poolsize",
+ 'group': 'web', 'level': 3,
+ }),
+ ) + WebConfiguration.options)
+
+ def server_file(self):
+ return join(self.apphome, '%s-%s.py' % (self.appid, self.name))
+
+ def default_base_url(self):
+ from socket import getfqdn
+ return 'http://%s:%s/' % (self['host'] or getfqdn().lower(), self['port'] or 8080)
+
+
+try:
+ from cubicweb.server.serverconfig import ServerConfiguration
+
+ class AllInOneConfiguration(WebConfigurationBase, ServerConfiguration):
+ """repository and web instance in the same twisted process"""
+ name = 'all-in-one'
+ options = merge_options(WebConfigurationBase.options
+ + ServerConfiguration.options)
+
+ cubicweb_appobject_path = WebConfigurationBase.cubicweb_appobject_path | ServerConfiguration.cubicweb_appobject_path
+ cube_appobject_path = WebConfigurationBase.cube_appobject_path | ServerConfiguration.cube_appobject_path
+
+
+ CONFIGURATIONS.append(AllInOneConfiguration)
+
+except ImportError:
+ pass
diff -r 1400aee10df4 -r faf279e33298 cubicweb/etwist/twctl.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/etwist/twctl.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,79 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+"""cubicweb-clt handlers for twisted"""
+
+from cubicweb.toolsutils import CommandHandler
+from cubicweb.web.webctl import WebCreateHandler, WebUpgradeHandler
+
+# trigger configuration registration
+import cubicweb.etwist.twconfig # pylint: disable=W0611
+
+class TWCreateHandler(WebCreateHandler):
+ cfgname = 'twisted'
+
+class TWStartHandler(CommandHandler):
+ cmdname = 'start'
+ cfgname = 'twisted'
+
+ def start_server(self, config):
+ from cubicweb.etwist import server
+ return server.run(config)
+
+class TWStopHandler(CommandHandler):
+ cmdname = 'stop'
+ cfgname = 'twisted'
+
+ def poststop(self):
+ pass
+
+class TWUpgradeHandler(WebUpgradeHandler):
+ cfgname = 'twisted'
+
+
+try:
+ from cubicweb.server import serverctl
+ class AllInOneCreateHandler(serverctl.RepositoryCreateHandler,
+ TWCreateHandler):
+ """configuration to get an instance running in a twisted web server
+ integrating a repository server in the same process
+ """
+ cfgname = 'all-in-one'
+
+ def bootstrap(self, cubes, automatic=False, inputlevel=0):
+ """bootstrap this configuration"""
+ serverctl.RepositoryCreateHandler.bootstrap(self, cubes, automatic, inputlevel)
+ TWCreateHandler.bootstrap(self, cubes, automatic, inputlevel)
+
+ class AllInOneStartHandler(TWStartHandler):
+ cmdname = 'start'
+ cfgname = 'all-in-one'
+ subcommand = 'cubicweb-twisted'
+
+ class AllInOneStopHandler(CommandHandler):
+ cmdname = 'stop'
+ cfgname = 'all-in-one'
+ subcommand = 'cubicweb-twisted'
+
+ def poststop(self):
+ pass
+
+ class AllInOneUpgradeHandler(TWUpgradeHandler):
+ cfgname = 'all-in-one'
+
+except ImportError:
+ pass
diff -r 1400aee10df4 -r faf279e33298 cubicweb/ext/__init__.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/ext/__init__.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,17 @@
+# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
diff -r 1400aee10df4 -r faf279e33298 cubicweb/ext/html4zope.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/ext/html4zope.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,174 @@
+# Author: David Goodger
+# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+"""
+
+"""
+# Contact: goodger@users.sourceforge.net
+# Revision: $Revision: 1.2 $
+# Date: $Date: 2005-07-04 16:36:50 $
+# Copyright: This module has been placed in the public domain.
+
+"""
+Simple HyperText Markup Language document tree Writer.
+
+The output conforms to the HTML 4.01 Transitional DTD and to the Extensible
+HTML version 1.0 Transitional DTD (*almost* strict). The output contains a
+minimum of formatting information. A cascading style sheet ("default.css" by
+default) is required for proper viewing with a modern graphical browser.
+
+http://cvs.zope.org/Zope/lib/python/docutils/writers/Attic/html4zope.py?rev=1.1.2.2&only_with_tag=ajung-restructuredtext-integration-branch&content-type=text/vnd.viewcvs-markup
+"""
+
+__docformat__ = 'reStructuredText'
+
+import os
+
+from logilab.mtconverter import xml_escape
+
+from docutils import nodes
+from docutils.writers.html4css1 import Writer as CSS1Writer
+from docutils.writers.html4css1 import HTMLTranslator as CSS1HTMLTranslator
+
+default_level = int(os.environ.get('STX_DEFAULT_LEVEL', 3))
+
+class Writer(CSS1Writer):
+ """css writer using our html translator"""
+ def __init__(self, base_url):
+ CSS1Writer.__init__(self)
+ self.translator_class = URLBinder(base_url, HTMLTranslator)
+
+ def apply_template(self):
+ """overriding this is necessary with docutils >= 0.5"""
+ return self.visitor.astext()
+
+class URLBinder:
+ def __init__(self, url, klass):
+ self.base_url = url
+ self.translator_class = HTMLTranslator
+
+ def __call__(self, document):
+ translator = self.translator_class(document)
+ translator.base_url = self.base_url
+ return translator
+
+class HTMLTranslator(CSS1HTMLTranslator):
+ """ReST tree to html translator"""
+
+ def astext(self):
+ """return the extracted html"""
+ return ''.join(self.body)
+
+ def visit_title(self, node):
+ """Only 6 section levels are supported by HTML."""
+ if isinstance(node.parent, nodes.topic):
+ self.body.append(
+ self.starttag(node, 'p', '', CLASS='topic-title'))
+ if node.parent.hasattr('id'):
+ self.body.append(
+ self.starttag({}, 'a', '', name=node.parent['id']))
+                self.context.append('</a></p>\n')
+            else:
+                self.context.append('</p>\n')
+        elif self.section_level == 0:
+            # document title
+            self.head.append('<title>%s</title>\n'
+                             % self.encode(node.astext()))
+            self.body.append(self.starttag(node, 'h%d' % default_level, '',
+                                           CLASS='title'))
+            self.context.append('</h%d>\n' % default_level)
+        else:
+            self.body.append(
+                  self.starttag(node, 'h%s' % (
+                default_level+self.section_level-1), ''))
+            atts = {}
+            if node.hasattr('refid'):
+                atts['class'] = 'toc-backref'
+                atts['href'] = '#' + node['refid']
+            self.body.append(self.starttag({}, 'a', '', **atts))
+            self.context.append('</a></h%s>\n' % (
+                default_level+self.section_level-1))
+
+    def depart_title(self, node):
+        self.body.append(self.context.pop())
+
+    def visit_document(self, node):
+        """syt: i don't want the enclosing <div class="document">"""
+
+    def depart_document(self, node):
+        """syt: i don't want the enclosing <div class="document">"""
+
+ def visit_reference(self, node):
+ """syt: i want absolute urls"""
+ if 'refuri' in node:
+ href = node['refuri']
+ if ( self.settings.cloak_email_addresses
+ and href.startswith('mailto:')):
+ href = self.cloak_mailto(href)
+ self.in_mailto = 1
+ else:
+ assert 'refid' in node, \
+ 'References must have "refuri" or "refid" attribute.'
+ href = '%s#%s' % (self.base_url, node['refid'])
+ atts = {'href': href, 'class': 'reference'}
+ if not isinstance(node.parent, nodes.TextElement):
+ assert len(node) == 1 and isinstance(node[0], nodes.image)
+ atts['class'] += ' image-reference'
+ self.body.append(self.starttag(node, 'a', '', **atts))
+
+ ## override error messages to avoid XHTML problems ########################
+ def visit_problematic(self, node):
+ pass
+
+ def depart_problematic(self, node):
+ pass
+
+ def visit_system_message(self, node):
+ backref_text = ''
+ if len(node['backrefs']):
+ backrefs = node['backrefs']
+ if len(backrefs) == 1:
+ backref_text = '; backlink'
+ else:
+ i = 1
+ backlinks = []
+ for backref in backrefs:
+ backlinks.append(str(i))
+ i += 1
+ backref_text = ('; backlinks: %s'
+ % ', '.join(backlinks))
+ if node.hasattr('line'):
+ line = ', line %s' % node['line']
+ else:
+ line = ''
+ a_start = a_end = ''
+ error = u'System Message: %s%s/%s%s (%s %s)%s\n' % (
+ a_start, node['type'], node['level'], a_end,
+ self.encode(node['source']), line, backref_text)
+        self.body.append(u'<div class="system-message"><b>ReST / HTML errors:</b>%s</div>'
+                         % xml_escape(error))
+
+ def depart_system_message(self, node):
+ pass
diff -r 1400aee10df4 -r faf279e33298 cubicweb/ext/markdown.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/ext/markdown.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,27 @@
+from __future__ import absolute_import
+import markdown
+
+import logging
+
+log = logging.getLogger(__name__)
+
+
+def markdown_publish(context, data):
+ """publish a string formatted as MarkDown Text to HTML
+
+ :type context: a cubicweb application object
+
+ :type data: str
+ :param data: some MarkDown text
+
+ :rtype: unicode
+ :return:
+ the data formatted as HTML or the original data if an error occurred
+ """
+ md = markdown.Markdown()
+ try:
+ return md.convert(data)
+ except:
+ import traceback; traceback.print_exc()
+ log.exception("Error while converting Markdown to HTML")
+ return data
diff -r 1400aee10df4 -r faf279e33298 cubicweb/ext/rest.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/ext/rest.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,469 @@
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+"""rest publishing functions
+
+contains some functions and setup of docutils for cubicweb. Provides the
+following ReST directives:
+
+* `eid`, create link to entity in the repository by their eid
+
+* `card`, create link to card entity in the repository by their wikiid
+ (proposing to create it when the refered card doesn't exist yet)
+
+* `winclude`, reference to a web documentation file (in wdoc/ directories)
+
+* `sourcecode` (if pygments is installed), source code colorization
+
+* `rql-table`, create a table from a RQL query
+
+"""
+__docformat__ = "restructuredtext en"
+
+import sys
+from itertools import chain
+from logging import getLogger
+from os.path import join
+
+from six import text_type
+from six.moves.urllib.parse import urlsplit
+
+from docutils import statemachine, nodes, utils, io
+from docutils.core import Publisher
+from docutils.parsers.rst import Parser, states, directives, Directive
+from docutils.parsers.rst.roles import register_canonical_role, set_classes
+
+from logilab.mtconverter import ESC_UCAR_TABLE, ESC_CAR_TABLE, xml_escape
+
+from cubicweb import UnknownEid
+from cubicweb.ext.html4zope import Writer
+
+from cubicweb.web.views import vid_from_rset # XXX better not to import c.w.views here...
+
+# We provide our own parser as an attempt to get rid of
+# state machine reinstanciation
+
+import re
+# compile states.Body patterns
+for k, v in states.Body.patterns.items():
+ if isinstance(v, str):
+ states.Body.patterns[k] = re.compile(v)
+
+# register ReStructured Text mimetype / extensions
+import mimetypes
+mimetypes.add_type('text/rest', '.rest')
+mimetypes.add_type('text/rest', '.rst')
+
+
+LOGGER = getLogger('cubicweb.rest')
+
+
+def eid_reference_role(role, rawtext, text, lineno, inliner,
+ options={}, content=[]):
+ try:
+ try:
+ eid_num, rest = text.split(u':', 1)
+ except ValueError:
+ eid_num, rest = text, '#'+text
+ eid_num = int(eid_num)
+ if eid_num < 0:
+ raise ValueError
+ except ValueError:
+ msg = inliner.reporter.error(
+ 'EID number must be a positive number; "%s" is invalid.'
+ % text, line=lineno)
+ prb = inliner.problematic(rawtext, rawtext, msg)
+ return [prb], [msg]
+ # Base URL mainly used by inliner.pep_reference; so this is correct:
+ context = inliner.document.settings.context
+ try:
+ refedentity = context._cw.entity_from_eid(eid_num)
+ except UnknownEid:
+ ref = '#'
+ rest += u' ' + context._cw._('(UNEXISTANT EID)')
+ else:
+ ref = refedentity.absolute_url()
+ set_classes(options)
+ return [nodes.reference(rawtext, utils.unescape(rest), refuri=ref,
+ **options)], []
+
+
+def rql_role(role, rawtext, text, lineno, inliner, options={}, content=[]):
+    """``:rql:`<rql-expression>``` or ``:rql:`<rql-expression>:<vid>```
+
+ Example: ``:rql:`Any X,Y WHERE X is CWUser, X login Y:table```
+
+ Replace the directive with the output of applying the view to the resultset
+ returned by the query.
+
+ "X eid %(userid)s" can be used in the RQL query for this query will be
+ executed with the argument {'userid': _cw.user.eid}.
+ """
+ _cw = inliner.document.settings.context._cw
+ text = text.strip()
+ if ':' in text:
+ rql, vid = text.rsplit(u':', 1)
+ rql = rql.strip()
+ else:
+ rql, vid = text, None
+ _cw.ensure_ro_rql(rql)
+ try:
+ rset = _cw.execute(rql, {'userid': _cw.user.eid})
+ if rset:
+ if vid is None:
+ vid = vid_from_rset(_cw, rset, _cw.vreg.schema)
+ else:
+ vid = 'noresult'
+ view = _cw.vreg['views'].select(vid, _cw, rset=rset)
+ content = view.render()
+ except Exception as exc:
+ content = 'an error occurred while interpreting this rql directive: %r' % exc
+ set_classes(options)
+ return [nodes.raw('', content, format='html')], []
+
+
+def bookmark_role(role, rawtext, text, lineno, inliner, options={}, content=[]):
+    """``:bookmark:`<bookmark-eid>``` or ``:bookmark:`<eid>:<vid>```
+
+ Example: ``:bookmark:`1234:table```
+
+ Replace the directive with the output of applying the view to the resultset
+ returned by the query stored in the bookmark. By default, the view is the one
+ stored in the bookmark, but it can be overridden by the directive as in the
+ example above.
+
+ "X eid %(userid)s" can be used in the RQL query stored in the Bookmark, for
+ this query will be executed with the argument {'userid': _cw.user.eid}.
+ """
+ _cw = inliner.document.settings.context._cw
+ text = text.strip()
+ try:
+ if ':' in text:
+ eid, vid = text.rsplit(u':', 1)
+ eid = int(eid)
+ else:
+ eid, vid = int(text), None
+ except ValueError:
+ msg = inliner.reporter.error(
+ 'EID number must be a positive number; "%s" is invalid.'
+ % text, line=lineno)
+ prb = inliner.problematic(rawtext, rawtext, msg)
+ return [prb], [msg]
+ try:
+ bookmark = _cw.entity_from_eid(eid)
+ except UnknownEid:
+ msg = inliner.reporter.error('Unknown EID %s.' % text, line=lineno)
+ prb = inliner.problematic(rawtext, rawtext, msg)
+ return [prb], [msg]
+ try:
+ params = dict(_cw.url_parse_qsl(urlsplit(bookmark.path).query))
+ rql = params['rql']
+ if vid is None:
+ vid = params.get('vid')
+ except (ValueError, KeyError) as exc:
+ msg = inliner.reporter.error('Could not parse bookmark path %s [%s].'
+ % (bookmark.path, exc), line=lineno)
+ prb = inliner.problematic(rawtext, rawtext, msg)
+ return [prb], [msg]
+ try:
+ rset = _cw.execute(rql, {'userid': _cw.user.eid})
+ if rset:
+ if vid is None:
+ vid = vid_from_rset(_cw, rset, _cw.vreg.schema)
+ else:
+ vid = 'noresult'
+ view = _cw.vreg['views'].select(vid, _cw, rset=rset)
+ content = view.render()
+ except Exception as exc:
+ content = 'An error occurred while interpreting directive bookmark: %r' % exc
+ set_classes(options)
+ return [nodes.raw('', content, format='html')], []
+
+
+def winclude_directive(name, arguments, options, content, lineno,
+ content_offset, block_text, state, state_machine):
+ """Include a reST file as part of the content of this reST file.
+
+ same as standard include directive but using config.locate_doc_resource to
+ get actual file to include.
+
+ Most part of this implementation is copied from `include` directive defined
+ in `docutils.parsers.rst.directives.misc`
+ """
+ context = state.document.settings.context
+ cw = context._cw
+ source = state_machine.input_lines.source(
+ lineno - state_machine.input_offset - 1)
+ #source_dir = os.path.dirname(os.path.abspath(source))
+ fid = arguments[0]
+ for lang in chain((cw.lang, cw.vreg.property_value('ui.language')),
+ cw.vreg.config.available_languages()):
+ rid = '%s_%s.rst' % (fid, lang)
+ resourcedir = cw.vreg.config.locate_doc_file(rid)
+ if resourcedir:
+ break
+ else:
+ severe = state_machine.reporter.severe(
+ 'Problems with "%s" directive path:\nno resource matching %s.'
+ % (name, fid),
+ nodes.literal_block(block_text, block_text), line=lineno)
+ return [severe]
+ path = join(resourcedir, rid)
+ encoding = options.get('encoding', state.document.settings.input_encoding)
+ try:
+ state.document.settings.record_dependencies.add(path)
+ include_file = io.FileInput(
+ source_path=path, encoding=encoding,
+ error_handler=state.document.settings.input_encoding_error_handler,
+ handle_io_errors=None)
+ except IOError as error:
+ severe = state_machine.reporter.severe(
+ 'Problems with "%s" directive path:\n%s: %s.'
+ % (name, error.__class__.__name__, error),
+ nodes.literal_block(block_text, block_text), line=lineno)
+ return [severe]
+ try:
+ include_text = include_file.read()
+ except UnicodeError as error:
+ severe = state_machine.reporter.severe(
+ 'Problem with "%s" directive:\n%s: %s'
+ % (name, error.__class__.__name__, error),
+ nodes.literal_block(block_text, block_text), line=lineno)
+ return [severe]
+ if 'literal' in options:
+ literal_block = nodes.literal_block(include_text, include_text,
+ source=path)
+ literal_block.line = 1
+ return literal_block
+ else:
+ include_lines = statemachine.string2lines(include_text,
+ convert_whitespace=1)
+ state_machine.insert_input(include_lines, path)
+ return []
+
+winclude_directive.arguments = (1, 0, 1)
+winclude_directive.options = {'literal': directives.flag,
+ 'encoding': directives.encoding}
+
+
+class RQLTableDirective(Directive):
+ """rql-table directive
+
+ Example:
+
+ .. rql-table::
+ :vid: mytable
+ :headers: , , progress
+ :colvids: 2=progress
+
+ Any X,U,X WHERE X is Project, X url U
+
+ All fields but the RQL string are optionnal. The ``:headers:`` option can
+ contain empty column names.
+ """
+
+ required_arguments = 0
+ optional_arguments = 0
+ has_content= True
+ final_argument_whitespace = True
+ option_spec = {'vid': directives.unchanged,
+ 'headers': directives.unchanged,
+ 'colvids': directives.unchanged}
+
+ def run(self):
+ errid = "rql-table directive"
+ self.assert_has_content()
+ if self.arguments:
+ raise self.warning('%s does not accept arguments' % errid)
+ rql = ' '.join([l.strip() for l in self.content])
+ _cw = self.state.document.settings.context._cw
+ _cw.ensure_ro_rql(rql)
+ try:
+ rset = _cw.execute(rql)
+ except Exception as exc:
+ raise self.error("fail to execute RQL query in %s: %r" %
+ (errid, exc))
+ if not rset:
+ raise self.warning("empty result set")
+ vid = self.options.get('vid', 'table')
+ try:
+ view = _cw.vreg['views'].select(vid, _cw, rset=rset)
+ except Exception as exc:
+ raise self.error("fail to select '%s' view in %s: %r" %
+ (vid, errid, exc))
+ headers = None
+ if 'headers' in self.options:
+ headers = [h.strip() for h in self.options['headers'].split(',')]
+ while headers.count(''):
+ headers[headers.index('')] = None
+ if len(headers) != len(rset[0]):
+ raise self.error("the number of 'headers' does not match the "
+ "number of columns in %s" % errid)
+ cellvids = None
+ if 'colvids' in self.options:
+ cellvids = {}
+ for f in self.options['colvids'].split(','):
+ try:
+ idx, vid = f.strip().split('=')
+ except ValueError:
+ raise self.error("malformatted 'colvids' option in %s" %
+ errid)
+ cellvids[int(idx.strip())] = vid.strip()
+ try:
+ content = view.render(headers=headers, cellvids=cellvids)
+ except Exception as exc:
+ raise self.error("Error rendering %s (%s)" % (errid, exc))
+ return [nodes.raw('', content, format='html')]
+
+
+try:
+ from pygments import highlight
+ from pygments.lexers import get_lexer_by_name
+ from pygments.formatters.html import HtmlFormatter
+except ImportError:
+ pygments_directive = None
+else:
+ _PYGMENTS_FORMATTER = HtmlFormatter()
+
+ def pygments_directive(name, arguments, options, content, lineno,
+ content_offset, block_text, state, state_machine):
+ try:
+ lexer = get_lexer_by_name(arguments[0])
+ except ValueError:
+ # no lexer found
+ lexer = get_lexer_by_name('text')
+ parsed = highlight(u'\n'.join(content), lexer, _PYGMENTS_FORMATTER)
+ # don't fail if no context set on the sourcecode directive
+ try:
+ context = state.document.settings.context
+ context._cw.add_css('pygments.css')
+ except AttributeError:
+ # used outside cubicweb XXX use hasattr instead
+ pass
+ return [nodes.raw('', parsed, format='html')]
+
+ pygments_directive.arguments = (1, 0, 1)
+ pygments_directive.content = 1
+
+
+class CubicWebReSTParser(Parser):
+ """The (customized) reStructuredText parser."""
+
+ def __init__(self):
+ self.initial_state = 'Body'
+ self.state_classes = states.state_classes
+ self.inliner = states.Inliner()
+ self.statemachine = states.RSTStateMachine(
+ state_classes=self.state_classes,
+ initial_state=self.initial_state,
+ debug=0)
+
+ def parse(self, inputstring, document):
+ """Parse `inputstring` and populate `document`, a document tree."""
+ self.setup_parse(inputstring, document)
+ inputlines = statemachine.string2lines(inputstring,
+ convert_whitespace=1)
+ self.statemachine.run(inputlines, document, inliner=self.inliner)
+ self.finish_parse()
+
+
+# XXX docutils keep a ref on context, can't find a correct way to remove it
+class CWReSTPublisher(Publisher):
+ def __init__(self, context, settings, **kwargs):
+ Publisher.__init__(self, **kwargs)
+ self.set_components('standalone', 'restructuredtext', 'pseudoxml')
+ self.process_programmatic_settings(None, settings, None)
+ self.settings.context = context
+
+
+def rest_publish(context, data):
+ """publish a string formatted as ReStructured Text to HTML
+
+ :type context: a cubicweb application object
+
+ :type data: str
+ :param data: some ReST text
+
+ :rtype: unicode
+ :return:
+ the data formatted as HTML or the original data if an error occurred
+ """
+ req = context._cw
+ if isinstance(data, text_type):
+ encoding = 'unicode'
+ # remove unprintable characters unauthorized in xml
+ data = data.translate(ESC_UCAR_TABLE)
+ else:
+ encoding = req.encoding
+ # remove unprintable characters unauthorized in xml
+ data = data.translate(ESC_CAR_TABLE)
+ settings = {'input_encoding': encoding, 'output_encoding': 'unicode',
+ 'warning_stream': False,
+ 'traceback': True, # don't sys.exit
+ 'stylesheet': None, # don't try to embed stylesheet (may cause
+ # obscure bug due to docutils computing
+ # relative path according to the directory
+ # used *at import time*
+ # dunno what's the max, severe is 4, and we never want a crash
+ # (though try/except may be a better option...). May be the
+ # above traceback option will avoid this?
+ 'halt_level': 10,
+ # disable stupid switch to colspan=2 if field name is above a size limit
+ 'field_name_limit': sys.maxsize,
+ }
+ if context:
+ if hasattr(req, 'url'):
+ base_url = req.url()
+ elif hasattr(context, 'absolute_url'):
+ base_url = context.absolute_url()
+ else:
+ base_url = req.base_url()
+ else:
+ base_url = None
+ try:
+ pub = CWReSTPublisher(context, settings,
+ parser=CubicWebReSTParser(),
+ writer=Writer(base_url=base_url),
+ source_class=io.StringInput,
+ destination_class=io.StringOutput)
+ pub.set_source(data)
+ pub.set_destination()
+ res = pub.publish(enable_exit_status=None)
+ # necessary for proper garbage collection, else a ref is kept somewhere in docutils...
+ del pub.settings.context
+ return res
+ except BaseException:
+ LOGGER.exception('error while publishing ReST text')
+ if not isinstance(data, text_type):
+ data = text_type(data, encoding, 'replace')
+ return xml_escape(req._('error while publishing ReST text')
+ + '\n\n' + data)
+
+
+_INITIALIZED = False
+def cw_rest_init():
+ global _INITIALIZED
+ if _INITIALIZED:
+ return
+ _INITIALIZED = True
+ register_canonical_role('eid', eid_reference_role)
+ register_canonical_role('rql', rql_role)
+ register_canonical_role('bookmark', bookmark_role)
+ directives.register_directive('winclude', winclude_directive)
+ if pygments_directive is not None:
+ directives.register_directive('sourcecode', pygments_directive)
+ directives.register_directive('rql-table', RQLTableDirective)
diff -r 1400aee10df4 -r faf279e33298 cubicweb/ext/tal.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/ext/tal.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,273 @@
+# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+"""provides simpleTAL extensions for CubicWeb
+
+"""
+
+__docformat__ = "restructuredtext en"
+
+import sys
+import re
+from os.path import exists, isdir, join
+from logging import getLogger
+from StringIO import StringIO
+
+from simpletal import simpleTAL, simpleTALES
+
+from logilab.common.decorators import cached
+
+LOGGER = getLogger('cubicweb.tal')
+
+
+class LoggerAdapter(object):
+ def __init__(self, tal_logger):
+ self.tal_logger = tal_logger
+
+ def debug(self, msg):
+ LOGGER.debug(msg)
+
+ def warn(self, msg):
+ LOGGER.warning(msg)
+
+ def __getattr__(self, attrname):
+ return getattr(self.tal_logger, attrname)
+
+
+class CubicWebContext(simpleTALES.Context):
+ """add facilities to access entity / resultset"""
+
+ def __init__(self, options=None, allowPythonPath=1):
+ simpleTALES.Context.__init__(self, options, allowPythonPath)
+ self.log = LoggerAdapter(self.log)
+
+ def update(self, context):
+ for varname, value in context.items():
+ self.addGlobal(varname, value)
+
+ def addRepeat(self, name, var, initialValue):
+ simpleTALES.Context.addRepeat(self, name, var, initialValue)
+
+# XXX FIXME need to find a clean to define OPCODE values for extensions
+I18N_CONTENT = 18
+I18N_REPLACE = 19
+RQL_EXECUTE = 20
+# simpleTAL uses the OPCODE values to define priority over commands.
+# TAL_ITER should have the same priority than TAL_REPEAT (i.e. 3), but
+# we can't use the same OPCODE for two different commands without changing
+# the simpleTAL implementation. Another solution would be to totally override
+# the REPEAT implementation with the ITER one, but some specific operations
+# (involving len() for instance) are not implemented for ITER, so we prefer
+# to keep both implementations for now, and to fool simpleTAL by using a float
+# number between 3 and 4
+TAL_ITER = 3.1
+
+
+# FIX simpleTAL HTML 4.01 stupidity
+# (simpleTAL never closes tags like INPUT, IMG, HR ...)
+simpleTAL.HTML_FORBIDDEN_ENDTAG.clear()
+
+class CubicWebTemplateCompiler(simpleTAL.HTMLTemplateCompiler):
+ """extends default compiler by adding i18n:content commands"""
+
+ def __init__(self):
+ simpleTAL.HTMLTemplateCompiler.__init__(self)
+ self.commandHandler[I18N_CONTENT] = self.compile_cmd_i18n_content
+ self.commandHandler[I18N_REPLACE] = self.compile_cmd_i18n_replace
+ self.commandHandler[RQL_EXECUTE] = self.compile_cmd_rql
+ self.commandHandler[TAL_ITER] = self.compile_cmd_tal_iter
+
+ def setTALPrefix(self, prefix):
+ simpleTAL.TemplateCompiler.setTALPrefix(self, prefix)
+ self.tal_attribute_map['i18n:content'] = I18N_CONTENT
+ self.tal_attribute_map['i18n:replace'] = I18N_REPLACE
+ self.tal_attribute_map['rql:execute'] = RQL_EXECUTE
+ self.tal_attribute_map['tal:iter'] = TAL_ITER
+
+ def compile_cmd_i18n_content(self, argument):
+ # XXX tal:content structure=, text= should we support this ?
+ structure_flag = 0
+ return (I18N_CONTENT, (argument, False, structure_flag, self.endTagSymbol))
+
+ def compile_cmd_i18n_replace(self, argument):
+ # XXX tal:content structure=, text= should we support this ?
+ structure_flag = 0
+ return (I18N_CONTENT, (argument, True, structure_flag, self.endTagSymbol))
+
+ def compile_cmd_rql(self, argument):
+ return (RQL_EXECUTE, (argument, self.endTagSymbol))
+
+ def compile_cmd_tal_iter(self, argument):
+ original_id, (var_name, expression, end_tag_symbol) = \
+ simpleTAL.HTMLTemplateCompiler.compileCmdRepeat(self, argument)
+ return (TAL_ITER, (var_name, expression, self.endTagSymbol))
+
+ def getTemplate(self):
+ return CubicWebTemplate(self.commandList, self.macroMap, self.symbolLocationTable)
+
+ def compileCmdAttributes (self, argument):
+ """XXX modified to support single attribute
+ definition ending by a ';'
+
+ backport this to simpleTAL
+ """
+ # Compile tal:attributes into attribute command
+ # Argument: [(attributeName, expression)]
+
+ # Break up the list of attribute settings first
+ commandArgs = []
+ # We only want to match semi-colons that are not escaped
+ argumentSplitter = re.compile(r'(?.
+
+
+from cubicweb.web.views import tableview
+
+class CustomRsetTableView(tableview.RsetTableView):
+ __regid__ = 'mytable'
diff -r 1400aee10df4 -r faf279e33298 cubicweb/ext/test/unittest_rest.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/ext/test/unittest_rest.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,244 @@
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see .
+from six import PY3
+
+from logilab.common.testlib import unittest_main
+from cubicweb.devtools.testlib import CubicWebTC
+
+from cubicweb.ext.rest import rest_publish
+
+class RestTC(CubicWebTC):
+
+ def context(self, req):
+ return req.execute('CWUser X WHERE X login "admin"').get_entity(0, 0)
+
+ def test_eid_role(self):
+ with self.admin_access.web_request() as req:
+ context = self.context(req)
+ self.assertEqual(rest_publish(context, ':eid:`%s`' % context.eid),
+ '
\n')
+
+ def test_bad_rest_no_crash(self):
+ with self.admin_access.web_request() as req:
+ rest_publish(self.context(req), '''
+| card | implication |
+--------------------------
+| 1-1 | N1 = N2 |
+| 1-? | N1 <= N2 |
+| 1-+ | N1 >= N2 |
+| 1-* | N1>0 => N2>0 |
+--------------------------
+| ?-? | N1 # N2 |
+| ?-+ | N1 >= N2 |
+| ?-* | N1 # N2 |
+--------------------------
+| +-+ | N1>0 => N2>0 et |
+| | N2>0 => N1>0 |
+| +-* | N1>+ => N2>0 |
+--------------------------
+| *-* | N1#N2 |
+--------------------------
+
+''')
+
+ def test_disable_field_name_colspan(self):
+ with self.admin_access.web_request() as req:
+ context = self.context(req)
+ value = rest_publish(context, '''my field list:
+
+:a long dumb param name: value
+''')
+ self.assertNotIn('colspan', value)
+
+ def test_rql_role_with_vid(self):
+ with self.admin_access.web_request() as req:
+ context = self.context(req)
+ out = rest_publish(context, ':rql:`Any X ORDERBY XL WHERE X is CWUser, X login XL:table`')
+ self.assertTrue(out.endswith('anon\n'
+ '
\n'))
+
+ def test_rql_role_with_vid_empty_rset(self):
+ with self.admin_access.web_request() as req:
+ context = self.context(req)
+ out = rest_publish(context, ':rql:`Any X WHERE X is CWUser, X login "nono":table`')
+ self.assertTrue(out.endswith('
'
+ 'No result matching query
\n\n'))
+
+ def test_rql_role_with_unknown_vid(self):
+ with self.admin_access.web_request() as req:
+ context = self.context(req)
+ out = rest_publish(context, ':rql:`Any X WHERE X is CWUser:toto`')
+ self.assertTrue(out.startswith("
an error occurred while interpreting this "
+ "rql directive: ObjectNotFound(%s'toto',)
" %
+ ('' if PY3 else 'u')),
+ out)
+
+ def test_rql_role_without_vid(self):
+ with self.admin_access.web_request() as req:
+ context = self.context(req)
+ out = rest_publish(context, ':rql:`Any X,XL ORDERBY XL WHERE X is CWUser, X login XL`')
+ self.assertEqual(out, u'
\n')
+
+ def test_rqltable_nocontent(self):
+ with self.admin_access.web_request() as req:
+ context = self.context(req)
+ out = rest_publish(context, """.. rql-table::""")
+ self.assertIn("System Message: ERROR", out)
+ self.assertIn("Content block expected for the "rql-table" "
+ "directive; none found" , out)
+
+ def test_rqltable_norset(self):
+ with self.admin_access.web_request() as req:
+ context = self.context(req)
+ rql = "Any X WHERE X is CWUser, X firstname 'franky'"
+ out = rest_publish(
+ context, """\
+.. rql-table::
+
+ %(rql)s""" % {'rql': rql})
+ self.assertIn("System Message: WARNING", out)
+ self.assertIn("empty result set", out)
+
+ def test_rqltable_nooptions(self):
+ with self.admin_access.web_request() as req:
+ rql = "Any S,F,L WHERE X is CWUser, X surname S, X firstname F, X login L"
+ out = rest_publish(
+ self.context(req), """\
+.. rql-table::
+
+ %(rql)s
+ """ % {'rql': rql})
+ view = self.vreg['views'].select('table', req, rset=req.execute(rql))
+ self.assertEqual(view.render(w=None)[49:], out[49:])
+
+ def test_rqltable_vid(self):
+ with self.admin_access.web_request() as req:
+ rql = "Any S,F,L WHERE X is CWUser, X surname S, X firstname F, X login L"
+ vid = 'mytable'
+ out = rest_publish(
+ self.context(req), """\
+.. rql-table::
+ :vid: %(vid)s
+
+ %(rql)s
+ """ % {'rql': rql, 'vid': vid})
+ view = self.vreg['views'].select(vid, req, rset=req.execute(rql))
+ self.assertEqual(view.render(w=None)[49:], out[49:])
+ self.assertIn(vid, out[:49])
+
+ def test_rqltable_badvid(self):
+ with self.admin_access.web_request() as req:
+ rql = "Any S,F,L WHERE X is CWUser, X surname S, X firstname F, X login L"
+ vid = 'mytabel'
+ out = rest_publish(
+ self.context(req), """\
+.. rql-table::
+ :vid: %(vid)s
+
+ %(rql)s
+ """ % {'rql': rql, 'vid': vid})
+ self.assertIn("fail to select '%s' view" % vid, out)
+
+ def test_rqltable_headers(self):
+ with self.admin_access.web_request() as req:
+ rql = "Any S,F,L WHERE X is CWUser, X surname S, X firstname F, X login L"
+ headers = ["nom", "prenom", "identifiant"]
+ out = rest_publish(
+ self.context(req), """\
+.. rql-table::
+ :headers: %(headers)s
+
+ %(rql)s
+ """ % {'rql': rql, 'headers': ', '.join(headers)})
+ view = self.vreg['views'].select('table', req, rset=req.execute(rql))
+ view.headers = headers
+ self.assertEqual(view.render(w=None)[49:], out[49:])
+
+ def test_rqltable_headers_missing(self):
+ with self.admin_access.web_request() as req:
+ rql = "Any S,F,L WHERE X is CWUser, X surname S, X firstname F, X login L"
+ headers = ["nom", "", "identifiant"]
+ out = rest_publish(
+ self.context(req), """\
+.. rql-table::
+ :headers: %(headers)s
+
+ %(rql)s
+ """ % {'rql': rql, 'headers': ', '.join(headers)})
+ view = self.vreg['views'].select('table', req, rset=req.execute(rql))
+ view.headers = [headers[0], None, headers[2]]
+ self.assertEqual(view.render(w=None)[49:], out[49:])
+
+ def test_rqltable_headers_missing_edges(self):
+ with self.admin_access.web_request() as req:
+ rql = "Any S,F,L WHERE X is CWUser, X surname S, X firstname F, X login L"
+ headers = [" ", "prenom", ""]
+ out = rest_publish(
+ self.context(req), """\
+.. rql-table::
+ :headers: %(headers)s
+
+ %(rql)s
+ """ % {'rql': rql, 'headers': ', '.join(headers)})
+ view = self.vreg['views'].select('table', req, rset=req.execute(rql))
+ view.headers = [None, headers[1], None]
+ self.assertEqual(view.render(w=None)[49:], out[49:])
+
+ def test_rqltable_colvids(self):
+ with self.admin_access.web_request() as req:
+ rql = "Any X,S,F,L WHERE X is CWUser, X surname S, X firstname F, X login L"
+ colvids = {0: "oneline"}
+ out = rest_publish(
+ self.context(req), """\
+.. rql-table::
+ :colvids: %(colvids)s
+
+ %(rql)s
+ """ % {'rql': rql,
+ 'colvids': ', '.join(["%d=%s" % (k, v)
+ for k, v in colvids.items()])
+ })
+ view = self.vreg['views'].select('table', req, rset=req.execute(rql))
+ view.cellvids = colvids
+ self.assertEqual(view.render(w=None)[49:], out[49:])
+
+
+if __name__ == '__main__':
+ unittest_main()
diff -r 1400aee10df4 -r faf279e33298 cubicweb/hooks/__init__.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/hooks/__init__.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,84 @@
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see .
+"""core hooks registering some maintainance tasks as server startup time"""
+
+__docformat__ = "restructuredtext en"
+
+from datetime import timedelta, datetime
+
+from cubicweb.server import hook
+
+class TransactionsCleanupStartupHook(hook.Hook):
+ """start task to cleanup transaction data"""
+ __regid__ = 'cw.looping-tasks.transactions-cleanup'
+ events = ('server_startup',)
+
+ def __call__(self):
+ # XXX use named args and inner functions to avoid referencing globals
+ # which may cause reloading pb
+ lifetime = timedelta(days=self.repo.config['keep-transaction-lifetime'])
+ def cleanup_old_transactions(repo=self.repo, lifetime=lifetime):
+ mindate = datetime.utcnow() - lifetime
+ with repo.internal_cnx() as cnx:
+ cnx.system_sql(
+ 'DELETE FROM transactions WHERE tx_time < %(time)s',
+ {'time': mindate})
+ cnx.commit()
+ if self.repo.config['undo-enabled']:
+ self.repo.looping_task(60*60*24, cleanup_old_transactions,
+ self.repo)
+
+class UpdateFeedsStartupHook(hook.Hook):
+ """start task to update datafeed based sources"""
+ __regid__ = 'cw.looping-tasks.update-feeds'
+ events = ('server_startup',)
+
+ def __call__(self):
+ def update_feeds(repo):
+ # take a list to avoid iterating on a dictionary whose size may
+ # change
+ for uri, source in list(repo.sources_by_uri.items()):
+ if (uri == 'system'
+ or not repo.config.source_enabled(source)
+ or not source.config['synchronize']):
+ continue
+ with repo.internal_cnx() as cnx:
+ try:
+ source.pull_data(cnx)
+ except Exception as exc:
+ cnx.exception('while trying to update feed %s', source)
+ self.repo.looping_task(60, update_feeds, self.repo)
+
+
+class DataImportsCleanupStartupHook(hook.Hook):
+ """start task to cleanup old data imports (ie datafeed import logs)"""
+ __regid__ = 'cw.looping-tasks.dataimports-cleanup'
+ events = ('server_startup',)
+
+ def __call__(self):
+ def expire_dataimports(repo=self.repo):
+ for uri, source in repo.sources_by_uri.items():
+ if (uri == 'system'
+ or not repo.config.source_enabled(source)):
+ continue
+ with repo.internal_cnx() as cnx:
+ mindate = datetime.utcnow() - timedelta(seconds=source.config['logs-lifetime'])
+ cnx.execute('DELETE CWDataImport X WHERE X start_timestamp < %(time)s',
+ {'time': mindate})
+ cnx.commit()
+ self.repo.looping_task(60*60*24, expire_dataimports, self.repo)
diff -r 1400aee10df4 -r faf279e33298 cubicweb/hooks/bookmark.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/hooks/bookmark.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,42 @@
+# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see .
+"""bookmark related hooks"""
+
+__docformat__ = "restructuredtext en"
+
+from cubicweb.server import hook
+
+
+class AutoDeleteBookmarkOp(hook.Operation):
+ bookmark = None # make pylint happy
+ def precommit_event(self):
+ if not self.cnx.deleted_in_transaction(self.bookmark.eid):
+ if not self.bookmark.bookmarked_by:
+ self.bookmark.cw_delete()
+
+
+class DelBookmarkedByHook(hook.Hook):
+ """ensure user logins are stripped"""
+ __regid__ = 'autodelbookmark'
+ __select__ = hook.Hook.__select__ & hook.match_rtype('bookmarked_by',)
+ category = 'bookmark'
+ events = ('after_delete_relation',)
+
+ def __call__(self):
+ AutoDeleteBookmarkOp(self._cw,
+ bookmark=self._cw.entity_from_eid(self.eidfrom))
diff -r 1400aee10df4 -r faf279e33298 cubicweb/hooks/email.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/hooks/email.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,80 @@
+# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see .
+"""hooks to ensure use_email / primary_email relations consistency"""
+
+__docformat__ = "restructuredtext en"
+
+from cubicweb.server import hook
+
+
+class SetUseEmailRelationOp(hook.Operation):
+ """delay this operation to commit to avoid conflict with a late rql query
+ already setting the relation
+ """
+ rtype = 'use_email'
+ entity = email = None # make pylint happy
+
+ def condition(self):
+ """check entity has use_email set for the email address"""
+ return not any(e for e in self.entity.use_email
+ if self.email.eid == e.eid)
+
+ def precommit_event(self):
+ if self.cnx.deleted_in_transaction(self.entity.eid):
+ return
+ if self.cnx.deleted_in_transaction(self.email.eid):
+ return
+ if self.condition():
+ self.cnx.execute(
+ 'SET X %s Y WHERE X eid %%(x)s, Y eid %%(y)s' % self.rtype,
+ {'x': self.entity.eid, 'y': self.email.eid})
+
+
+class SetPrimaryEmailRelationOp(SetUseEmailRelationOp):
+ rtype = 'primary_email'
+
+ def condition(self):
+ """check entity has no primary_email set"""
+ return not self.entity.primary_email
+
+
+class SetPrimaryEmailHook(hook.Hook):
+ """notify when a bug or story or version has its state modified"""
+ __regid__ = 'setprimaryemail'
+ __select__ = hook.Hook.__select__ & hook.match_rtype('use_email')
+ category = 'email'
+ events = ('after_add_relation',)
+
+ def __call__(self):
+ entity = self._cw.entity_from_eid(self.eidfrom)
+ if 'primary_email' in entity.e_schema.subject_relations():
+ SetPrimaryEmailRelationOp(self._cw, entity=entity,
+ email=self._cw.entity_from_eid(self.eidto))
+
+class SetUseEmailHook(hook.Hook):
+ """notify when a bug or story or version has its state modified"""
+ __regid__ = 'setprimaryemail'
+ __select__ = hook.Hook.__select__ & hook.match_rtype('primary_email')
+ category = 'email'
+ events = ('after_add_relation',)
+
+ def __call__(self):
+ entity = self._cw.entity_from_eid(self.eidfrom)
+ if 'use_email' in entity.e_schema.subject_relations():
+ SetUseEmailRelationOp(self._cw, entity=entity,
+ email=self._cw.entity_from_eid(self.eidto))
diff -r 1400aee10df4 -r faf279e33298 cubicweb/hooks/integrity.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/hooks/integrity.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,328 @@
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see .
+"""Core hooks: check for data integrity according to the instance'schema
+validity
+"""
+
+__docformat__ = "restructuredtext en"
+from cubicweb import _
+
+from threading import Lock
+
+from six import text_type
+
+from cubicweb import validation_error, neg_role
+from cubicweb.schema import (META_RTYPES, WORKFLOW_RTYPES,
+ RQLConstraint, RQLUniqueConstraint)
+from cubicweb.predicates import is_instance, composite_etype
+from cubicweb.uilib import soup2xhtml
+from cubicweb.server import hook
+
+# special relations that don't have to be checked for integrity, usually
+# because they are handled internally by hooks (so we trust ourselves)
+DONT_CHECK_RTYPES_ON_ADD = META_RTYPES | WORKFLOW_RTYPES
+DONT_CHECK_RTYPES_ON_DEL = META_RTYPES | WORKFLOW_RTYPES
+
+_UNIQUE_CONSTRAINTS_LOCK = Lock()
+_UNIQUE_CONSTRAINTS_HOLDER = None
+
+
+def _acquire_unique_cstr_lock(cnx):
+ """acquire the _UNIQUE_CONSTRAINTS_LOCK for the cnx.
+
+ This lock used to avoid potential integrity pb when checking
+ RQLUniqueConstraint in two different transactions, as explained in
+ https://extranet.logilab.fr/3577926
+ """
+ if 'uniquecstrholder' in cnx.transaction_data:
+ return
+ _UNIQUE_CONSTRAINTS_LOCK.acquire()
+ cnx.transaction_data['uniquecstrholder'] = True
+ # register operation responsible to release the lock on commit/rollback
+ _ReleaseUniqueConstraintsOperation(cnx)
+
+def _release_unique_cstr_lock(cnx):
+ if 'uniquecstrholder' in cnx.transaction_data:
+ del cnx.transaction_data['uniquecstrholder']
+ _UNIQUE_CONSTRAINTS_LOCK.release()
+
+class _ReleaseUniqueConstraintsOperation(hook.Operation):
+ def postcommit_event(self):
+ _release_unique_cstr_lock(self.cnx)
+ def rollback_event(self):
+ _release_unique_cstr_lock(self.cnx)
+
+
+class _CheckRequiredRelationOperation(hook.DataOperationMixIn,
+ hook.LateOperation):
+ """checking relation cardinality has to be done after commit in case the
+ relation is being replaced
+ """
+ containercls = list
+ role = key = base_rql = None
+
+ def precommit_event(self):
+ cnx = self.cnx
+ pendingeids = cnx.transaction_data.get('pendingeids', ())
+ pendingrtypes = cnx.transaction_data.get('pendingrtypes', ())
+ for eid, rtype in self.get_data():
+ # recheck pending eids / relation types
+ if eid in pendingeids:
+ continue
+ if rtype in pendingrtypes:
+ continue
+ if not cnx.execute(self.base_rql % rtype, {'x': eid}):
+ etype = cnx.entity_metas(eid)['type']
+ msg = _('at least one relation %(rtype)s is required on '
+ '%(etype)s (%(eid)s)')
+ raise validation_error(eid, {(rtype, self.role): msg},
+ {'rtype': rtype, 'etype': etype, 'eid': eid},
+ ['rtype', 'etype'])
+
+
+class _CheckSRelationOp(_CheckRequiredRelationOperation):
+ """check required subject relation"""
+ role = 'subject'
+ base_rql = 'Any O WHERE S eid %%(x)s, S %s O'
+
+class _CheckORelationOp(_CheckRequiredRelationOperation):
+ """check required object relation"""
+ role = 'object'
+ base_rql = 'Any S WHERE O eid %%(x)s, S %s O'
+
+
+class IntegrityHook(hook.Hook):
+ __abstract__ = True
+ category = 'integrity'
+
+
+class _EnsureSymmetricRelationsAdd(hook.Hook):
+ """ ensure X r Y => Y r X iff r is symmetric """
+ __regid__ = 'cw.add_ensure_symmetry'
+ __abstract__ = True
+ category = 'activeintegrity'
+ events = ('after_add_relation',)
+ # __select__ is set in the registration callback
+
+ def __call__(self):
+ self._cw.repo.system_source.add_relation(self._cw, self.eidto,
+ self.rtype, self.eidfrom)
+
+
+class _EnsureSymmetricRelationsDelete(hook.Hook):
+ """ ensure X r Y => Y r X iff r is symmetric """
+ __regid__ = 'cw.delete_ensure_symmetry'
+ __abstract__ = True
+ category = 'activeintegrity'
+ events = ('after_delete_relation',)
+ # __select__ is set in the registration callback
+
+ def __call__(self):
+ self._cw.repo.system_source.delete_relation(self._cw, self.eidto,
+ self.rtype, self.eidfrom)
+
+
+class CheckCardinalityHookBeforeDeleteRelation(IntegrityHook):
+ """check cardinalities are satisfied"""
+ __regid__ = 'checkcard_before_delete_relation'
+ events = ('before_delete_relation',)
+
+ def __call__(self):
+ rtype = self.rtype
+ if rtype in DONT_CHECK_RTYPES_ON_DEL:
+ return
+ cnx = self._cw
+ eidfrom, eidto = self.eidfrom, self.eidto
+ rdef = cnx.rtype_eids_rdef(rtype, eidfrom, eidto)
+ if (rdef.subject, rtype, rdef.object) in cnx.transaction_data.get('pendingrdefs', ()):
+ return
+ card = rdef.cardinality
+ if card[0] in '1+' and not cnx.deleted_in_transaction(eidfrom):
+ _CheckSRelationOp.get_instance(cnx).add_data((eidfrom, rtype))
+ if card[1] in '1+' and not cnx.deleted_in_transaction(eidto):
+ _CheckORelationOp.get_instance(cnx).add_data((eidto, rtype))
+
+
+class CheckCardinalityHookAfterAddEntity(IntegrityHook):
+ """check cardinalities are satisfied"""
+ __regid__ = 'checkcard_after_add_entity'
+ events = ('after_add_entity',)
+
+ def __call__(self):
+ eid = self.entity.eid
+ eschema = self.entity.e_schema
+ for rschema, targetschemas, role in eschema.relation_definitions():
+ # skip automatically handled relations
+ if rschema.type in DONT_CHECK_RTYPES_ON_ADD:
+ continue
+ rdef = rschema.role_rdef(eschema, targetschemas[0], role)
+ if rdef.role_cardinality(role) in '1+':
+ if role == 'subject':
+ op = _CheckSRelationOp.get_instance(self._cw)
+ else:
+ op = _CheckORelationOp.get_instance(self._cw)
+ op.add_data((eid, rschema.type))
+
+
+class _CheckConstraintsOp(hook.DataOperationMixIn, hook.LateOperation):
+ """ check a new relation satisfy its constraints """
+ containercls = list
+ def precommit_event(self):
+ cnx = self.cnx
+ for values in self.get_data():
+ eidfrom, rtype, eidto, constraints = values
+ # first check related entities have not been deleted in the same
+ # transaction
+ if cnx.deleted_in_transaction(eidfrom):
+ continue
+ if cnx.deleted_in_transaction(eidto):
+ continue
+ for constraint in constraints:
+ # XXX
+ # * lock RQLConstraint as well?
+ # * use a constraint id to use per constraint lock and avoid
+ # unnecessary commit serialization ?
+ if isinstance(constraint, RQLUniqueConstraint):
+ _acquire_unique_cstr_lock(cnx)
+ try:
+ constraint.repo_check(cnx, eidfrom, rtype, eidto)
+ except NotImplementedError:
+ self.critical('can\'t check constraint %s, not supported',
+ constraint)
+
+
+class CheckConstraintHook(IntegrityHook):
+ """check the relation satisfy its constraints
+
+ this is delayed to a precommit time operation since other relation which
+ will make constraint satisfied (or unsatisfied) may be added later.
+ """
+ __regid__ = 'checkconstraint'
+ events = ('after_add_relation',)
+
+ def __call__(self):
+ # XXX get only RQL[Unique]Constraints?
+ rdef = self._cw.rtype_eids_rdef(self.rtype, self.eidfrom, self.eidto)
+ constraints = rdef.constraints
+ if constraints:
+ _CheckConstraintsOp.get_instance(self._cw).add_data(
+ (self.eidfrom, self.rtype, self.eidto, constraints))
+
+
+class CheckAttributeConstraintHook(IntegrityHook):
+ """check the attribute relation satisfy its constraints
+
+ this is delayed to a precommit time operation since other relation which
+ will make constraint satisfied (or unsatisfied) may be added later.
+ """
+ __regid__ = 'checkattrconstraint'
+ events = ('after_add_entity', 'after_update_entity')
+
+ def __call__(self):
+ eschema = self.entity.e_schema
+ for attr in self.entity.cw_edited:
+ if eschema.subjrels[attr].final:
+ constraints = [c for c in eschema.rdef(attr).constraints
+ if isinstance(c, (RQLUniqueConstraint, RQLConstraint))]
+ if constraints:
+ _CheckConstraintsOp.get_instance(self._cw).add_data(
+ (self.entity.eid, attr, None, constraints))
+
+
+class DontRemoveOwnersGroupHook(IntegrityHook):
+ """delete the composed of a composite relation when this relation is deleted
+ """
+ __regid__ = 'checkownersgroup'
+ __select__ = IntegrityHook.__select__ & is_instance('CWGroup')
+ events = ('before_delete_entity', 'before_update_entity')
+
+ def __call__(self):
+ entity = self.entity
+ if self.event == 'before_delete_entity' and entity.name == 'owners':
+ raise validation_error(entity, {None: _("can't be deleted")})
+ elif self.event == 'before_update_entity' \
+ and 'name' in entity.cw_edited:
+ oldname, newname = entity.cw_edited.oldnewvalue('name')
+ if oldname == 'owners' and newname != oldname:
+ raise validation_error(entity, {('name', 'subject'): _("can't be changed")})
+
+
+class TidyHtmlFields(IntegrityHook):
+ """tidy HTML in rich text strings"""
+ __regid__ = 'htmltidy'
+ events = ('before_add_entity', 'before_update_entity')
+
+ def __call__(self):
+ entity = self.entity
+ metaattrs = entity.e_schema.meta_attributes()
+ edited = entity.cw_edited
+ for metaattr, (metadata, attr) in metaattrs.items():
+ if metadata == 'format' and attr in edited:
+ try:
+ value = edited[attr]
+ except KeyError:
+ continue # no text to tidy
+ if isinstance(value, text_type): # filter out None and Binary
+ if getattr(entity, str(metaattr)) == 'text/html':
+ edited[attr] = soup2xhtml(value, self._cw.encoding)
+
+
+class StripCWUserLoginHook(IntegrityHook):
+ """ensure user logins are stripped"""
+ __regid__ = 'stripuserlogin'
+ __select__ = IntegrityHook.__select__ & is_instance('CWUser')
+ events = ('before_add_entity', 'before_update_entity',)
+
+ def __call__(self):
+ login = self.entity.cw_edited.get('login')
+ if login:
+ self.entity.cw_edited['login'] = login.strip()
+
+
+class DeleteCompositeOrphanHook(hook.Hook):
+ """Delete the composed of a composite relation when the composite is
+ deleted (this is similar to the cascading ON DELETE CASCADE
+ semantics of sql).
+ """
+ __regid__ = 'deletecomposite'
+ __select__ = hook.Hook.__select__ & composite_etype()
+ events = ('before_delete_entity',)
+ category = 'activeintegrity'
+ # give the application's before_delete_entity hooks a chance to run before we cascade
+ order = 99
+
+ def __call__(self):
+ eid = self.entity.eid
+ for rdef, role in self.entity.e_schema.composite_rdef_roles:
+ rtype = rdef.rtype.type
+ target = getattr(rdef, neg_role(role))
+ expr = ('C %s X' % rtype) if role == 'subject' else ('X %s C' % rtype)
+ self._cw.execute('DELETE %s X WHERE C eid %%(c)s, %s' % (target, expr),
+ {'c': eid})
+
+
+def registration_callback(vreg):
+ vreg.register_all(globals().values(), __name__)
+ symmetric_rtypes = [rschema.type for rschema in vreg.schema.relations()
+ if rschema.symmetric]
+ class EnsureSymmetricRelationsAdd(_EnsureSymmetricRelationsAdd):
+ __select__ = _EnsureSymmetricRelationsAdd.__select__ & hook.match_rtype(*symmetric_rtypes)
+ vreg.register(EnsureSymmetricRelationsAdd)
+ class EnsureSymmetricRelationsDelete(_EnsureSymmetricRelationsDelete):
+ __select__ = _EnsureSymmetricRelationsDelete.__select__ & hook.match_rtype(*symmetric_rtypes)
+ vreg.register(EnsureSymmetricRelationsDelete)
diff -r 1400aee10df4 -r faf279e33298 cubicweb/hooks/logstats.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/hooks/logstats.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,59 @@
+# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see .
+
+"""looping task for dumping instance's stats in a file
+"""
+
+__docformat__ = "restructuredtext en"
+
+from datetime import datetime
+import json
+
+from cubicweb.server import hook
+
+class LogStatsStartHook(hook.Hook):
+ """register task to regularly dump instance's stats in a file
+
+ data are stored as one json entry per row
+ """
+ __regid__ = 'cubicweb.hook.logstats.start'
+ events = ('server_startup',)
+
+ def __call__(self):
+ interval = self.repo.config.get('logstat-interval', 0)
+ if interval <= 0:
+ return
+
+ def dump_stats(repo):
+ statsfile = repo.config.get('logstat-file')
+ with repo.internal_cnx() as cnx:
+ stats = cnx.call_service('repo_stats')
+ gcstats = cnx.call_service('repo_gc_stats', nmax=5)
+
+ allstats = {'resources': stats,
+ 'memory': gcstats,
+ 'timestamp': datetime.utcnow().isoformat(),
+ }
+ try:
+ with open(statsfile, 'ab') as ofile:
+ json.dump(allstats, ofile)
+ ofile.write('\n')
+ except IOError:
+ repo.warning('Cannot open stats file for writing: %s', statsfile)
+
+ self.repo.looping_task(interval, dump_stats, self.repo)
diff -r 1400aee10df4 -r faf279e33298 cubicweb/hooks/metadata.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/hooks/metadata.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,219 @@
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""Core hooks: set generic metadata"""
+
+__docformat__ = "restructuredtext en"
+
+from datetime import datetime
+from base64 import b64encode
+
+from pytz import utc
+
+from cubicweb.predicates import is_instance
+from cubicweb.server import hook
+from cubicweb.server.edition import EditedEntity
+
+
+class MetaDataHook(hook.Hook):
+ __abstract__ = True
+ category = 'metadata'
+
+
+class InitMetaAttrsHook(MetaDataHook):
+ """before create a new entity -> set creation and modification date
+
+    this is a convenience hook, you shouldn't have to disable it
+ """
+ __regid__ = 'metaattrsinit'
+ events = ('before_add_entity',)
+
+ def __call__(self):
+ timestamp = datetime.now(utc)
+ edited = self.entity.cw_edited
+ if not edited.get('creation_date'):
+ edited['creation_date'] = timestamp
+ if not edited.get('modification_date'):
+ edited['modification_date'] = timestamp
+ if not self._cw.transaction_data.get('do-not-insert-cwuri'):
+ cwuri = u'%s%s' % (self._cw.base_url(), self.entity.eid)
+ edited.setdefault('cwuri', cwuri)
+
+
+class UpdateMetaAttrsHook(MetaDataHook):
+ """update an entity -> set modification date"""
+ __regid__ = 'metaattrsupdate'
+ events = ('before_update_entity',)
+
+ def __call__(self):
+ # repairing is true during c-c upgrade/shell and similar commands. We
+ # usually don't want to update modification date in such cases.
+ #
+ # XXX to be really clean, we should turn off modification_date update
+ # explicitly on each command where we do not want that behaviour.
+ if not self._cw.vreg.config.repairing:
+ self.entity.cw_edited.setdefault('modification_date', datetime.now(utc))
+
+
+class SetCreatorOp(hook.DataOperationMixIn, hook.Operation):
+
+ def precommit_event(self):
+ cnx = self.cnx
+ relations = [(eid, cnx.user.eid) for eid in self.get_data()
+ # don't consider entities that have been created and deleted in
+ # the same transaction, nor ones where created_by has been
+ # explicitly set
+ if not cnx.deleted_in_transaction(eid) and \
+ not cnx.entity_from_eid(eid).created_by]
+ cnx.add_relations([('created_by', relations)])
+
+
+class SetOwnershipHook(MetaDataHook):
+ """create a new entity -> set owner and creator metadata"""
+ __regid__ = 'setowner'
+ events = ('after_add_entity',)
+
+ def __call__(self):
+ if not self._cw.is_internal_session:
+ self._cw.add_relation(self.entity.eid, 'owned_by', self._cw.user.eid)
+ SetCreatorOp.get_instance(self._cw).add_data(self.entity.eid)
+
+
+class SyncOwnersOp(hook.DataOperationMixIn, hook.Operation):
+ def precommit_event(self):
+ for compositeeid, composedeid in self.get_data():
+ if self.cnx.deleted_in_transaction(compositeeid):
+ continue
+ if self.cnx.deleted_in_transaction(composedeid):
+ continue
+ self.cnx.execute('SET X owned_by U WHERE C owned_by U, C eid %(c)s,'
+ 'NOT EXISTS(X owned_by U, X eid %(x)s)',
+ {'c': compositeeid, 'x': composedeid})
+
+
+class SyncCompositeOwner(MetaDataHook):
+ """when adding composite relation, the composed should have the same owners
+    as the composite
+ """
+ __regid__ = 'synccompositeowner'
+ events = ('after_add_relation',)
+
+ def __call__(self):
+ if self.rtype == 'wf_info_for':
+ # skip this special composite relation # XXX (syt) why?
+ return
+ eidfrom, eidto = self.eidfrom, self.eidto
+ composite = self._cw.rtype_eids_rdef(self.rtype, eidfrom, eidto).composite
+ if composite == 'subject':
+ SyncOwnersOp.get_instance(self._cw).add_data( (eidfrom, eidto) )
+ elif composite == 'object':
+ SyncOwnersOp.get_instance(self._cw).add_data( (eidto, eidfrom) )
+
+
+class FixUserOwnershipHook(MetaDataHook):
+ """when a user has been created, add owned_by relation on itself"""
+ __regid__ = 'fixuserowner'
+ __select__ = MetaDataHook.__select__ & is_instance('CWUser')
+ events = ('after_add_entity',)
+
+ def __call__(self):
+ self._cw.add_relation(self.entity.eid, 'owned_by', self.entity.eid)
+
+
+class UpdateFTIHook(MetaDataHook):
+ """sync fulltext index text index container when a relation with
+ fulltext_container set is added / removed
+ """
+ __regid__ = 'updateftirel'
+ events = ('after_add_relation', 'after_delete_relation')
+
+ def __call__(self):
+ rtype = self.rtype
+ cnx = self._cw
+ ftcontainer = cnx.vreg.schema.rschema(rtype).fulltext_container
+ if ftcontainer == 'subject':
+ cnx.repo.system_source.index_entity(
+ cnx, cnx.entity_from_eid(self.eidfrom))
+ elif ftcontainer == 'object':
+ cnx.repo.system_source.index_entity(
+ cnx, cnx.entity_from_eid(self.eidto))
+
+
+
+# entity source handling #######################################################
+
+class ChangeEntitySourceUpdateCaches(hook.Operation):
+ oldsource = newsource = entity = None # make pylint happy
+
+ def postcommit_event(self):
+ self.oldsource.reset_caches()
+ repo = self.cnx.repo
+ entity = self.entity
+ extid = entity.cw_metainformation()['extid']
+ repo._type_source_cache[entity.eid] = (
+ entity.cw_etype, None, self.newsource.uri)
+ repo._extid_cache[extid] = -entity.eid
+
+
+class ChangeEntitySourceDeleteHook(MetaDataHook):
+ """support for moving an entity from an external source by watching 'Any
+ cw_source CWSource' relation
+ """
+
+ __regid__ = 'cw.metadata.source-change'
+ __select__ = MetaDataHook.__select__ & hook.match_rtype('cw_source')
+ events = ('before_delete_relation',)
+
+ def __call__(self):
+ if (self._cw.deleted_in_transaction(self.eidfrom)
+ or self._cw.deleted_in_transaction(self.eidto)):
+ return
+ schange = self._cw.transaction_data.setdefault('cw_source_change', {})
+ schange[self.eidfrom] = self.eidto
+
+
+class ChangeEntitySourceAddHook(MetaDataHook):
+ __regid__ = 'cw.metadata.source-change'
+ __select__ = MetaDataHook.__select__ & hook.match_rtype('cw_source')
+ events = ('before_add_relation',)
+
+ def __call__(self):
+ schange = self._cw.transaction_data.get('cw_source_change')
+ if schange is not None and self.eidfrom in schange:
+ newsource = self._cw.entity_from_eid(self.eidto)
+ if newsource.name != 'system':
+ raise Exception('changing source to something else than the '
+ 'system source is unsupported')
+ syssource = newsource.repo_source
+ oldsource = self._cw.entity_from_eid(schange[self.eidfrom])
+ entity = self._cw.entity_from_eid(self.eidfrom)
+ # we don't want the moved entity to be reimported later. To
+ # distinguish this state, move the record from the 'entities' table
+        # to 'moved_entities'. External sources will then consider the
+ # case where `extid2eid` returns a negative eid as 'this entity was
+ # known but has been moved, ignore it'.
+ extid = self._cw.entity_metas(entity.eid)['extid']
+ assert extid is not None
+ attrs = {'eid': entity.eid, 'extid': b64encode(extid).decode('ascii')}
+ self._cw.system_sql(syssource.sqlgen.insert('moved_entities', attrs), attrs)
+ attrs = {'type': entity.cw_etype, 'eid': entity.eid, 'extid': None,
+ 'asource': 'system'}
+ self._cw.system_sql(syssource.sqlgen.update('entities', attrs, ['eid']), attrs)
+ # register an operation to update repository/sources caches
+ ChangeEntitySourceUpdateCaches(self._cw, entity=entity,
+ oldsource=oldsource.repo_source,
+ newsource=syssource)
diff -r 1400aee10df4 -r faf279e33298 cubicweb/hooks/notification.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/hooks/notification.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,244 @@
+# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""some hooks to handle notification on entity's changes"""
+
+__docformat__ = "restructuredtext en"
+
+from logilab.common.textutils import normalize_text
+from logilab.common.deprecation import deprecated
+
+from cubicweb import RegistryNotFound
+from cubicweb.predicates import is_instance
+from cubicweb.server import hook
+from cubicweb.sobjects.supervising import SupervisionMailOp
+
+
+@deprecated('[3.17] use notify_on_commit instead')
+def RenderAndSendNotificationView(cnx, view, viewargs=None):
+ notify_on_commit(cnx, view, viewargs)
+
+
+def notify_on_commit(cnx, view, viewargs=None):
+ """register a notification view (see
+ :class:`~cubicweb.sobjects.notification.NotificationView`) to be sent at
+ post-commit time, ie only if the transaction has succeeded.
+
+ `viewargs` is an optional dictionary containing extra argument to be given
+ to :meth:`~cubicweb.sobjects.notification.NotificationView.render_and_send`
+ """
+ if viewargs is None:
+ viewargs = {}
+ notif_op = _RenderAndSendNotificationOp.get_instance(cnx)
+ notif_op.add_data((view, viewargs))
+
+
+class _RenderAndSendNotificationOp(hook.DataOperationMixIn, hook.Operation):
+ """End of the notification chain. Do render and send views after commit
+
+ All others Operations end up adding data to this Operation.
+ The notification are done on ``postcommit_event`` to make sure to prevent
+ sending notification about rolled back data.
+ """
+
+ containercls = list
+
+ def postcommit_event(self):
+ deleted = self.cnx.deleted_in_transaction
+ for view, viewargs in self.get_data():
+ if view.cw_rset is not None:
+ if not view.cw_rset:
+ # entity added and deleted in the same transaction
+ # (cache effect)
+ continue
+ elif deleted(view.cw_rset[view.cw_row or 0][view.cw_col or 0]):
+ # entity added and deleted in the same transaction
+ continue
+ try:
+ view.render_and_send(**viewargs)
+ except Exception:
+ # error in post commit are not propagated
+ # We keep this logic here to prevent a small notification error
+ # to prevent them all.
+ self.exception('Notification failed')
+
+
+class NotificationHook(hook.Hook):
+ __abstract__ = True
+ category = 'notification'
+
+ def select_view(self, vid, rset, row=0, col=0):
+ try:
+ return self._cw.vreg['views'].select_or_none(vid, self._cw, rset=rset,
+ row=row, col=col)
+ except RegistryNotFound: # can happen in some config
+ # (e.g. repo only config with no
+ # notification views registered by
+ # the instance's cubes)
+ return None
+
+
+class StatusChangeHook(NotificationHook):
+ """notify when a workflowable entity has its state modified"""
+ __regid__ = 'notifystatuschange'
+ __select__ = NotificationHook.__select__ & is_instance('TrInfo')
+ events = ('after_add_entity',)
+
+ def __call__(self):
+ entity = self.entity
+ if not entity.from_state: # not a transition
+ return
+ rset = entity.related('wf_info_for')
+ view = self.select_view('notif_status_change', rset=rset, row=0)
+ if view is None:
+ return
+ comment = entity.printable_value('comment', format='text/plain')
+ # XXX don't try to wrap rest until we've a proper transformation (see
+ # #103822)
+ if comment and entity.comment_format != 'text/rest':
+ comment = normalize_text(comment, 80)
+ viewargs = {'comment': comment,
+ 'previous_state': entity.previous_state.name,
+ 'current_state': entity.new_state.name}
+ notify_on_commit(self._cw, view, viewargs=viewargs)
+
+class RelationChangeHook(NotificationHook):
+ __regid__ = 'notifyrelationchange'
+ events = ('before_add_relation', 'after_add_relation',
+ 'before_delete_relation', 'after_delete_relation')
+
+ def __call__(self):
+ """if a notification view is defined for the event, send notification
+ email defined by the view
+ """
+ rset = self._cw.eid_rset(self.eidfrom)
+ view = self.select_view('notif_%s_%s' % (self.event, self.rtype),
+ rset=rset, row=0)
+ if view is None:
+ return
+ notify_on_commit(self._cw, view)
+
+
+class EntityChangeHook(NotificationHook):
+ """if a notification view is defined for the event, send notification
+ email defined by the view
+ """
+ __regid__ = 'notifyentitychange'
+ events = ('after_add_entity', 'after_update_entity')
+
+ def __call__(self):
+ rset = self.entity.as_rset()
+ view = self.select_view('notif_%s' % self.event, rset=rset, row=0)
+ if view is None:
+ return
+ notify_on_commit(self._cw, view)
+
+
+class EntityUpdatedNotificationOp(hook.SingleLastOperation):
+ """scrap all changed entity to prepare a Notification Operation for them"""
+
+ def precommit_event(self):
+ # precommit event that creates postcommit operation
+ cnx = self.cnx
+ for eid in cnx.transaction_data['changes']:
+ view = cnx.vreg['views'].select('notif_entity_updated', cnx,
+ rset=cnx.eid_rset(eid),
+ row=0)
+ notify_on_commit(self.cnx, view,
+ viewargs={'changes': cnx.transaction_data['changes'][eid]})
+
+
+class EntityUpdateHook(NotificationHook):
+ __regid__ = 'notifentityupdated'
+ __abstract__ = True # do not register by default
+ __select__ = NotificationHook.__select__ & hook.issued_from_user_query()
+ events = ('before_update_entity',)
+ skip_attrs = set(['modification_date'])
+
+ def __call__(self):
+ cnx = self._cw
+ if cnx.added_in_transaction(self.entity.eid):
+ return # entity is being created
+ # then compute changes
+ attrs = [k for k in self.entity.cw_edited
+ if not k in self.skip_attrs]
+ if not attrs:
+ return
+ changes = cnx.transaction_data.setdefault('changes', {})
+ thisentitychanges = changes.setdefault(self.entity.eid, set())
+ rqlsel, rqlrestr = [], ['X eid %(x)s']
+ for i, attr in enumerate(attrs):
+ var = chr(65+i)
+ rqlsel.append(var)
+ rqlrestr.append('X %s %s' % (attr, var))
+ rql = 'Any %s WHERE %s' % (','.join(rqlsel), ','.join(rqlrestr))
+ rset = cnx.execute(rql, {'x': self.entity.eid})
+ for i, attr in enumerate(attrs):
+ oldvalue = rset[0][i]
+ newvalue = self.entity.cw_edited[attr]
+ if oldvalue != newvalue:
+ thisentitychanges.add((attr, oldvalue, newvalue))
+ if thisentitychanges:
+ EntityUpdatedNotificationOp(cnx)
+
+
+# supervising ##################################################################
+
+class SomethingChangedHook(NotificationHook):
+ __regid__ = 'supervising'
+ __select__ = NotificationHook.__select__ & hook.issued_from_user_query()
+ events = ('before_add_relation', 'before_delete_relation',
+ 'after_add_entity', 'before_update_entity')
+
+ def __call__(self):
+ dest = self._cw.vreg.config['supervising-addrs']
+ if not dest: # no supervisors, don't do this for nothing...
+ return
+ if self._call():
+ SupervisionMailOp(self._cw)
+
+ def _call(self):
+ event = self.event.split('_', 1)[1]
+ if event == 'update_entity':
+ if self._cw.added_in_transaction(self.entity.eid):
+ return False
+ if self.entity.e_schema == 'CWUser':
+ if not (frozenset(self.entity.cw_edited)
+ - frozenset(('eid', 'modification_date',
+ 'last_login_time'))):
+ # don't record last_login_time update which are done
+ # automatically at login time
+ return False
+ self._cw.transaction_data.setdefault('pendingchanges', []).append(
+ (event, self))
+ return True
+
+
+class EntityDeleteHook(SomethingChangedHook):
+ __regid__ = 'supervisingentitydel'
+ events = ('before_delete_entity',)
+
+ def _call(self):
+ try:
+ title = self.entity.dc_title()
+ except Exception:
+ # may raise an error during deletion process, for instance due to
+ # missing required relation
+ title = '#%s' % self.entity.eid
+ self._cw.transaction_data.setdefault('pendingchanges', []).append(
+ ('delete_entity', (self.entity.eid, self.entity.cw_etype, title)))
+ return True
diff -r 1400aee10df4 -r faf279e33298 cubicweb/hooks/security.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/hooks/security.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,209 @@
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""Security hooks: check permissions to add/delete/update entities according to
+the connected user
+"""
+
+__docformat__ = "restructuredtext en"
+from warnings import warn
+
+from logilab.common.registry import objectify_predicate
+
+from yams import buildobjs
+
+from cubicweb import Unauthorized
+from cubicweb.server import BEFORE_ADD_RELATIONS, ON_COMMIT_ADD_RELATIONS, hook
+
+
+
+def check_entity_attributes(cnx, entity, action, editedattrs=None):
+ eid = entity.eid
+ eschema = entity.e_schema
+ if action == 'delete':
+ eschema.check_perm(session, action, eid=eid)
+ return
+ # ._cw_skip_security_attributes is there to bypass security for attributes
+ # set by hooks by modifying the entity's dictionary
+ if editedattrs is None:
+ editedattrs = entity.cw_edited
+ dontcheck = editedattrs.skip_security
+ etypechecked = False
+ for attr in editedattrs:
+ if attr in dontcheck:
+ continue
+ rdef = eschema.rdef(attr, takefirst=True)
+ if rdef.final: # non final relation are checked by standard hooks
+ perms = rdef.permissions.get(action)
+ # comparison below works because the default update perm is:
+ #
+ # ('managers', ERQLExpression(Any X WHERE U has_update_permission X,
+ # X eid %(x)s, U eid %(u)s))
+ #
+ # is deserialized in this order (groups first), and ERQLExpression
+ # implements comparison by rql expression.
+ if perms == buildobjs.DEFAULT_ATTRPERMS[action]:
+ # The default rule is to delegate to the entity
+ # rule. This needs to be checked only once.
+ if not etypechecked:
+ entity.cw_check_perm(action)
+ etypechecked = True
+ continue
+ if perms == ():
+ # That means an immutable attribute; as an optimization, avoid
+ # going through check_perm.
+ raise Unauthorized(action, str(rdef))
+ rdef.check_perm(cnx, action, eid=eid)
+
+ if action == 'add' and not etypechecked:
+ # think about cnx.create_entity('Foo')
+ # the standard metadata were inserted by a hook
+ # with a bypass ... we conceptually need to check
+ # the eid attribute at *creation* time
+ entity.cw_check_perm(action)
+
+
+class CheckEntityPermissionOp(hook.DataOperationMixIn, hook.LateOperation):
+ def precommit_event(self):
+ cnx = self.cnx
+ for eid, action, edited in self.get_data():
+ entity = cnx.entity_from_eid(eid)
+ check_entity_attributes(cnx, entity, action, edited)
+
+
+class CheckRelationPermissionOp(hook.DataOperationMixIn, hook.LateOperation):
+ def precommit_event(self):
+ cnx = self.cnx
+ for action, rschema, eidfrom, eidto in self.get_data():
+ rdef = rschema.rdef(cnx.entity_metas(eidfrom)['type'],
+ cnx.entity_metas(eidto)['type'])
+ rdef.check_perm(cnx, action, fromeid=eidfrom, toeid=eidto)
+
+
+@objectify_predicate
+def write_security_enabled(cls, req, **kwargs):
+ if req is None or not req.write_security:
+ return 0
+ return 1
+
+class SecurityHook(hook.Hook):
+ __abstract__ = True
+ category = 'security'
+ __select__ = hook.Hook.__select__ & write_security_enabled()
+
+
+class AfterAddEntitySecurityHook(SecurityHook):
+ __regid__ = 'securityafteraddentity'
+ events = ('after_add_entity',)
+
+ def __call__(self):
+ CheckEntityPermissionOp.get_instance(self._cw).add_data(
+ (self.entity.eid, 'add', self.entity.cw_edited) )
+
+
+class AfterUpdateEntitySecurityHook(SecurityHook):
+ __regid__ = 'securityafterupdateentity'
+ events = ('after_update_entity',)
+
+ def __call__(self):
+ # save back editedattrs in case the entity is reedited later in the
+ # same transaction, which will lead to cw_edited being
+ # overwritten
+ action = 'add' if self._cw.added_in_transaction(self.entity.eid) else 'update'
+ CheckEntityPermissionOp.get_instance(self._cw).add_data(
+ (self.entity.eid, action, self.entity.cw_edited) )
+
+
+class BeforeDelEntitySecurityHook(SecurityHook):
+ __regid__ = 'securitybeforedelentity'
+ events = ('before_delete_entity',)
+
+ def __call__(self):
+ self.entity.cw_check_perm('delete')
+
+
+def skip_inlined_relation_security(cnx, rschema, eid):
+ """return True if security for the given inlined relation should be skipped,
+ in case where the relation has been set through modification of
+ `entity.cw_edited` in a hook
+ """
+ assert rschema.inlined
+ try:
+ entity = cnx.entity_cache(eid)
+ except KeyError:
+ return False
+ edited = getattr(entity, 'cw_edited', None)
+ if edited is None:
+ return False
+ return rschema.type in edited.skip_security
+
+
+class BeforeAddRelationSecurityHook(SecurityHook):
+ __regid__ = 'securitybeforeaddrelation'
+ events = ('before_add_relation',)
+
+ def __call__(self):
+ if self.rtype in BEFORE_ADD_RELATIONS:
+ nocheck = self._cw.transaction_data.get('skip-security', ())
+ if (self.eidfrom, self.rtype, self.eidto) in nocheck:
+ return
+ rschema = self._cw.repo.schema[self.rtype]
+ if rschema.inlined and skip_inlined_relation_security(
+ self._cw, rschema, self.eidfrom):
+ return
+ rdef = rschema.rdef(self._cw.entity_metas(self.eidfrom)['type'],
+ self._cw.entity_metas(self.eidto)['type'])
+ rdef.check_perm(self._cw, 'add', fromeid=self.eidfrom, toeid=self.eidto)
+
+
+class AfterAddRelationSecurityHook(SecurityHook):
+ __regid__ = 'securityafteraddrelation'
+ events = ('after_add_relation',)
+
+ def __call__(self):
+ if self.rtype not in BEFORE_ADD_RELATIONS:
+ nocheck = self._cw.transaction_data.get('skip-security', ())
+ if (self.eidfrom, self.rtype, self.eidto) in nocheck:
+ return
+ rschema = self._cw.repo.schema[self.rtype]
+ if rschema.inlined and skip_inlined_relation_security(
+ self._cw, rschema, self.eidfrom):
+ return
+ if self.rtype in ON_COMMIT_ADD_RELATIONS:
+ CheckRelationPermissionOp.get_instance(self._cw).add_data(
+ ('add', rschema, self.eidfrom, self.eidto) )
+ else:
+ rdef = rschema.rdef(self._cw.entity_metas(self.eidfrom)['type'],
+ self._cw.entity_metas(self.eidto)['type'])
+ rdef.check_perm(self._cw, 'add', fromeid=self.eidfrom, toeid=self.eidto)
+
+
+class BeforeDeleteRelationSecurityHook(SecurityHook):
+ __regid__ = 'securitybeforedelrelation'
+ events = ('before_delete_relation',)
+
+ def __call__(self):
+ nocheck = self._cw.transaction_data.get('skip-security', ())
+ if (self.eidfrom, self.rtype, self.eidto) in nocheck:
+ return
+ rschema = self._cw.repo.schema[self.rtype]
+ if rschema.inlined and skip_inlined_relation_security(
+ self._cw, rschema, self.eidfrom):
+ return
+ rdef = rschema.rdef(self._cw.entity_metas(self.eidfrom)['type'],
+ self._cw.entity_metas(self.eidto)['type'])
+ rdef.check_perm(self._cw, 'delete', fromeid=self.eidfrom, toeid=self.eidto)
diff -r 1400aee10df4 -r faf279e33298 cubicweb/hooks/synccomputed.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/hooks/synccomputed.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,227 @@
+# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""Hooks for synchronizing computed attributes"""
+
+__docformat__ = "restructuredtext en"
+from cubicweb import _
+
+from collections import defaultdict
+
+from rql import nodes
+
+from cubicweb.server import hook
+
+
+class RecomputeAttributeOperation(hook.DataOperationMixIn, hook.Operation):
+ """Operation to recompute caches of computed attribute at commit time,
+    depending on what has been modified in the transaction and avoiding
+    recomputing the same attribute twice
+ """
+ containercls = dict
+ def add_data(self, computed_attribute, eid=None):
+ try:
+ self._container[computed_attribute].add(eid)
+ except KeyError:
+ self._container[computed_attribute] = set((eid,))
+
+ def precommit_event(self):
+ for computed_attribute_rdef, eids in self.get_data().items():
+ attr = computed_attribute_rdef.rtype
+ formula = computed_attribute_rdef.formula
+ select = self.cnx.repo.vreg.rqlhelper.parse(formula).children[0]
+ xvar = select.get_variable('X')
+ select.add_selected(xvar, index=0)
+ select.add_group_var(xvar, index=0)
+ if None in eids:
+ select.add_type_restriction(xvar, computed_attribute_rdef.subject)
+ else:
+ select.add_eid_restriction(xvar, eids)
+ update_rql = 'SET X %s %%(value)s WHERE X eid %%(x)s' % attr
+ for eid, value in self.cnx.execute(select.as_string()):
+ self.cnx.execute(update_rql, {'value': value, 'x': eid})
+
+
+class EntityWithCACreatedHook(hook.Hook):
+ """When creating an entity that has some computed attribute, those
+ attributes have to be computed.
+
+    Concrete classes of this hook are generated at registration time by
+ introspecting the schema.
+ """
+ __abstract__ = True
+ events = ('after_add_entity',)
+ # list of computed attribute rdefs that have to be recomputed
+ computed_attributes = None
+
+ def __call__(self):
+ for rdef in self.computed_attributes:
+ RecomputeAttributeOperation.get_instance(self._cw).add_data(
+ rdef, self.entity.eid)
+
+
+class RelationInvolvedInCAModifiedHook(hook.Hook):
+ """When some relation used in a computed attribute is updated, those
+ attributes have to be recomputed.
+
+    Concrete classes of this hook are generated at registration time by
+ introspecting the schema.
+ """
+ __abstract__ = True
+ events = ('after_add_relation', 'before_delete_relation')
+ # list of (computed attribute rdef, optimize_on) that have to be recomputed
+ optimized_computed_attributes = None
+
+ def __call__(self):
+ for rdef, optimize_on in self.optimized_computed_attributes:
+ if optimize_on is None:
+ eid = None
+ else:
+ eid = getattr(self, optimize_on)
+ RecomputeAttributeOperation.get_instance(self._cw).add_data(rdef, eid)
+
+
+class AttributeInvolvedInCAModifiedHook(hook.Hook):
+ """When some attribute used in a computed attribute is updated, those
+ attributes have to be recomputed.
+
+    Concrete classes of this hook are generated at registration time by
+ introspecting the schema.
+ """
+ __abstract__ = True
+ events = ('after_update_entity',)
+ # list of (computed attribute rdef, attributes of this entity type involved)
+ # that may have to be recomputed
+ attributes_computed_attributes = None
+
+ def __call__(self):
+ edited_attributes = frozenset(self.entity.cw_edited)
+ for rdef, used_attributes in self.attributes_computed_attributes.items():
+ if edited_attributes.intersection(used_attributes):
+ # XXX optimize if the modified attributes belong to the same
+ # entity as the computed attribute
+ RecomputeAttributeOperation.get_instance(self._cw).add_data(rdef)
+
+
+# code generation at registration time #########################################
+
+def _optimize_on(formula_select, rtype):
+ """Given a formula and some rtype, tells whether on update of the given
+    relation, formula may be recomputed only for the relation's subject
+ ('eidfrom' returned), object ('eidto' returned) or None.
+
+ Optimizing is only possible when X is used as direct subject/object of this
+ relation, else we may miss some necessary update.
+ """
+ for rel in formula_select.get_nodes(nodes.Relation):
+ if rel.r_type == rtype:
+ sub = rel.get_variable_parts()[0]
+ obj = rel.get_variable_parts()[1]
+ if sub.name == 'X':
+ return 'eidfrom'
+ elif obj.name == 'X':
+ return 'eidto'
+ else:
+ return None
+
+
+class _FormulaDependenciesMatrix(object):
+ """This class computes and represents the dependencies of computed attributes
+ towards relations and attributes
+ """
+
+ def __init__(self, schema):
+ """Analyzes the schema to compute the dependencies"""
+ # entity types holding some computed attribute {etype: [computed rdefs]}
+ self.computed_attribute_by_etype = defaultdict(list)
+ # depending entity types {dep. etype: {computed rdef: dep. etype attributes}}
+ self.computed_attribute_by_etype_attrs = defaultdict(lambda: defaultdict(set))
+ # depending relations def {dep. rdef: [computed rdefs]
+ self.computed_attribute_by_relation = defaultdict(list) # by rdef
+ # Walk through all attributes definitions
+ for rdef in schema.iter_computed_attributes():
+ self.computed_attribute_by_etype[rdef.subject.type].append(rdef)
+ # extract the relations it depends upon - `rdef.formula_select` is
+ # expected to have been set by finalize_computed_attributes
+ select = rdef.formula_select
+ for rel_node in select.get_nodes(nodes.Relation):
+ if rel_node.is_types_restriction():
+ continue
+ rschema = schema.rschema(rel_node.r_type)
+ lhs, rhs = rel_node.get_variable_parts()
+ for sol in select.solutions:
+ subject_etype = sol[lhs.name]
+ if isinstance(rhs, nodes.VariableRef):
+ object_etypes = set(sol[rhs.name] for sol in select.solutions)
+ else:
+ object_etypes = rschema.objects(subject_etype)
+ for object_etype in object_etypes:
+ if rschema.final:
+ attr_for_computations = self.computed_attribute_by_etype_attrs[subject_etype]
+ attr_for_computations[rdef].add(rschema.type)
+ else:
+ depend_on_rdef = rschema.rdefs[subject_etype, object_etype]
+ self.computed_attribute_by_relation[depend_on_rdef].append(rdef)
+
+ def generate_entity_creation_hooks(self):
+ for etype, computed_attributes in self.computed_attribute_by_etype.items():
+ regid = 'computed_attribute.%s_created' % etype
+ selector = hook.is_instance(etype)
+ yield type('%sCreatedHook' % etype,
+ (EntityWithCACreatedHook,),
+ {'__regid__': regid,
+ '__select__': hook.Hook.__select__ & selector,
+ 'computed_attributes': computed_attributes})
+
+ def generate_relation_change_hooks(self):
+ for rdef, computed_attributes in self.computed_attribute_by_relation.items():
+ regid = 'computed_attribute.%s_modified' % rdef.rtype
+ selector = hook.match_rtype(rdef.rtype.type,
+ frometypes=(rdef.subject.type,),
+ toetypes=(rdef.object.type,))
+ optimized_computed_attributes = []
+ for computed_rdef in computed_attributes:
+ optimized_computed_attributes.append(
+ (computed_rdef,
+ _optimize_on(computed_rdef.formula_select, rdef.rtype))
+ )
+ yield type('%sModifiedHook' % rdef.rtype,
+ (RelationInvolvedInCAModifiedHook,),
+ {'__regid__': regid,
+ '__select__': hook.Hook.__select__ & selector,
+ 'optimized_computed_attributes': optimized_computed_attributes})
+
+ def generate_entity_update_hooks(self):
+ for etype, attributes_computed_attributes in self.computed_attribute_by_etype_attrs.items():
+ regid = 'computed_attribute.%s_updated' % etype
+ selector = hook.is_instance(etype)
+ yield type('%sModifiedHook' % etype,
+ (AttributeInvolvedInCAModifiedHook,),
+ {'__regid__': regid,
+ '__select__': hook.Hook.__select__ & selector,
+ 'attributes_computed_attributes': attributes_computed_attributes})
+
+
+def registration_callback(vreg):
+ vreg.register_all(globals().values(), __name__)
+ dependencies = _FormulaDependenciesMatrix(vreg.schema)
+ for hook_class in dependencies.generate_entity_creation_hooks():
+ vreg.register(hook_class)
+ for hook_class in dependencies.generate_relation_change_hooks():
+ vreg.register(hook_class)
+ for hook_class in dependencies.generate_entity_update_hooks():
+ vreg.register(hook_class)
diff -r 1400aee10df4 -r faf279e33298 cubicweb/hooks/syncschema.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/hooks/syncschema.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,1430 @@
+# copyright 2003-2016 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+"""schema hooks:
+
+- synchronize the living schema object with the persistent schema
+- perform physical update on the source when necessary
+
+checking for schema consistency is done in hooks.py
+"""
+
+__docformat__ = "restructuredtext en"
+from cubicweb import _
+
+import json
+from copy import copy
+
+from yams.schema import BASE_TYPES, BadSchemaDefinition, RelationDefinitionSchema
+from yams.constraints import UniqueConstraint
+from yams import buildobjs as ybo, convert_default_value
+
+from logilab.common.decorators import clear_cache
+
+from cubicweb import validation_error
+from cubicweb.predicates import is_instance
+from cubicweb.schema import (SCHEMA_TYPES, META_RTYPES, VIRTUAL_RTYPES,
+ CONSTRAINTS, UNIQUE_CONSTRAINTS, ETYPE_NAME_MAP)
+from cubicweb.server import hook, schemaserial as ss, schema2sql as y2sql
+from cubicweb.server.sqlutils import SQL_PREFIX
+from cubicweb.hooks.synccomputed import RecomputeAttributeOperation
+
+# core entity and relation types which can't be removed
+CORE_TYPES = BASE_TYPES | SCHEMA_TYPES | META_RTYPES | set(
+ ('CWUser', 'CWGroup', 'login', 'upassword', 'name', 'in_group'))
+
+
def get_constraints(cnx, entity):
    """Return the list of constraint objects recorded in the transaction data
    under the given CWAttribute/CWRelation entity's eid.

    Each recorded eid points to a CWConstraint entity; its serialized value is
    deserialized through the CONSTRAINTS registry and tagged with its eid.
    """
    constraints = []
    for cstreid in cnx.transaction_data.get(entity.eid, ()):
        cstrent = cnx.entity_from_eid(cstreid)
        cstr = CONSTRAINTS[cstrent.type].deserialize(cstrent.value)
        cstr.eid = cstreid
        constraints.append(cstr)
    return constraints
+
+
def group_mapping(cw):
    """Return the group mapping from ``ss.group_mapping``, cached in the
    transaction data so it is computed at most once per transaction.
    """
    tdata = cw.transaction_data
    if 'groupmap' not in tdata:
        tdata['groupmap'] = ss.group_mapping(cw)
    return tdata['groupmap']
+
+
def add_inline_relation_column(cnx, etype, rtype):
    """Add the necessary column and index for an inlined relation.

    Idempotent within a transaction: the (etype, rtype) pair is recorded in
    the transaction data and the function returns early if already handled.
    """
    attrkey = '%s.%s' % (etype, rtype)
    createdattrs = cnx.transaction_data.setdefault('createdattrs', set())
    if attrkey in createdattrs:
        return
    createdattrs.add(attrkey)
    table = SQL_PREFIX + etype
    column = SQL_PREFIX + rtype
    try:
        cnx.system_sql(str('ALTER TABLE %s ADD %s integer REFERENCES entities (eid)'
                           % (table, column)),
                       rollback_on_failure=False)
        cnx.info('added column %s to table %s', column, table)
    except Exception:
        # silent exception here, if this error has not been raised because the
        # column already exists, index creation will fail anyway
        # BUGFIX: arguments were passed as (table, column), making the log
        # message name the table as the column and vice versa
        cnx.exception('error while adding column %s to table %s',
                      column, table)
    # create index before alter table which may expectingly fail during test
    # (sqlite) while index creation should never fail (test for index existence
    # is done by the dbhelper)
    cnx.repo.system_source.create_index(cnx, table, column)
    cnx.info('added index on %s(%s)', table, column)
+
+
def insert_rdef_on_subclasses(cnx, eschema, rschema, rdefdef, props):
    """Propagate the relation definition `rdefdef` to every entity type
    specializing `eschema` that doesn't already have its own definition
    for `rschema`, inserting it through schemaserial RQL.
    """
    # XXX 'infered': True/False, not clear actually
    props.update({'constraints': rdefdef.constraints,
                  'description': rdefdef.description,
                  'cardinality': rdefdef.cardinality,
                  'permissions': rdefdef.get_permissions(),
                  'order': rdefdef.order,
                  'infered': False, 'eid': None
                  })
    cstrtypemap = ss.cstrtype_mapping(cnx)
    groupmap = group_mapping(cnx)
    object = rschema.schema.eschema(rdefdef.object)
    for specialization in eschema.specialized_by(False):
        if (specialization, rdefdef.object) in rschema.rdefs:
            # the subclass already has an explicit definition, leave it alone
            continue
        sperdef = RelationDefinitionSchema(specialization, rschema,
                                           object, None, values=props)
        ss.execschemarql(cnx.execute, sperdef,
                         ss.rdef2rql(sperdef, cstrtypemap, groupmap))
+
+
def check_valid_changes(cnx, entity, ro_attrs=('name', 'final')):
    """Raise a validation error if one of the read-only attributes listed in
    `ro_attrs` has been given a new value on `entity`.
    """
    errors = {}
    edited = entity.cw_edited
    # don't use getattr(entity, attr), we would get the modified value if any
    for attr in edited:
        if attr not in ro_attrs:
            continue
        origval, newval = edited.oldnewvalue(attr)
        if newval != origval:
            errors[attr] = _("can't change this attribute")
    if errors:
        raise validation_error(entity, errors)
+
+
+class _MockEntity(object): # XXX use a named tuple with python 2.6
+ def __init__(self, eid):
+ self.eid = eid
+
+
class SyncSchemaHook(hook.Hook):
    """abstract class for schema synchronization hooks (in the `syncschema`
    category)
    """
    __abstract__ = True  # base class only, not registered itself
    category = 'syncschema'
+
+
+# operations for low-level database alteration ################################
+
class DropTable(hook.Operation):
    """actually remove a database table from the instance's schema"""
    table = None  # make pylint happy

    def precommit_event(self):
        # track dropped tables in the transaction data so the same table is
        # never dropped twice within one transaction
        dropped = self.cnx.transaction_data.setdefault('droppedtables', set())
        if self.table in dropped:
            return  # already processed
        dropped.add(self.table)
        self.cnx.system_sql('DROP TABLE %s' % self.table)
        self.info('dropped table %s', self.table)

    # XXX revertprecommit_event
+
+
class DropRelationTable(DropTable):
    """drop the "<rtype>_relation" table of a non-inlined relation type"""

    def __init__(self, cnx, rtype):
        super(DropRelationTable, self).__init__(
            cnx, table='%s_relation' % rtype)
        # record the relation type as pending removal so other hooks may
        # skip work on it during this transaction
        cnx.transaction_data.setdefault('pendingrtypes', set()).add(rtype)
+
+
class DropColumn(hook.DataOperationMixIn, hook.Operation):
    """actually remove the attribute's column from entity table in the system
    database
    """
    def precommit_event(self):
        cnx = self.cnx
        # data items are (etype, attr) pairs accumulated through add_data()
        for etype, attr in self.get_data():
            table = SQL_PREFIX + etype
            column = SQL_PREFIX + attr
            source = cnx.repo.system_source
            # drop index if any
            source.drop_index(cnx, table, column)
            if source.dbhelper.alter_column_support:
                cnx.system_sql('ALTER TABLE %s DROP COLUMN %s' % (table, column),
                               rollback_on_failure=False)
                self.info('dropped column %s from table %s', column, table)
            else:
                # not supported by sqlite for instance
                self.error('dropping column not supported by the backend, handle '
                           'it yourself (%s.%s)', table, column)

    # XXX revertprecommit_event
+
+
+# base operations for in-memory schema synchronization ########################
+
class MemSchemaNotifyChanges(hook.SingleLastOperation):
    """the update schema operation:

    special operation which should be called once and after all other schema
    operations. It will trigger internal structures rebuilding to consider
    schema changes.
    """

    def __init__(self, cnx):
        hook.SingleLastOperation.__init__(self, cnx)

    def precommit_event(self):
        # ordered_relations caches may be stale after schema changes, clear
        # them on every non-final entity schema
        for eschema in self.cnx.repo.schema.entities():
            if not eschema.final:
                clear_cache(eschema, 'ordered_relations')

    def postcommit_event(self):
        repo = self.cnx.repo
        # commit event should not raise error, while set_schema has chances to
        # do so because it triggers full vreg reloading
        try:
            repo.schema.rebuild_infered_relations()
            # trigger vreg reload
            repo.set_schema(repo.schema)
            # CWUser class might have changed, update current session users
            cwuser_cls = self.cnx.vreg['etypes'].etype_class('CWUser')
            for session in repo._sessions.values():
                session.user.__class__ = cwuser_cls
        except Exception:
            self.critical('error while setting schema', exc_info=True)

    def rollback_event(self):
        # caches cleared during precommit must be invalidated again on
        # rollback, reuse the same logic
        self.precommit_event()
+
+
class MemSchemaOperation(hook.Operation):
    """base class for schema operations"""
    def __init__(self, cnx, **kwargs):
        hook.Operation.__init__(self, cnx, **kwargs)
        # every schema operation is triggering a schema update
        MemSchemaNotifyChanges(cnx)
+
+
+# operations for high-level source database alteration ########################
+
class CWETypeAddOp(MemSchemaOperation):
    """after adding a CWEType entity:
    * add it to the instance's schema
    * create the necessary table
    * set creation_date and modification_date by creating the necessary
      CWAttribute entities
    * add relation by creating the necessary CWRelation entity
    """
    entity = None  # make pylint happy

    def precommit_event(self):
        cnx = self.cnx
        entity = self.entity
        schema = cnx.vreg.schema
        etype = ybo.EntityType(eid=entity.eid, name=entity.name,
                               description=entity.description)
        eschema = schema.add_entity_type(etype)
        # create the necessary table
        for sql in y2sql.eschema2sql(cnx.repo.system_source.dbhelper,
                                     eschema, prefix=SQL_PREFIX):
            cnx.system_sql(sql)
        # add meta relations
        gmap = group_mapping(cnx)
        cmap = ss.cstrtype_mapping(cnx)
        for rtype in (META_RTYPES - VIRTUAL_RTYPES):
            try:
                rschema = schema[rtype]
            except KeyError:
                self.critical('rtype %s was not handled at cwetype creation time', rtype)
                continue
            if not rschema.rdefs:
                self.warning('rtype %s has no relation definition yet', rtype)
                continue
            # use an arbitrary existing rdef of this meta relation as template
            sampletype = rschema.subjects()[0]
            desttype = rschema.objects()[0]
            try:
                rdef = copy(rschema.rdef(sampletype, desttype))
            except KeyError:
                # this combo does not exist because this is not a universal META_RTYPE
                continue
            rdef.subject = _MockEntity(eid=entity.eid)
            mock = _MockEntity(eid=None)
            ss.execschemarql(cnx.execute, mock, ss.rdef2rql(rdef, cmap, gmap))

    def revertprecommit_event(self):
        # revert changes on in memory schema
        self.cnx.vreg.schema.del_entity_type(self.entity.name)
        # revert changes on database
        self.cnx.system_sql('DROP TABLE %s%s' % (SQL_PREFIX, self.entity.name))
+
+
class CWETypeRenameOp(MemSchemaOperation):
    """this operation updates physical storage accordingly"""

    oldname = newname = None  # make pylint happy

    def rename(self, oldname, newname):
        """Rename entity type `oldname` to `newname` in the in-memory schema
        and in physical storage (table, indexes, entities table rows).
        """
        cnx = self.cnx
        source = cnx.repo.system_source
        dbhelper = source.dbhelper
        # we need sql to operate physical changes on the system database
        sqlexec = cnx.system_sql
        cnx.vreg.schema.rename_entity_type(oldname, newname)
        old_table = SQL_PREFIX + oldname
        new_table = SQL_PREFIX + newname
        eschema = cnx.vreg.schema.eschema(newname)
        # drop old indexes before the renaming
        for rschema in eschema.subject_relations():
            if rschema.inlined or (rschema.final and eschema.rdef(rschema.type).indexed):
                source.drop_index(cnx, old_table, SQL_PREFIX + rschema.type)
            if rschema.final and any(isinstance(cstr, UniqueConstraint)
                                     for cstr in eschema.rdef(rschema.type).constraints):
                source.drop_index(cnx, old_table, SQL_PREFIX + rschema.type, unique=True)
        sql = dbhelper.sql_rename_table(old_table, new_table)
        sqlexec(sql)
        self.info('renamed table %s to %s', oldname, newname)
        sqlexec('UPDATE entities SET type=%(newname)s WHERE type=%(oldname)s',
                {'newname': newname, 'oldname': oldname})
        # keep the repository's type/source cache in sync with the new name
        for eid, (etype, extid, auri) in cnx.repo._type_source_cache.items():
            if etype == oldname:
                cnx.repo._type_source_cache[eid] = (newname, extid, auri)
        # recreate the indexes
        for rschema in eschema.subject_relations():
            if rschema.inlined or (rschema.final and eschema.rdef(rschema.type).indexed):
                source.create_index(cnx, new_table, SQL_PREFIX + rschema.type)
            if rschema.final and any(isinstance(cstr, UniqueConstraint)
                                     for cstr in eschema.rdef(rschema.type).constraints):
                source.create_index(cnx, new_table, SQL_PREFIX + rschema.type, unique=True)
        # multi-column unique indexes are named after the entity type: drop
        # the old-named ones and recreate them under the new name
        for attrs in eschema._unique_together or ():
            columns = ['%s%s' % (SQL_PREFIX, attr) for attr in attrs]
            old_index_name = y2sql.unique_index_name(oldname, columns)
            for sql in dbhelper.sqls_drop_multicol_unique_index(
                    new_table, columns, old_index_name):
                sqlexec(sql)
            new_index_name = y2sql.unique_index_name(newname, columns)
            for sql in dbhelper.sqls_create_multicol_unique_index(
                    new_table, columns, new_index_name):
                sqlexec(sql)
        # XXX transaction records

    def precommit_event(self):
        self.rename(self.oldname, self.newname)

    def revertprecommit_event(self):
        # renaming back restores both in-memory schema and storage
        self.rename(self.newname, self.oldname)
+
+
class CWRTypeUpdateOp(MemSchemaOperation):
    """actually update some properties of a relation definition"""

    rschema = entity = values = None  # make pylint happy
    oldvalues = None

    def precommit_event(self):
        rschema = self.rschema
        if rschema.final:
            return  # watched changes to final relation type are unexpected
        cnx = self.cnx
        if 'fulltext_container' in self.values:
            # fulltext side changed: schedule FT reindexing of both ends
            op = UpdateFTIndexOp.get_instance(cnx)
            for subjtype, objtype in rschema.rdefs:
                if self.values['fulltext_container'] == 'subject':
                    op.add_data(subjtype)
                    op.add_data(objtype)
                else:
                    op.add_data(objtype)
                    op.add_data(subjtype)
        # update the in-memory schema first
        self.oldvalues = dict((attr, getattr(rschema, attr)) for attr in self.values)
        self.rschema.__dict__.update(self.values)
        # then make necessary changes to the system source database
        if 'inlined' not in self.values:
            return  # nothing to do
        inlined = self.values['inlined']
        # check in-lining is possible when inlined
        if inlined:
            self.entity.check_inlined_allowed()
        # inlined changed, make necessary physical changes!
        sqlexec = self.cnx.system_sql
        rtype = rschema.type
        eidcolumn = SQL_PREFIX + 'eid'
        if not inlined:
            # need to create the relation if it has not been already done by
            # another event of the same transaction
            if rschema.type not in cnx.transaction_data.get('createdtables', ()):
                # create the necessary table
                for sql in y2sql.rschema2sql(rschema):
                    sqlexec(sql)
                cnx.transaction_data.setdefault('createdtables', []).append(
                    rschema.type)
            # copy existant data
            column = SQL_PREFIX + rtype
            for etype in rschema.subjects():
                table = SQL_PREFIX + str(etype)
                sqlexec('INSERT INTO %s_relation SELECT %s, %s FROM %s WHERE NOT %s IS NULL'
                        % (rtype, eidcolumn, column, table, column))
            # drop existant columns
            for etype in rschema.subjects():
                DropColumn.get_instance(cnx).add_data((str(etype), rtype))
        else:
            for etype in rschema.subjects():
                try:
                    add_inline_relation_column(cnx, str(etype), rtype)
                except Exception as ex:
                    # the column probably already exists. this occurs when the
                    # entity's type has just been added or if the column has not
                    # been previously dropped (eg sqlite)
                    self.error('error while altering table %s: %s', etype, ex)
                # copy existant data.
                # XXX don't use, it's not supported by sqlite (at least at when i tried it)
                #sqlexec('UPDATE %(etype)s SET %(rtype)s=eid_to '
                #        'FROM %(rtype)s_relation '
                #        'WHERE %(etype)s.eid=%(rtype)s_relation.eid_from'
                #        % locals())
                table = SQL_PREFIX + str(etype)
                cursor = sqlexec('SELECT eid_from, eid_to FROM %(table)s, '
                                 '%(rtype)s_relation WHERE %(table)s.%(eidcolumn)s='
                                 '%(rtype)s_relation.eid_from' % locals())
                args = [{'val': eid_to, 'x': eid} for eid, eid_to in cursor.fetchall()]
                if args:
                    column = SQL_PREFIX + rtype
                    cursor.executemany('UPDATE %s SET %s=%%(val)s WHERE %s=%%(x)s'
                                       % (table, column, eidcolumn), args)
                # drop existant table
                DropRelationTable(cnx, rtype)

    def revertprecommit_event(self):
        # revert changes on in memory schema
        self.rschema.__dict__.update(self.oldvalues)
        # XXX revert changes on database
+
+
class CWComputedRTypeUpdateOp(MemSchemaOperation):
    """actually update some properties of a computed relation definition"""
    rschema = entity = rule = None  # make pylint happy
    old_rule = None

    def precommit_event(self):
        # update the in-memory schema first, remembering the previous rule
        # so the change can be reverted
        self.old_rule, self.rschema.rule = self.rschema.rule, self.rule

    def revertprecommit_event(self):
        # restore the former rule on the in-memory schema
        self.rschema.rule = self.old_rule
+
+
class CWAttributeAddOp(MemSchemaOperation):
    """an attribute relation (CWAttribute) has been added:
    * add the necessary column
    * set default on this column if any and possible
    * register an operation to add the relation definition to the
      instance's schema on commit

    constraints are handled by specific hooks
    """
    entity = None  # make pylint happy

    def init_rdef(self, **kwargs):
        """Build a yams RelationDefinition from ``self.entity``, add it to
        the in-memory schema and return the (rdefdef, rdef) pair.
        """
        entity = self.entity
        fromentity = entity.stype
        rdefdef = self.rdefdef = ybo.RelationDefinition(
            str(fromentity.name), entity.rtype.name, str(entity.otype.name),
            description=entity.description, cardinality=entity.cardinality,
            constraints=get_constraints(self.cnx, entity),
            order=entity.ordernum, eid=entity.eid, **kwargs)
        try:
            self.cnx.vreg.schema.add_relation_def(rdefdef)
        except BadSchemaDefinition:
            # rdef has been infered then explicitly added (current consensus is
            # not clear at all versus infered relation handling (and much
            # probably buggy)
            rdef = self.cnx.vreg.schema.rschema(rdefdef.name).rdefs[rdefdef.subject, rdefdef.object]
            assert rdef.infered
        else:
            rdef = self.cnx.vreg.schema.rschema(rdefdef.name).rdefs[rdefdef.subject, rdefdef.object]

        # shift the order of the attributes placed after the new one
        self.cnx.execute('SET X ordernum Y+1 '
                         'WHERE X from_entity SE, SE eid %(se)s, X ordernum Y, '
                         'X ordernum >= %(order)s, NOT X eid %(x)s',
                         {'x': entity.eid, 'se': fromentity.eid,
                          'order': entity.ordernum or 0})
        return rdefdef, rdef

    def precommit_event(self):
        cnx = self.cnx
        entity = self.entity
        # entity.defaultval is a Binary or None, but we need a correctly typed
        # value
        default = entity.defaultval
        if default is not None:
            default = default.unzpickle()
        props = {'default': default,
                 'indexed': entity.indexed,
                 'fulltextindexed': entity.fulltextindexed,
                 'internationalizable': entity.internationalizable}
        if entity.extra_props:
            props.update(json.loads(entity.extra_props.getvalue().decode('ascii')))
        # entity.formula may not exist yet if we're migrating to 3.20
        if hasattr(entity, 'formula'):
            props['formula'] = entity.formula
        # update the in-memory schema first
        rdefdef, rdef = self.init_rdef(**props)
        # then make necessary changes to the system source database
        syssource = cnx.repo.system_source
        attrtype = y2sql.type_from_rdef(syssource.dbhelper, rdef)
        # added some str() wrapping query since some backend (eg psycopg) don't
        # allow unicode queries
        table = SQL_PREFIX + rdefdef.subject
        column = SQL_PREFIX + rdefdef.name
        try:
            cnx.system_sql(str('ALTER TABLE %s ADD %s %s'
                               % (table, column, attrtype)),
                           rollback_on_failure=False)
            self.info('added column %s to table %s', column, table)
        except Exception as ex:
            # the column probably already exists. this occurs when
            # the entity's type has just been added or if the column
            # has not been previously dropped
            self.error('error while altering table %s: %s', table, ex)
        if entity.indexed:
            try:
                syssource.create_index(cnx, table, column, unique=False)
            except Exception as ex:
                self.error('error while creating index for %s.%s: %s',
                           table, column, ex)
        # final relations are not infered, propagate
        schema = cnx.vreg.schema
        try:
            eschema = schema.eschema(rdefdef.subject)
        except KeyError:
            return  # entity type currently being added
        # propagate attribute to children classes
        rschema = schema.rschema(rdefdef.name)
        # if relation type has been inserted in the same transaction, its final
        # attribute is still set to False, so we've to ensure it's True
        rschema.final = True
        insert_rdef_on_subclasses(cnx, eschema, rschema, rdefdef, props)
        # update existing entities with the default value of newly added attribute
        if default is not None:
            default = convert_default_value(self.rdefdef, default)
            cnx.system_sql('UPDATE %s SET %s=%%(default)s' % (table, column),
                           {'default': default})
        # if attribute is computed, compute it
        if getattr(entity, 'formula', None):
            # add rtype attribute for RelationDefinitionSchema api compat, this
            # is what RecomputeAttributeOperation expect
            rdefdef.rtype = rdefdef.name
            RecomputeAttributeOperation.get_instance(cnx).add_data(rdefdef)

    def revertprecommit_event(self):
        # revert changes on in memory schema
        if getattr(self, 'rdefdef', None) is None:
            return
        self.cnx.vreg.schema.del_relation_def(
            self.rdefdef.subject, self.rdefdef.name, self.rdefdef.object)
        # XXX revert changes on database
+
+
class CWRelationAddOp(CWAttributeAddOp):
    """an actual relation has been added:

    * add the relation definition to the instance's schema

    * if this is an inlined relation, add the necessary column else if it's the
      first instance of this relation type, add the necessary table and set
      default permissions

    constraints are handled by specific hooks
    """
    entity = None  # make pylint happy

    def precommit_event(self):
        cnx = self.cnx
        entity = self.entity
        # update the in-memory schema first
        rdefdef, rdef = self.init_rdef(composite=entity.composite)
        # then make necessary changes to the system source database
        schema = cnx.vreg.schema
        rtype = rdefdef.name
        rschema = schema.rschema(rtype)
        # this have to be done before permissions setting
        if rschema.inlined:
            # need to add a column if the relation is inlined and if this is the
            # first occurence of "Subject relation Something" whatever Something
            if len(rschema.objects(rdefdef.subject)) == 1:
                add_inline_relation_column(cnx, rdefdef.subject, rtype)
            eschema = schema[rdefdef.subject]
            insert_rdef_on_subclasses(cnx, eschema, rschema, rdefdef,
                                      {'composite': entity.composite})
        else:
            if rschema.symmetric:
                # for symmetric relations, rdefs will store relation definitions
                # in both ways (i.e. (subj -> obj) and (obj -> subj))
                relation_already_defined = len(rschema.rdefs) > 2
            else:
                relation_already_defined = len(rschema.rdefs) > 1
            # need to create the relation if no relation definition in the
            # schema and if it has not been added during other event of the same
            # transaction
            if not (relation_already_defined or
                    rtype in cnx.transaction_data.get('createdtables', ())):
                rschema = schema.rschema(rtype)
                # create the necessary table
                for sql in y2sql.rschema2sql(rschema):
                    cnx.system_sql(sql)
                cnx.transaction_data.setdefault('createdtables', []).append(
                    rtype)

    # XXX revertprecommit_event
+
+
class RDefDelOp(MemSchemaOperation):
    """an actual relation has been removed"""
    rdef = None  # make pylint happy

    def precommit_event(self):
        cnx = self.cnx
        rdef = self.rdef
        rschema = rdef.rtype
        # make necessary changes to the system source database first
        rdeftype = rschema.final and 'CWAttribute' or 'CWRelation'
        execute = cnx.execute
        # count remaining definitions of this relation type
        rset = execute('Any COUNT(X) WHERE X is %s, X relation_type R,'
                       'R eid %%(x)s' % rdeftype, {'x': rschema.eid})
        lastrel = rset[0][0] == 0
        # we have to update physical schema systematically for final and inlined
        # relations, but only if it's the last instance for this relation type
        # for other relations
        if (rschema.final or rschema.inlined):
            if not cnx.deleted_in_transaction(rdef.subject.eid):
                rset = execute('Any COUNT(X) WHERE X is %s, X relation_type R, '
                               'R eid %%(r)s, X from_entity E, E eid %%(e)s'
                               % rdeftype,
                               {'r': rschema.eid, 'e': rdef.subject.eid})
                if rset[0][0] == 0:
                    # no other rdef on this subject: drop the column
                    ptypes = cnx.transaction_data.setdefault('pendingrtypes', set())
                    ptypes.add(rschema.type)
                    DropColumn.get_instance(cnx).add_data((str(rdef.subject), str(rschema)))
                elif rschema.inlined:
                    # column shared with other rdefs: only null out values
                    # pointing to the removed object type
                    cnx.system_sql('UPDATE %s%s SET %s%s=NULL WHERE '
                                   'EXISTS(SELECT 1 FROM entities '
                                   ' WHERE eid=%s%s AND type=%%(to_etype)s)'
                                   % (SQL_PREFIX, rdef.subject, SQL_PREFIX, rdef.rtype,
                                      SQL_PREFIX, rdef.rtype),
                                   {'to_etype': rdef.object.type})
        elif lastrel:
            DropRelationTable(cnx, str(rschema))
        else:
            # shared relation table: only delete rows of this rdef's types
            cnx.system_sql('DELETE FROM %s_relation WHERE '
                           'EXISTS(SELECT 1 FROM entities '
                           ' WHERE eid=eid_from AND type=%%(from_etype)s)'
                           ' AND EXISTS(SELECT 1 FROM entities '
                           ' WHERE eid=eid_to AND type=%%(to_etype)s)'
                           % rschema,
                           {'from_etype': rdef.subject.type, 'to_etype': rdef.object.type})
        # then update the in-memory schema
        if rdef.subject not in ETYPE_NAME_MAP and rdef.object not in ETYPE_NAME_MAP:
            rschema.del_relation_def(rdef.subject, rdef.object)
        # if this is the last relation definition of this type, drop associated
        # relation type
        if lastrel and not cnx.deleted_in_transaction(rschema.eid):
            execute('DELETE CWRType X WHERE X eid %(x)s', {'x': rschema.eid})

    def revertprecommit_event(self):
        # revert changes on in memory schema
        #
        # Note: add_relation_def takes a RelationDefinition, not a
        # RelationDefinitionSchema, needs to fake it
        rdef = self.rdef
        rdef.name = str(rdef.rtype)
        if rdef.subject not in ETYPE_NAME_MAP and rdef.object not in ETYPE_NAME_MAP:
            self.cnx.vreg.schema.add_relation_def(rdef)
+
+
class RDefUpdateOp(MemSchemaOperation):
    """actually update some properties of a relation definition"""
    rschema = rdefkey = values = None  # make pylint happy
    rdef = oldvalues = None
    indexed_changed = null_allowed_changed = False

    def precommit_event(self):
        cnx = self.cnx
        rdef = self.rdef = self.rschema.rdefs[self.rdefkey]
        # update the in-memory schema first
        self.oldvalues = dict((attr, getattr(rdef, attr)) for attr in self.values)
        rdef.update(self.values)
        # then make necessary changes to the system source database
        syssource = cnx.repo.system_source
        if 'indexed' in self.values:
            syssource.update_rdef_indexed(cnx, rdef)
            self.indexed_changed = True
        # a change of the first cardinality character on a final relation
        # toggles the NOT NULL status of the column
        if ('cardinality' in self.values and rdef.rtype.final
                and self.values['cardinality'][0] != self.oldvalues['cardinality'][0]):
            syssource.update_rdef_null_allowed(self.cnx, rdef)
            self.null_allowed_changed = True
        if 'fulltextindexed' in self.values:
            UpdateFTIndexOp.get_instance(cnx).add_data(rdef.subject)
        if 'formula' in self.values:
            RecomputeAttributeOperation.get_instance(cnx).add_data(rdef)

    def revertprecommit_event(self):
        if self.rdef is None:
            return
        # revert changes on in memory schema
        self.rdef.update(self.oldvalues)
        # revert changes on database
        syssource = self.cnx.repo.system_source
        if self.indexed_changed:
            syssource.update_rdef_indexed(self.cnx, self.rdef)
        if self.null_allowed_changed:
            syssource.update_rdef_null_allowed(self.cnx, self.rdef)
+
+
+def _set_modifiable_constraints(rdef):
+ # for proper in-place modification of in-memory schema: if rdef.constraints
+ # is already a list, reuse it (we're updating multiple constraints of the
+ # same rdef in the same transaction)
+ if not isinstance(rdef.constraints, list):
+ rdef.constraints = list(rdef.constraints)
+
+
class CWConstraintDelOp(MemSchemaOperation):
    """actually remove a constraint of a relation definition"""
    rdef = oldcstr = newcstr = None  # make pylint happy
    size_cstr_changed = unique_changed = False

    def precommit_event(self):
        cnx = self.cnx
        rdef = self.rdef
        # in-place modification of in-memory schema first
        _set_modifiable_constraints(rdef)
        if self.oldcstr in rdef.constraints:
            rdef.constraints.remove(self.oldcstr)
        else:
            self.critical('constraint %s for rdef %s was missing or already removed',
                          self.oldcstr, rdef)
        if cnx.deleted_in_transaction(rdef.eid):
            # don't try to alter a table that's going away (or is already gone)
            return
        # then update database: alter the physical schema on size/unique
        # constraint changes
        syssource = cnx.repo.system_source
        cstrtype = self.oldcstr.type()
        if cstrtype == 'SizeConstraint':
            # if the size constraint is being replaced with a new max size, we'll
            # call update_rdef_column in CWConstraintAddOp, skip it here
            for cstr in cnx.transaction_data.get('newsizecstr', ()):
                rdefentity = cstr.reverse_constrained_by[0]
                cstrrdef = cnx.vreg.schema.schema_by_eid(rdefentity.eid)
                if cstrrdef == rdef:
                    return

            # we found that the size constraint for this rdef is really gone,
            # not just replaced by another
            syssource.update_rdef_column(cnx, rdef)
            self.size_cstr_changed = True
        elif cstrtype == 'UniqueConstraint':
            syssource.update_rdef_unique(cnx, rdef)
            self.unique_changed = True
        elif cstrtype in ('BoundaryConstraint',
                          'IntervalBoundConstraint',
                          'StaticVocabularyConstraint'):
            # these are materialized as CHECK constraints in the database
            cnx.system_sql('ALTER TABLE %s%s DROP CONSTRAINT %s'
                           % (SQL_PREFIX, rdef.subject, self.oldcstr.name_for(rdef)))

    def revertprecommit_event(self):
        # revert changes on in memory schema
        if self.newcstr is not None:
            self.rdef.constraints.remove(self.newcstr)
        if self.oldcstr is not None:
            self.rdef.constraints.append(self.oldcstr)
        # revert changes on database
        syssource = self.cnx.repo.system_source
        if self.size_cstr_changed:
            syssource.update_rdef_column(self.cnx, self.rdef)
        if self.unique_changed:
            syssource.update_rdef_unique(self.cnx, self.rdef)
+
+
class CWConstraintAddOp(CWConstraintDelOp):
    """actually update constraint of a relation definition"""
    entity = None  # make pylint happy

    def precommit_event(self):
        cnx = self.cnx
        rdefentity = self.entity.reverse_constrained_by[0]
        rdef = self.rdef = cnx.vreg.schema.schema_by_eid(rdefentity.eid)
        cstrtype = self.entity.type
        if cstrtype in UNIQUE_CONSTRAINTS:
            # a unique-typed constraint replaces any previous one of its type
            oldcstr = self.oldcstr = rdef.constraint_by_type(cstrtype)
        else:
            oldcstr = None
        newcstr = self.newcstr = CONSTRAINTS[cstrtype].deserialize(self.entity.value)
        # in-place modification of in-memory schema first
        _set_modifiable_constraints(rdef)
        newcstr.eid = self.entity.eid
        if oldcstr is not None:
            rdef.constraints.remove(oldcstr)
        rdef.constraints.append(newcstr)
        # then update database: alter the physical schema on size/unique
        # constraint changes
        syssource = cnx.repo.system_source
        if cstrtype == 'SizeConstraint' and (oldcstr is None or
                                             oldcstr.max != newcstr.max):
            syssource.update_rdef_column(cnx, rdef)
            self.size_cstr_changed = True
        elif cstrtype == 'UniqueConstraint' and oldcstr is None:
            syssource.update_rdef_unique(cnx, rdef)
            self.unique_changed = True
        if cstrtype in ('BoundaryConstraint',
                        'IntervalBoundConstraint',
                        'StaticVocabularyConstraint'):
            cstrname, check = y2sql.check_constraint(rdef, newcstr, syssource.dbhelper,
                                                     prefix=SQL_PREFIX)
            # oldcstr is the new constraint when the attribute is being added in the same
            # transaction or when constraint value is updated. So we've to take care...
            if oldcstr is not None:
                oldcstrname = self.oldcstr.name_for(rdef)
                if oldcstrname != cstrname:
                    cnx.system_sql('ALTER TABLE %s%s DROP CONSTRAINT %s'
                                   % (SQL_PREFIX, rdef.subject, oldcstrname))
            cnx.system_sql('ALTER TABLE %s%s ADD CONSTRAINT %s CHECK(%s)' %
                           (SQL_PREFIX, rdef.subject, cstrname, check))
+
+
class CWUniqueTogetherConstraintAddOp(MemSchemaOperation):
    """create the multi-column unique index materializing a unique_together
    constraint
    """
    entity = None  # make pylint happy

    def precommit_event(self):
        cnx = self.cnx
        prefix = SQL_PREFIX
        entity = self.entity
        table = '%s%s' % (prefix, entity.constraint_of[0].name)
        cols = ['%s%s' % (prefix, r.name) for r in entity.relations]
        dbhelper = cnx.repo.system_source.dbhelper
        sqls = dbhelper.sqls_create_multicol_unique_index(table, cols, entity.name)
        for sql in sqls:
            cnx.system_sql(sql)

    def postcommit_event(self):
        entity = self.entity
        eschema = self.cnx.vreg.schema.schema_by_eid(entity.constraint_of[0].eid)
        attrs = [r.name for r in entity.relations]
        # record the constraint on the in-memory schema once committed
        eschema._unique_together.append(attrs)
+
+
class CWUniqueTogetherConstraintDelOp(MemSchemaOperation):
    """drop the multi-column unique index materializing a unique_together
    constraint
    """
    entity = cstrname = None  # make pylint happy
    cols = ()  # make pylint happy

    def insert_index(self):
        # We need to run before CWConstraintDelOp: if a size constraint is
        # removed and the column is part of a unique_together constraint, we
        # remove the unique_together index before changing the column's type.
        # SQL Server does not support unique indices on unlimited text columns.
        return 0

    def precommit_event(self):
        cnx = self.cnx
        prefix = SQL_PREFIX
        table = '%s%s' % (prefix, self.entity.type)
        dbhelper = cnx.repo.system_source.dbhelper
        cols = ['%s%s' % (prefix, c) for c in self.cols]
        sqls = dbhelper.sqls_drop_multicol_unique_index(table, cols, self.cstrname)
        for sql in sqls:
            cnx.system_sql(sql)

    def postcommit_event(self):
        eschema = self.cnx.vreg.schema.schema_by_eid(self.entity.eid)
        cols = set(self.cols)
        # forget the constraint on the in-memory schema once committed
        unique_together = [ut for ut in eschema._unique_together
                           if set(ut) != cols]
        eschema._unique_together = unique_together
+
+
+# operations for in-memory schema synchronization #############################
+
+class MemSchemaCWETypeDel(MemSchemaOperation):
+ """actually remove the entity type from the instance's schema"""
+ etype = None # make pylint happy
+
+ def postcommit_event(self):
+ # del_entity_type also removes entity's relations
+ self.cnx.vreg.schema.del_entity_type(self.etype)
+
+
+class MemSchemaCWRTypeAdd(MemSchemaOperation):
+ """actually add the relation type to the instance's schema"""
+ rtypedef = None # make pylint happy
+
+ def precommit_event(self):
+ self.cnx.vreg.schema.add_relation_type(self.rtypedef)
+
+ def revertprecommit_event(self):
+ self.cnx.vreg.schema.del_relation_type(self.rtypedef.name)
+
+
+class MemSchemaCWRTypeDel(MemSchemaOperation):
+ """actually remove the relation type from the instance's schema"""
+ rtype = None # make pylint happy
+
+ def postcommit_event(self):
+ try:
+ self.cnx.vreg.schema.del_relation_type(self.rtype)
+ except KeyError:
+ # subject/object entity type has already been deleted
+ pass
+
+
+class MemSchemaPermissionAdd(MemSchemaOperation):
+ """synchronize schema when a *_permission relation has been added on a group
+ """
+ eid = action = group_eid = expr = None # make pylint happy
+
+ def precommit_event(self):
+ """the observed connections.cnxset has been commited"""
+ try:
+ erschema = self.cnx.vreg.schema.schema_by_eid(self.eid)
+ except KeyError:
+ # duh, schema not found, log error and skip operation
+ self.warning('no schema for %s', self.eid)
+ return
+ perms = list(erschema.action_permissions(self.action))
+ if self.group_eid is not None:
+ perm = self.cnx.entity_from_eid(self.group_eid).name
+ else:
+ perm = erschema.rql_expression(self.expr)
+ try:
+ perms.index(perm)
+ self.warning('%s already in permissions for %s on %s',
+ perm, self.action, erschema)
+ except ValueError:
+ perms.append(perm)
+ erschema.set_action_permissions(self.action, perms)
+
+ # XXX revertprecommit_event
+
+
+class MemSchemaPermissionDel(MemSchemaPermissionAdd):
+ """synchronize schema when a *_permission relation has been deleted from a
+ group
+ """
+
+ def precommit_event(self):
+ """the observed connections set has been commited"""
+ try:
+ erschema = self.cnx.vreg.schema.schema_by_eid(self.eid)
+ except KeyError:
+ # duh, schema not found, log error and skip operation
+ self.warning('no schema for %s', self.eid)
+ return
+ perms = list(erschema.action_permissions(self.action))
+ if self.group_eid is not None:
+ perm = self.cnx.entity_from_eid(self.group_eid).name
+ else:
+ perm = erschema.rql_expression(self.expr)
+ try:
+ perms.remove(perm)
+ erschema.set_action_permissions(self.action, perms)
+ except ValueError:
+ self.error('can\'t remove permission %s for %s on %s',
+ perm, self.action, erschema)
+
+ # XXX revertprecommit_event
+
+
+class MemSchemaSpecializesAdd(MemSchemaOperation):
+ etypeeid = parentetypeeid = None # make pylint happy
+
+ def precommit_event(self):
+ eschema = self.cnx.vreg.schema.schema_by_eid(self.etypeeid)
+ parenteschema = self.cnx.vreg.schema.schema_by_eid(self.parentetypeeid)
+ eschema._specialized_type = parenteschema.type
+ parenteschema._specialized_by.append(eschema.type)
+
+ # XXX revertprecommit_event
+
+
+class MemSchemaSpecializesDel(MemSchemaOperation):
+ etypeeid = parentetypeeid = None # make pylint happy
+
+ def precommit_event(self):
+ try:
+ eschema = self.cnx.vreg.schema.schema_by_eid(self.etypeeid)
+ parenteschema = self.cnx.vreg.schema.schema_by_eid(self.parentetypeeid)
+ except KeyError:
+ # etype removed, nothing to do
+ return
+ eschema._specialized_type = None
+ parenteschema._specialized_by.remove(eschema.type)
+
+ # XXX revertprecommit_event
+
+
+# CWEType hooks ################################################################
+
+class DelCWETypeHook(SyncSchemaHook):
+ """before deleting a CWEType entity:
+ * check that we don't remove a core entity type
+ * cascade to delete related CWAttribute and CWRelation entities
+ * instantiate an operation to delete the entity type on commit
+ """
+ __regid__ = 'syncdelcwetype'
+ __select__ = SyncSchemaHook.__select__ & is_instance('CWEType')
+ events = ('before_delete_entity',)
+
+ def __call__(self):
+ # final entities can't be deleted, don't care about that
+ name = self.entity.name
+ if name in CORE_TYPES:
+ raise validation_error(self.entity, {None: _("can't be deleted")})
+ # delete every entities of this type
+ if name not in ETYPE_NAME_MAP:
+ MemSchemaCWETypeDel(self._cw, etype=name)
+ if not self.entity.final:
+ DropTable(self._cw, table=SQL_PREFIX + name)
+
+
+class AfterDelCWETypeHook(DelCWETypeHook):
+ __regid__ = 'wfcleanup'
+ events = ('after_delete_entity',)
+
+ def __call__(self):
+ # workflow cleanup
+ self._cw.execute('DELETE Workflow X WHERE NOT X workflow_of Y')
+
+
+class AfterAddCWETypeHook(DelCWETypeHook):
+ """after adding a CWEType entity:
+ * create the necessary table
+ * set creation_date and modification_date by creating the necessary
+ CWAttribute entities
+ * add owned_by relation by creating the necessary CWRelation entity
+ * register an operation to add the entity type to the instance's
+ schema on commit
+ """
+ __regid__ = 'syncaddcwetype'
+ events = ('after_add_entity',)
+
+ def __call__(self):
+ entity = self.entity
+ if entity.cw_edited.get('final'):
+ # final entity types don't need a table in the database and are
+ # systematically added by yams at schema initialization time so
+ # there is no need to do further processing. Simply assign its eid.
+ self._cw.vreg.schema[entity.name].eid = entity.eid
+ return
+ CWETypeAddOp(self._cw, entity=entity)
+
+
+class BeforeUpdateCWETypeHook(DelCWETypeHook):
+ """check name change, handle final"""
+ __regid__ = 'syncupdatecwetype'
+ events = ('before_update_entity',)
+
+ def __call__(self):
+ entity = self.entity
+ check_valid_changes(self._cw, entity, ro_attrs=('final',))
+ # don't use getattr(entity, attr), we would get the modified value if any
+ if 'name' in entity.cw_edited:
+ oldname, newname = entity.cw_edited.oldnewvalue('name')
+ if newname.lower() != oldname.lower():
+ CWETypeRenameOp(self._cw, oldname=oldname, newname=newname)
+
+
+# CWRType hooks ################################################################
+
+class DelCWRTypeHook(SyncSchemaHook):
+ """before deleting a CWRType entity:
+ * check that we don't remove a core relation type
+ * cascade to delete related CWAttribute and CWRelation entities
+ * instantiate an operation to delete the relation type on commit
+ """
+ __regid__ = 'syncdelcwrtype'
+ __select__ = SyncSchemaHook.__select__ & is_instance('CWRType')
+ events = ('before_delete_entity',)
+
+ def __call__(self):
+ name = self.entity.name
+ if name in CORE_TYPES:
+ raise validation_error(self.entity, {None: _("can't be deleted")})
+ # delete relation definitions using this relation type
+ self._cw.execute('DELETE CWAttribute X WHERE X relation_type Y, Y eid %(x)s',
+ {'x': self.entity.eid})
+ self._cw.execute('DELETE CWRelation X WHERE X relation_type Y, Y eid %(x)s',
+ {'x': self.entity.eid})
+ MemSchemaCWRTypeDel(self._cw, rtype=name)
+
+
+class AfterAddCWComputedRTypeHook(SyncSchemaHook):
+ """after a CWComputedRType entity has been added:
+ * register an operation to add the relation type to the instance's
+ schema on commit
+
+ We don't know yet at this point whether a table is necessary
+ """
+ __regid__ = 'syncaddcwcomputedrtype'
+ __select__ = SyncSchemaHook.__select__ & is_instance('CWComputedRType')
+ events = ('after_add_entity',)
+
+ def __call__(self):
+ entity = self.entity
+ rtypedef = ybo.ComputedRelation(name=entity.name,
+ eid=entity.eid,
+ rule=entity.rule)
+ MemSchemaCWRTypeAdd(self._cw, rtypedef=rtypedef)
+
+
+class AfterAddCWRTypeHook(SyncSchemaHook):
+ """after a CWRType entity has been added:
+ * register an operation to add the relation type to the instance's
+ schema on commit
+
+ We don't know yet at this point whether a table is necessary
+ """
+ __regid__ = 'syncaddcwrtype'
+ __select__ = SyncSchemaHook.__select__ & is_instance('CWRType')
+ events = ('after_add_entity',)
+
+ def __call__(self):
+ entity = self.entity
+ rtypedef = ybo.RelationType(name=entity.name,
+ description=entity.description,
+ inlined=entity.cw_edited.get('inlined', False),
+ symmetric=entity.cw_edited.get('symmetric', False),
+ eid=entity.eid)
+ MemSchemaCWRTypeAdd(self._cw, rtypedef=rtypedef)
+
+
+class BeforeUpdateCWRTypeHook(SyncSchemaHook):
+ """check name change, handle final"""
+ __regid__ = 'syncupdatecwrtype'
+ __select__ = SyncSchemaHook.__select__ & is_instance('CWRType')
+ events = ('before_update_entity',)
+
+ def __call__(self):
+ entity = self.entity
+ check_valid_changes(self._cw, entity)
+ newvalues = {}
+ for prop in ('symmetric', 'inlined', 'fulltext_container'):
+ if prop in entity.cw_edited:
+ old, new = entity.cw_edited.oldnewvalue(prop)
+ if old != new:
+ newvalues[prop] = new
+ if newvalues:
+ rschema = self._cw.vreg.schema.rschema(entity.name)
+ CWRTypeUpdateOp(self._cw, rschema=rschema, entity=entity,
+ values=newvalues)
+
+
+class BeforeUpdateCWComputedRTypeHook(SyncSchemaHook):
+ """check name change, handle final"""
+ __regid__ = 'syncupdatecwcomputedrtype'
+ __select__ = SyncSchemaHook.__select__ & is_instance('CWComputedRType')
+ events = ('before_update_entity',)
+
+ def __call__(self):
+ entity = self.entity
+ check_valid_changes(self._cw, entity)
+ if 'rule' in entity.cw_edited:
+ old, new = entity.cw_edited.oldnewvalue('rule')
+ if old != new:
+ rschema = self._cw.vreg.schema.rschema(entity.name)
+ CWComputedRTypeUpdateOp(self._cw, rschema=rschema,
+ entity=entity, rule=new)
+
+
+class AfterDelRelationTypeHook(SyncSchemaHook):
+ """before deleting a CWAttribute or CWRelation entity:
+ * if this is a final or inlined relation definition, instantiate an
+ operation to drop necessary column, else if this is the last instance
+ of a non final relation, instantiate an operation to drop necessary
+ table
+ * instantiate an operation to delete the relation definition on commit
+ * delete the associated relation type when necessary
+ """
+ __regid__ = 'syncdelrelationtype'
+ __select__ = SyncSchemaHook.__select__ & hook.match_rtype('relation_type')
+ events = ('after_delete_relation',)
+
+ def __call__(self):
+ cnx = self._cw
+ try:
+ rdef = cnx.vreg.schema.schema_by_eid(self.eidfrom)
+ except KeyError:
+ self.critical('cant get schema rdef associated to %s', self.eidfrom)
+ return
+ subjschema, rschema, objschema = rdef.as_triple()
+ pendingrdefs = cnx.transaction_data.setdefault('pendingrdefs', set())
+ # first delete existing relation if necessary
+ if rschema.final:
+ pendingrdefs.add((subjschema, rschema))
+ else:
+ pendingrdefs.add((subjschema, rschema, objschema))
+ RDefDelOp(cnx, rdef=rdef)
+
+
+# CWComputedRType hooks #######################################################
+
+class DelCWComputedRTypeHook(SyncSchemaHook):
+ """before deleting a CWComputedRType entity:
+ * check that we don't remove a core relation type
+ * instantiate an operation to delete the relation type on commit
+ """
+ __regid__ = 'syncdelcwcomputedrtype'
+ __select__ = SyncSchemaHook.__select__ & is_instance('CWComputedRType')
+ events = ('before_delete_entity',)
+
+ def __call__(self):
+ name = self.entity.name
+ if name in CORE_TYPES:
+ raise validation_error(self.entity, {None: _("can't be deleted")})
+ MemSchemaCWRTypeDel(self._cw, rtype=name)
+
+
+# CWAttribute / CWRelation hooks ###############################################
+
+class AfterAddCWAttributeHook(SyncSchemaHook):
+ __regid__ = 'syncaddcwattribute'
+ __select__ = SyncSchemaHook.__select__ & is_instance('CWAttribute')
+ events = ('after_add_entity',)
+
+ def __call__(self):
+ CWAttributeAddOp(self._cw, entity=self.entity)
+
+
+class AfterAddCWRelationHook(AfterAddCWAttributeHook):
+ __regid__ = 'syncaddcwrelation'
+ __select__ = SyncSchemaHook.__select__ & is_instance('CWRelation')
+
+ def __call__(self):
+ CWRelationAddOp(self._cw, entity=self.entity)
+
+
+class AfterUpdateCWRDefHook(SyncSchemaHook):
+ __regid__ = 'syncaddcwattribute'
+ __select__ = SyncSchemaHook.__select__ & is_instance('CWAttribute',
+ 'CWRelation')
+ events = ('before_update_entity',)
+
+ def __call__(self):
+ entity = self.entity
+ if self._cw.deleted_in_transaction(entity.eid):
+ return
+ subjtype = entity.stype.name
+ objtype = entity.otype.name
+ if subjtype in ETYPE_NAME_MAP or objtype in ETYPE_NAME_MAP:
+ return
+ rschema = self._cw.vreg.schema[entity.rtype.name]
+ # note: do not access schema rdef here, it may be added later by an
+ # operation
+ newvalues = {}
+ for prop in RelationDefinitionSchema.rproperty_defs(objtype):
+ if prop == 'constraints':
+ continue
+ if prop == 'order':
+ attr = 'ordernum'
+ else:
+ attr = prop
+ if attr in entity.cw_edited:
+ old, new = entity.cw_edited.oldnewvalue(attr)
+ if old != new:
+ newvalues[prop] = new
+ if newvalues:
+ RDefUpdateOp(self._cw, rschema=rschema, rdefkey=(subjtype, objtype),
+ values=newvalues)
+
+
+# constraints synchronization hooks ############################################
+
+class AfterAddCWConstraintHook(SyncSchemaHook):
+ __regid__ = 'syncaddcwconstraint'
+ __select__ = SyncSchemaHook.__select__ & is_instance('CWConstraint')
+ events = ('after_add_entity', 'after_update_entity')
+
+ def __call__(self):
+ if self.entity.cstrtype[0].name == 'SizeConstraint':
+ txdata = self._cw.transaction_data
+ if 'newsizecstr' not in txdata:
+ txdata['newsizecstr'] = set()
+ txdata['newsizecstr'].add(self.entity)
+ CWConstraintAddOp(self._cw, entity=self.entity)
+
+
+class AfterAddConstrainedByHook(SyncSchemaHook):
+ __regid__ = 'syncaddconstrainedby'
+ __select__ = SyncSchemaHook.__select__ & hook.match_rtype('constrained_by')
+ events = ('after_add_relation',)
+
+ def __call__(self):
+ if self._cw.added_in_transaction(self.eidfrom):
+ # used by get_constraints() which is called in CWAttributeAddOp
+ self._cw.transaction_data.setdefault(self.eidfrom, []).append(self.eidto)
+
+
+class BeforeDeleteCWConstraintHook(SyncSchemaHook):
+ __regid__ = 'syncdelcwconstraint'
+ __select__ = SyncSchemaHook.__select__ & is_instance('CWConstraint')
+ events = ('before_delete_entity',)
+
+ def __call__(self):
+ entity = self.entity
+ schema = self._cw.vreg.schema
+ try:
+ # KeyError, e.g. composite chain deletion
+ rdef = schema.schema_by_eid(entity.reverse_constrained_by[0].eid)
+ # IndexError
+ cstr = rdef.constraint_by_eid(entity.eid)
+ except (KeyError, IndexError):
+ self._cw.critical('constraint type no more accessible')
+ else:
+ CWConstraintDelOp(self._cw, rdef=rdef, oldcstr=cstr)
+
+
+# unique_together constraints
+# XXX: use setoperations and before_add_relation here (on constraint_of and relations)
+class AfterAddCWUniqueTogetherConstraintHook(SyncSchemaHook):
+ __regid__ = 'syncadd_cwuniquetogether_constraint'
+ __select__ = SyncSchemaHook.__select__ & is_instance('CWUniqueTogetherConstraint')
+ events = ('after_add_entity',)
+
+ def __call__(self):
+ CWUniqueTogetherConstraintAddOp(self._cw, entity=self.entity)
+
+
+class BeforeDeleteConstraintOfHook(SyncSchemaHook):
+ __regid__ = 'syncdelconstraintof'
+ __select__ = SyncSchemaHook.__select__ & hook.match_rtype('constraint_of')
+ events = ('before_delete_relation',)
+
+ def __call__(self):
+ if self._cw.deleted_in_transaction(self.eidto):
+ return
+ schema = self._cw.vreg.schema
+ cstr = self._cw.entity_from_eid(self.eidfrom)
+ entity = schema.schema_by_eid(self.eidto)
+ cols = tuple(r.name for r in cstr.relations)
+ CWUniqueTogetherConstraintDelOp(self._cw, entity=entity,
+ cstrname=cstr.name, cols=cols)
+
+
+# permissions synchronization hooks ############################################
+
+class AfterAddPermissionHook(SyncSchemaHook):
+ """added entity/relation *_permission, need to update schema"""
+ __regid__ = 'syncaddperm'
+ __select__ = SyncSchemaHook.__select__ & hook.match_rtype(
+ 'read_permission', 'add_permission', 'delete_permission',
+ 'update_permission')
+ events = ('after_add_relation',)
+
+ def __call__(self):
+ action = self.rtype.split('_', 1)[0]
+ if self._cw.entity_metas(self.eidto)['type'] == 'CWGroup':
+ MemSchemaPermissionAdd(self._cw, action=action, eid=self.eidfrom,
+ group_eid=self.eidto)
+ else: # RQLExpression
+ expr = self._cw.entity_from_eid(self.eidto).expression
+ MemSchemaPermissionAdd(self._cw, action=action, eid=self.eidfrom,
+ expr=expr)
+
+
+class BeforeDelPermissionHook(AfterAddPermissionHook):
+ """delete entity/relation *_permission, need to update schema
+
+ skip the operation if the related type is being deleted
+ """
+ __regid__ = 'syncdelperm'
+ events = ('before_delete_relation',)
+
+ def __call__(self):
+ if self._cw.deleted_in_transaction(self.eidfrom):
+ return
+ action = self.rtype.split('_', 1)[0]
+ if self._cw.entity_metas(self.eidto)['type'] == 'CWGroup':
+ MemSchemaPermissionDel(self._cw, action=action, eid=self.eidfrom,
+ group_eid=self.eidto)
+ else: # RQLExpression
+ expr = self._cw.entity_from_eid(self.eidto).expression
+ MemSchemaPermissionDel(self._cw, action=action, eid=self.eidfrom,
+ expr=expr)
+
+
+class UpdateFTIndexOp(hook.DataOperationMixIn, hook.SingleLastOperation):
+ """operation to update full text indexation of entity whose schema change
+
+ We wait until after the commit, as the schema in memory is only updated after
+ the commit.
+ """
+ containercls = list
+
+ def postcommit_event(self):
+ cnx = self.cnx
+ source = cnx.repo.system_source
+ schema = cnx.repo.vreg.schema
+ to_reindex = self.get_data()
+ self.info('%i etypes need full text indexed reindexation',
+ len(to_reindex))
+ for etype in to_reindex:
+ rset = cnx.execute('Any X WHERE X is %s' % etype)
+ self.info('Reindexing full text index for %i entity of type %s',
+ len(rset), etype)
+ still_fti = list(schema[etype].indexable_attributes())
+ for entity in rset.entities():
+ source.fti_unindex_entities(cnx, [entity])
+ for container in entity.cw_adapt_to('IFTIndexable').fti_containers():
+ if still_fti or container is not entity:
+ source.fti_unindex_entities(cnx, [container])
+ source.fti_index_entities(cnx, [container])
+ if to_reindex:
+ # Transaction has already been committed
+ cnx.cnxset.commit()
+
+
+# specializes synchronization hooks ############################################
+
+class AfterAddSpecializesHook(SyncSchemaHook):
+ __regid__ = 'syncaddspecializes'
+ __select__ = SyncSchemaHook.__select__ & hook.match_rtype('specializes')
+ events = ('after_add_relation',)
+
+ def __call__(self):
+ MemSchemaSpecializesAdd(self._cw, etypeeid=self.eidfrom,
+ parentetypeeid=self.eidto)
+
+
+class AfterDelSpecializesHook(SyncSchemaHook):
+ __regid__ = 'syncdelspecializes'
+ __select__ = SyncSchemaHook.__select__ & hook.match_rtype('specializes')
+ events = ('after_delete_relation',)
+
+ def __call__(self):
+ MemSchemaSpecializesDel(self._cw, etypeeid=self.eidfrom,
+ parentetypeeid=self.eidto)
diff -r 1400aee10df4 -r faf279e33298 cubicweb/hooks/syncsession.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/hooks/syncsession.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,254 @@
+# copyright 2003-2016 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""Core hooks: synchronize living session on persistent data changes"""
+
+__docformat__ = "restructuredtext en"
+
+from cubicweb import _
+from cubicweb import UnknownProperty, BadConnectionId, validation_error
+from cubicweb.predicates import is_instance
+from cubicweb.server import hook
+
+
+def get_user_sessions(repo, ueid):
+ for session in repo._sessions.values():
+ if ueid == session.user.eid:
+ yield session
+
+
+class SyncSessionHook(hook.Hook):
+ __abstract__ = True
+ category = 'syncsession'
+
+
+# user/groups synchronisation #################################################
+
+class _GroupOperation(hook.Operation):
+ """base class for group operation"""
+ cnxuser = None # make pylint happy
+
+ def __init__(self, cnx, *args, **kwargs):
+ """override to get the group name before actual groups manipulation:
+
+ we may temporarily lose right access during a commit event, so
+ no query should be emitted while committing
+ """
+ rql = 'Any N WHERE G eid %(x)s, G name N'
+ result = cnx.execute(rql, {'x': kwargs['geid']}, build_descr=False)
+ hook.Operation.__init__(self, cnx, *args, **kwargs)
+ self.group = result[0][0]
+
+
+class _DeleteGroupOp(_GroupOperation):
+ """Synchronize user when a in_group relation has been deleted"""
+
+ def postcommit_event(self):
+ """the observed connections set has been commited"""
+ groups = self.cnxuser.groups
+ try:
+ groups.remove(self.group)
+ except KeyError:
+ self.error('user %s not in group %s', self.cnxuser, self.group)
+
+
+class _AddGroupOp(_GroupOperation):
+ """Synchronize user when a in_group relation has been added"""
+
+ def postcommit_event(self):
+ """the observed connections set has been commited"""
+ groups = self.cnxuser.groups
+ if self.group in groups:
+ self.warning('user %s already in group %s', self.cnxuser,
+ self.group)
+ else:
+ groups.add(self.group)
+
+
+class SyncInGroupHook(SyncSessionHook):
+ """Watch addition/removal of in_group relation to synchronize living sessions accordingly"""
+ __regid__ = 'syncingroup'
+ __select__ = SyncSessionHook.__select__ & hook.match_rtype('in_group')
+ events = ('after_delete_relation', 'after_add_relation')
+
+ def __call__(self):
+ if self.event == 'after_delete_relation':
+ opcls = _DeleteGroupOp
+ else:
+ opcls = _AddGroupOp
+ for session in get_user_sessions(self._cw.repo, self.eidfrom):
+ opcls(self._cw, cnxuser=session.user, geid=self.eidto)
+
+
+class _DelUserOp(hook.Operation):
+ """close associated user's session when it is deleted"""
+ def __init__(self, cnx, sessionid):
+ self.sessionid = sessionid
+ hook.Operation.__init__(self, cnx)
+
+ def postcommit_event(self):
+ try:
+ self.cnx.repo.close(self.sessionid)
+ except BadConnectionId:
+ pass # already closed
+
+
+class CloseDeletedUserSessionsHook(SyncSessionHook):
+ __regid__ = 'closession'
+ __select__ = SyncSessionHook.__select__ & is_instance('CWUser')
+ events = ('after_delete_entity',)
+
+ def __call__(self):
+ for session in get_user_sessions(self._cw.repo, self.entity.eid):
+ _DelUserOp(self._cw, session.sessionid)
+
+
+# CWProperty hooks #############################################################
+
+class _DelCWPropertyOp(hook.Operation):
+ """a user's custom properties has been deleted"""
+ cwpropdict = key = None # make pylint happy
+
+ def postcommit_event(self):
+ """the observed connections set has been commited"""
+ try:
+ del self.cwpropdict[self.key]
+ except KeyError:
+ self.error('%s has no associated value', self.key)
+
+
+class _ChangeCWPropertyOp(hook.Operation):
+ """a user's custom properties has been added/changed"""
+ cwpropdict = key = value = None # make pylint happy
+
+ def postcommit_event(self):
+ """the observed connections set has been commited"""
+ self.cwpropdict[self.key] = self.value
+
+
+class _AddCWPropertyOp(hook.Operation):
+ """a user's custom properties has been added/changed"""
+ cwprop = None # make pylint happy
+
+ def postcommit_event(self):
+ """the observed connections set has been commited"""
+ cwprop = self.cwprop
+ if not cwprop.for_user:
+ self.cnx.vreg['propertyvalues'][cwprop.pkey] = \
+ self.cnx.vreg.typed_value(cwprop.pkey, cwprop.value)
+ # if for_user is set, update is handled by a ChangeCWPropertyOp operation
+
+
+class AddCWPropertyHook(SyncSessionHook):
+ __regid__ = 'addcwprop'
+ __select__ = SyncSessionHook.__select__ & is_instance('CWProperty')
+ events = ('after_add_entity',)
+
+ def __call__(self):
+ key, value = self.entity.pkey, self.entity.value
+ if key.startswith('sources.'):
+ return
+ cnx = self._cw
+ try:
+ value = cnx.vreg.typed_value(key, value)
+ except UnknownProperty:
+ msg = _('unknown property key %s')
+ raise validation_error(self.entity, {('pkey', 'subject'): msg}, (key,))
+ except ValueError as ex:
+ raise validation_error(self.entity,
+ {('value', 'subject'): str(ex)})
+ if not cnx.user.matching_groups('managers'):
+ cnx.add_relation(self.entity.eid, 'for_user', cnx.user.eid)
+ else:
+ _AddCWPropertyOp(cnx, cwprop=self.entity)
+
+
+class UpdateCWPropertyHook(AddCWPropertyHook):
+ __regid__ = 'updatecwprop'
+ events = ('after_update_entity',)
+
+ def __call__(self):
+ entity = self.entity
+ if not ('pkey' in entity.cw_edited or
+ 'value' in entity.cw_edited):
+ return
+ key, value = entity.pkey, entity.value
+ if key.startswith('sources.'):
+ return
+ cnx = self._cw
+ try:
+ value = cnx.vreg.typed_value(key, value)
+ except UnknownProperty:
+ return
+ except ValueError as ex:
+ raise validation_error(entity, {('value', 'subject'): str(ex)})
+ if entity.for_user:
+ for session in get_user_sessions(cnx.repo, entity.for_user[0].eid):
+ _ChangeCWPropertyOp(cnx, cwpropdict=session.user.properties,
+ key=key, value=value)
+ else:
+ # site wide properties
+ _ChangeCWPropertyOp(cnx, cwpropdict=cnx.vreg['propertyvalues'],
+ key=key, value=value)
+
+
+class DeleteCWPropertyHook(AddCWPropertyHook):
+ __regid__ = 'delcwprop'
+ events = ('before_delete_entity',)
+
+ def __call__(self):
+ cnx = self._cw
+ for eidfrom, rtype, eidto in cnx.transaction_data.get('pendingrelations', ()):
+ if rtype == 'for_user' and eidfrom == self.entity.eid:
+ # if for_user was set, delete already handled by hook on for_user deletion
+ break
+ else:
+ _DelCWPropertyOp(cnx, cwpropdict=cnx.vreg['propertyvalues'],
+ key=self.entity.pkey)
+
+
+class AddForUserRelationHook(SyncSessionHook):
+ __regid__ = 'addcwpropforuser'
+ __select__ = SyncSessionHook.__select__ & hook.match_rtype('for_user')
+ events = ('after_add_relation',)
+
+ def __call__(self):
+ cnx = self._cw
+ eidfrom = self.eidfrom
+ if not cnx.entity_metas(eidfrom)['type'] == 'CWProperty':
+ return
+ key, value = cnx.execute('Any K,V WHERE P eid %(x)s,P pkey K,P value V',
+ {'x': eidfrom})[0]
+ if cnx.vreg.property_info(key)['sitewide']:
+ msg = _("site-wide property can't be set for user")
+ raise validation_error(eidfrom, {('for_user', 'subject'): msg})
+ for session in get_user_sessions(cnx.repo, self.eidto):
+ _ChangeCWPropertyOp(cnx, cwpropdict=session.user.properties,
+ key=key, value=value)
+
+
+class DelForUserRelationHook(AddForUserRelationHook):
+ __regid__ = 'delcwpropforuser'
+ events = ('after_delete_relation',)
+
+ def __call__(self):
+ cnx = self._cw
+ key = cnx.execute('Any K WHERE P eid %(x)s, P pkey K', {'x': self.eidfrom})[0][0]
+ cnx.transaction_data.setdefault('pendingrelations', []).append(
+ (self.eidfrom, self.rtype, self.eidto))
+ for session in get_user_sessions(cnx.repo, self.eidto):
+ _DelCWPropertyOp(cnx, cwpropdict=session.user.properties, key=key)
diff -r 1400aee10df4 -r faf279e33298 cubicweb/hooks/syncsources.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/hooks/syncsources.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,208 @@
+# copyright 2010-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""hooks for repository sources synchronization"""
+
+from cubicweb import _
+
+from socket import gethostname
+
+from logilab.common.decorators import clear_cache
+
+from cubicweb import validation_error
+from cubicweb.predicates import is_instance
+from cubicweb.server import SOURCE_TYPES, hook
+
+class SourceHook(hook.Hook):
+ __abstract__ = True
+ category = 'cw.sources'
+
+
+# repo sources synchronization #################################################
+
+class SourceAddedOp(hook.Operation):
+ entity = None # make pylint happy
+ def postcommit_event(self):
+ self.cnx.repo.add_source(self.entity)
+
+class SourceAddedHook(SourceHook):
+ __regid__ = 'cw.sources.added'
+ __select__ = SourceHook.__select__ & is_instance('CWSource')
+ events = ('after_add_entity',)
+ def __call__(self):
+ try:
+ sourcecls = SOURCE_TYPES[self.entity.type]
+ except KeyError:
+ msg = _('Unknown source type')
+ raise validation_error(self.entity, {('type', 'subject'): msg})
+ # ignore creation of the system source done during database
+ # initialisation, as config for this source is in a file and handling
+ # is done separately (no need for the operation either)
+ if self.entity.name != 'system':
+ sourcecls.check_conf_dict(self.entity.eid, self.entity.host_config,
+ fail_if_unknown=not self._cw.vreg.config.repairing)
+ SourceAddedOp(self._cw, entity=self.entity)
+
+
+class SourceRemovedOp(hook.Operation):
+ uri = None # make pylint happy
+ def postcommit_event(self):
+ self.cnx.repo.remove_source(self.uri)
+
+class SourceRemovedHook(SourceHook):
+ __regid__ = 'cw.sources.removed'
+ __select__ = SourceHook.__select__ & is_instance('CWSource')
+ events = ('before_delete_entity',)
+ def __call__(self):
+ if self.entity.name == 'system':
+ msg = _("You cannot remove the system source")
+ raise validation_error(self.entity, {None: msg})
+ SourceRemovedOp(self._cw, uri=self.entity.name)
+
+
+class SourceConfigUpdatedOp(hook.DataOperationMixIn, hook.Operation):
+
+ def precommit_event(self):
+ self.__processed = []
+ for source in self.get_data():
+ if not self.cnx.deleted_in_transaction(source.eid):
+ conf = source.repo_source.check_config(source)
+ self.__processed.append( (source, conf) )
+
+ def postcommit_event(self):
+ for source, conf in self.__processed:
+ source.repo_source.update_config(source, conf)
+
+
+class SourceRenamedOp(hook.LateOperation):
+ oldname = newname = None # make pylint happy
+
+ def precommit_event(self):
+ source = self.cnx.repo.sources_by_uri[self.oldname]
+ sql = 'UPDATE entities SET asource=%(newname)s WHERE asource=%(oldname)s'
+ self.cnx.system_sql(sql, {'oldname': self.oldname,
+ 'newname': self.newname})
+
+ def postcommit_event(self):
+ repo = self.cnx.repo
+ # XXX race condition
+ source = repo.sources_by_uri.pop(self.oldname)
+ source.uri = self.newname
+ source.public_config['uri'] = self.newname
+ repo.sources_by_uri[self.newname] = source
+ repo._type_source_cache.clear()
+ clear_cache(repo, 'source_defs')
+
+
+class SourceUpdatedHook(SourceHook):
+ __regid__ = 'cw.sources.configupdate'
+ __select__ = SourceHook.__select__ & is_instance('CWSource')
+ events = ('before_update_entity',)
+ def __call__(self):
+ if 'name' in self.entity.cw_edited:
+ oldname, newname = self.entity.cw_edited.oldnewvalue('name')
+ if oldname == 'system':
+ msg = _("You cannot rename the system source")
+ raise validation_error(self.entity, {('name', 'subject'): msg})
+ SourceRenamedOp(self._cw, oldname=oldname, newname=newname)
+ if 'config' in self.entity.cw_edited or 'url' in self.entity.cw_edited:
+ if self.entity.name == 'system' and self.entity.config:
+ msg = _("Configuration of the system source goes to "
+ "the 'sources' file, not in the database")
+ raise validation_error(self.entity, {('config', 'subject'): msg})
+ SourceConfigUpdatedOp.get_instance(self._cw).add_data(self.entity)
+
+
+class SourceHostConfigUpdatedHook(SourceHook):
+ __regid__ = 'cw.sources.hostconfigupdate'
+ __select__ = SourceHook.__select__ & is_instance('CWSourceHostConfig')
+ events = ('after_add_entity', 'after_update_entity', 'before_delete_entity',)
+ def __call__(self):
+ if self.entity.match(gethostname()):
+ if self.event == 'after_update_entity' and \
+ not 'config' in self.entity.cw_edited:
+ return
+ try:
+ SourceConfigUpdatedOp.get_instance(self._cw).add_data(self.entity.cwsource)
+ except IndexError:
+ # XXX no source linked to the host config yet
+ pass
+
+
+# source mapping synchronization ###############################################
+#
+# Expect cw_for_source/cw_schema are immutable relations (i.e. can't change from
+# a source or schema to another).
+
+class SourceMappingImmutableHook(SourceHook):
+ """check cw_for_source and cw_schema are immutable relations
+
+ XXX empty delete perms would be enough?
+ """
+ __regid__ = 'cw.sources.mapping.immutable'
+ __select__ = SourceHook.__select__ & hook.match_rtype('cw_for_source', 'cw_schema')
+ events = ('before_add_relation',)
+ def __call__(self):
+ if not self._cw.added_in_transaction(self.eidfrom):
+ msg = _("You can't change this relation")
+ raise validation_error(self.eidfrom, {self.rtype: msg})
+
+
+class SourceMappingChangedOp(hook.DataOperationMixIn, hook.Operation):
+ def check_or_update(self, checkonly):
+ cnx = self.cnx
+ # take care, can't call get_data() twice
+ try:
+ data = self.__data
+ except AttributeError:
+ data = self.__data = self.get_data()
+ for schemacfg, source in data:
+ if source is None:
+ source = schemacfg.cwsource.repo_source
+ if cnx.added_in_transaction(schemacfg.eid):
+ if not cnx.deleted_in_transaction(schemacfg.eid):
+ source.add_schema_config(schemacfg, checkonly=checkonly)
+ elif cnx.deleted_in_transaction(schemacfg.eid):
+ source.del_schema_config(schemacfg, checkonly=checkonly)
+ else:
+ source.update_schema_config(schemacfg, checkonly=checkonly)
+
+ def precommit_event(self):
+ self.check_or_update(True)
+
+ def postcommit_event(self):
+ self.check_or_update(False)
+
+
+class SourceMappingChangedHook(SourceHook):
+ __regid__ = 'cw.sources.schemaconfig'
+ __select__ = SourceHook.__select__ & is_instance('CWSourceSchemaConfig')
+ events = ('after_add_entity', 'after_update_entity')
+ def __call__(self):
+ if self.event == 'after_add_entity' or (
+ self.event == 'after_update_entity' and 'options' in self.entity.cw_edited):
+ SourceMappingChangedOp.get_instance(self._cw).add_data(
+ (self.entity, None) )
+
+class SourceMappingDeleteHook(SourceHook):
+ __regid__ = 'cw.sources.delschemaconfig'
+ __select__ = SourceHook.__select__ & hook.match_rtype('cw_for_source')
+ events = ('before_delete_relation',)
+ def __call__(self):
+ SourceMappingChangedOp.get_instance(self._cw).add_data(
+ (self._cw.entity_from_eid(self.eidfrom),
+ self._cw.entity_from_eid(self.eidto).repo_source) )
diff -r 1400aee10df4 -r faf279e33298 cubicweb/hooks/test/data-computed/schema.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/hooks/test/data-computed/schema.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,46 @@
+# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+from yams.buildobjs import EntityType, String, Int, SubjectRelation, RelationDefinition
+
+THISYEAR = 2014
+
+class Person(EntityType):
+ name = String()
+ salaire = Int()
+ birth_year = Int(required=True)
+ travaille = SubjectRelation('Societe')
+ age = Int(formula='Any %d - D WHERE X birth_year D' % THISYEAR)
+
+class Societe(EntityType):
+ nom = String()
+ salaire_total = Int(formula='Any SUM(SA) GROUPBY X WHERE P travaille X, P salaire SA')
+
+
+class Agent(EntityType):
+ asalae_id = String(formula='Any E WHERE M mirror_of X, M extid E')
+
+class MirrorEntity(EntityType):
+ extid = String(required=True, unique=True,
+ description=_('external identifier of the object'))
+
+
+class mirror_of(RelationDefinition):
+ subject = 'MirrorEntity'
+ object = ('Agent', 'Societe')
+ cardinality = '?*'
+ inlined = True
diff -r 1400aee10df4 -r faf279e33298 cubicweb/hooks/test/data/hooks.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/hooks/test/data/hooks.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,8 @@
+from cubicweb.predicates import is_instance
+from cubicweb.hooks import notification
+
+
+class FolderUpdateHook(notification.EntityUpdateHook):
+ __select__ = (notification.EntityUpdateHook.__select__ &
+ is_instance('Folder'))
+ order = 100 # late trigger so that metadata hooks come before.
diff -r 1400aee10df4 -r faf279e33298 cubicweb/hooks/test/data/schema.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/hooks/test/data/schema.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,85 @@
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+
+from yams.buildobjs import (RelationDefinition, RelationType, EntityType,
+ String, Datetime, Int)
+from yams.reader import context
+
+from cubicweb.schema import ERQLExpression
+
+from cubicweb import _
+
+class friend(RelationDefinition):
+ subject = ('CWUser', 'CWGroup')
+ object = ('CWUser', 'CWGroup')
+ symmetric = True
+
+class Folder(EntityType):
+ name = String()
+
+class parent(RelationDefinition):
+ subject = 'Folder'
+ object = 'Folder'
+ composite = 'object'
+ cardinality = '?*'
+
+class children(RelationDefinition):
+ subject = 'Folder'
+ object = 'Folder'
+ composite = 'subject'
+
+
+class Email(EntityType):
+ """electronic mail"""
+ subject = String(fulltextindexed=True)
+ date = Datetime(description=_('UTC time on which the mail was sent'))
+ messageid = String(required=True, indexed=True)
+ headers = String(description=_('raw headers'))
+
+
+
+class EmailPart(EntityType):
+ """an email attachment"""
+ __permissions__ = {
+ 'read': ('managers', 'users', 'guests',), # XXX if E parts X, U has_read_permission E
+ 'add': ('managers', ERQLExpression('E parts X, U has_update_permission E'),),
+ 'delete': ('managers', ERQLExpression('E parts X, U has_update_permission E')),
+ 'update': ('managers', 'owners',),
+ }
+
+ content = String(fulltextindexed=True)
+ content_format = String(required=True, maxsize=50)
+ ordernum = Int(required=True)
+
+
+class parts(RelationType):
+ subject = 'Email'
+ object = 'EmailPart'
+ cardinality = '*1'
+ composite = 'subject'
+ fulltext_container = 'subject'
+
+class sender(RelationDefinition):
+ subject = 'Email'
+ object = 'EmailAddress'
+ cardinality = '?*'
+ inlined = True
+
+class recipients(RelationDefinition):
+ subject = 'Email'
+ object = 'EmailAddress'
diff -r 1400aee10df4 -r faf279e33298 cubicweb/hooks/test/unittest_bookmarks.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/hooks/test/unittest_bookmarks.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,38 @@
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+from logilab.common.testlib import unittest_main
+from cubicweb.devtools.testlib import CubicWebTC
+
+class BookmarkHooksTC(CubicWebTC):
+
+
+ def test_auto_delete_bookmarks(self):
+ with self.admin_access.repo_cnx() as cnx:
+ beid = cnx.execute('INSERT Bookmark X: X title "hop", X path "view", X bookmarked_by U '
+ 'WHERE U login "admin"')[0][0]
+ cnx.execute('SET X bookmarked_by U WHERE U login "anon"')
+ cnx.commit()
+ cnx.execute('DELETE X bookmarked_by U WHERE U login "admin"')
+ cnx.commit()
+ self.assertTrue(cnx.execute('Any X WHERE X eid %(x)s', {'x': beid}))
+ cnx.execute('DELETE X bookmarked_by U WHERE U login "anon"')
+ cnx.commit()
+ self.assertFalse(cnx.execute('Any X WHERE X eid %(x)s', {'x': beid}))
+
+if __name__ == '__main__':
+ unittest_main()
diff -r 1400aee10df4 -r faf279e33298 cubicweb/hooks/test/unittest_hooks.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/hooks/test/unittest_hooks.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,222 @@
+# -*- coding: utf-8 -*-
+# copyright 2003-2016 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""functional tests for core hooks
+
+Note:
+ syncschema.py hooks are mostly tested in server/test/unittest_migrations.py
+"""
+
+from datetime import datetime
+
+from six import text_type
+
+from pytz import utc
+
+from cubicweb import ValidationError
+from cubicweb.devtools.testlib import CubicWebTC
+
+
+class CoreHooksTC(CubicWebTC):
+
+ def test_inlined(self):
+ with self.admin_access.repo_cnx() as cnx:
+ self.assertEqual(self.repo.schema['sender'].inlined, True)
+ cnx.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"')
+ cnx.execute('INSERT EmailPart X: X content_format "text/plain", X ordernum 1, '
+ 'X content "this is a test"')
+ eeid = cnx.execute('INSERT Email X: X messageid "<1234>", X subject "test", '
+ 'X sender Y, X recipients Y, X parts P '
+ 'WHERE Y is EmailAddress, P is EmailPart')[0][0]
+ cnx.execute('SET X sender Y WHERE X is Email, Y is EmailAddress')
+ rset = cnx.execute('Any S WHERE X sender S, X eid %s' % eeid)
+ self.assertEqual(len(rset), 1)
+
+ def test_symmetric(self):
+ with self.admin_access.repo_cnx() as cnx:
+ u1 = self.create_user(cnx, u'1')
+ u2 = self.create_user(cnx, u'2')
+ u3 = self.create_user(cnx, u'3')
+ ga = cnx.create_entity('CWGroup', name=u'A')
+ gb = cnx.create_entity('CWGroup', name=u'B')
+ u1.cw_set(friend=u2)
+ u2.cw_set(friend=u3)
+ ga.cw_set(friend=gb)
+ ga.cw_set(friend=u1)
+ cnx.commit()
+ for l1, l2 in ((u'1', u'2'),
+ (u'2', u'3')):
+ self.assertTrue(cnx.execute('Any U1,U2 WHERE U1 friend U2, U1 login %(l1)s, U2 login %(l2)s',
+ {'l1': l1, 'l2': l2}))
+ self.assertTrue(cnx.execute('Any U1,U2 WHERE U2 friend U1, U1 login %(l1)s, U2 login %(l2)s',
+ {'l1': l1, 'l2': l2}))
+ self.assertTrue(cnx.execute('Any GA,GB WHERE GA friend GB, GA name "A", GB name "B"'))
+ self.assertTrue(cnx.execute('Any GA,GB WHERE GB friend GA, GA name "A", GB name "B"'))
+ self.assertTrue(cnx.execute('Any GA,U1 WHERE GA friend U1, GA name "A", U1 login "1"'))
+ self.assertTrue(cnx.execute('Any GA,U1 WHERE U1 friend GA, GA name "A", U1 login "1"'))
+ self.assertFalse(cnx.execute('Any GA,U WHERE GA friend U, GA name "A", U login "2"'))
+ for l1, l2 in ((u'1', u'3'),
+ (u'3', u'1')):
+ self.assertFalse(cnx.execute('Any U1,U2 WHERE U1 friend U2, U1 login %(l1)s, U2 login %(l2)s',
+ {'l1': l1, 'l2': l2}))
+ self.assertFalse(cnx.execute('Any U1,U2 WHERE U2 friend U1, U1 login %(l1)s, U2 login %(l2)s',
+ {'l1': l1, 'l2': l2}))
+
+    def test_html_tidy_hook(self):
+        with self.admin_access.client_cnx() as cnx:
+            entity = cnx.create_entity('Workflow', name=u'wf1',
+                                       description_format=u'text/html',
+                                       description=u'yo')
+            self.assertEqual(u'yo', entity.description)
+            entity = cnx.create_entity('Workflow', name=u'wf2',
+                                       description_format=u'text/html',
+                                       description=u'<b>yo')
+            self.assertEqual(u'<b>yo</b>', entity.description)
+            entity = cnx.create_entity('Workflow', name=u'wf3',
+                                       description_format=u'text/html',
+                                       description=u'<b>yo</b>')
+            self.assertEqual(u'<b>yo</b>', entity.description)
+            entity = cnx.create_entity('Workflow', name=u'wf4',
+                                       description_format=u'text/html',
+                                       description=u'<b>R&D</b>')
+            self.assertEqual(u'<b>R&amp;D</b>', entity.description, )
+            entity = cnx.create_entity('Workflow', name=u'wf5',
+                                       description_format=u'text/html',
+                                       description=u"<div>c&apos;est <b>l'ét&eacute;")
+            self.assertEqual(u"<div>c'est <b>l'été</b></div>", entity.description)
+
+ def test_metadata_cwuri(self):
+ with self.admin_access.repo_cnx() as cnx:
+ entity = cnx.create_entity('Workflow', name=u'wf1')
+ self.assertEqual(entity.cwuri, self.repo.config['base-url'] + str(entity.eid))
+
+ def test_metadata_creation_modification_date(self):
+ with self.admin_access.repo_cnx() as cnx:
+ _now = datetime.now(utc)
+ entity = cnx.create_entity('Workflow', name=u'wf1')
+ self.assertEqual((entity.creation_date - _now).seconds, 0)
+ self.assertEqual((entity.modification_date - _now).seconds, 0)
+
+ def test_metadata_created_by(self):
+ with self.admin_access.repo_cnx() as cnx:
+ entity = cnx.create_entity('Bookmark', title=u'wf1', path=u'/view')
+ cnx.commit() # fire operations
+ self.assertEqual(len(entity.created_by), 1) # make sure we have only one creator
+ self.assertEqual(entity.created_by[0].eid, cnx.user.eid)
+
+ def test_metadata_owned_by(self):
+ with self.admin_access.repo_cnx() as cnx:
+ entity = cnx.create_entity('Bookmark', title=u'wf1', path=u'/view')
+ cnx.commit() # fire operations
+ self.assertEqual(len(entity.owned_by), 1) # make sure we have only one owner
+ self.assertEqual(entity.owned_by[0].eid, cnx.user.eid)
+
+ def test_user_login_stripped(self):
+ with self.admin_access.repo_cnx() as cnx:
+ u = self.create_user(cnx, ' joe ')
+ tname = cnx.execute('Any L WHERE E login L, E eid %(e)s',
+ {'e': u.eid})[0][0]
+ self.assertEqual(tname, 'joe')
+ cnx.execute('SET X login " jijoe " WHERE X eid %(x)s', {'x': u.eid})
+ tname = cnx.execute('Any L WHERE E login L, E eid %(e)s',
+ {'e': u.eid})[0][0]
+ self.assertEqual(tname, 'jijoe')
+
+
+class UserGroupHooksTC(CubicWebTC):
+
+ def test_user_group_synchronization(self):
+ with self.admin_access.repo_cnx() as cnx:
+ user = cnx.user
+ self.assertEqual(user.groups, set(('managers',)))
+ cnx.execute('SET X in_group G WHERE X eid %s, G name "guests"' % user.eid)
+ self.assertEqual(user.groups, set(('managers',)))
+ cnx.commit()
+ self.assertEqual(user.groups, set(('managers', 'guests')))
+ cnx.execute('DELETE X in_group G WHERE X eid %s, G name "guests"' % user.eid)
+ self.assertEqual(user.groups, set(('managers', 'guests')))
+ cnx.commit()
+ self.assertEqual(user.groups, set(('managers',)))
+
+ def test_user_composite_owner(self):
+ with self.admin_access.repo_cnx() as cnx:
+ self.create_user(cnx, 'toto').eid
+ # composite of euser should be owned by the euser regardless of who created it
+ cnx.execute('INSERT EmailAddress X: X address "toto@logilab.fr", U use_email X '
+ 'WHERE U login "toto"')
+ cnx.commit()
+ self.assertEqual(cnx.execute('Any A WHERE X owned_by U, U use_email X,'
+ 'U login "toto", X address A')[0][0],
+ 'toto@logilab.fr')
+
+ def test_user_composite_no_owner_on_deleted_entity(self):
+ with self.admin_access.repo_cnx() as cnx:
+ u = self.create_user(cnx, 'toto').eid
+ cnx.commit()
+ e = cnx.create_entity('EmailAddress', address=u'toto@logilab.fr', reverse_use_email=u)
+ e.cw_delete()
+ cnx.commit()
+ self.assertFalse(cnx.system_sql(
+ 'SELECT * FROM owned_by_relation '
+ 'WHERE eid_from NOT IN (SELECT eid FROM entities)').fetchall())
+
+ def test_no_created_by_on_deleted_entity(self):
+ with self.admin_access.repo_cnx() as cnx:
+ eid = cnx.execute('INSERT EmailAddress X: X address "toto@logilab.fr"')[0][0]
+ cnx.execute('DELETE EmailAddress X WHERE X eid %s' % eid)
+ cnx.commit()
+ self.assertFalse(cnx.execute('Any X WHERE X created_by Y, X eid >= %(x)s', {'x': eid}))
+
+
+class SchemaHooksTC(CubicWebTC):
+
+ def test_duplicate_etype_error(self):
+ with self.admin_access.repo_cnx() as cnx:
+ # check we can't add a CWEType or CWRType entity if it already exists one
+ # with the same name
+ self.assertRaises(ValidationError,
+ cnx.execute, 'INSERT CWEType X: X name "CWUser"')
+ cnx.rollback()
+ self.assertRaises(ValidationError,
+ cnx.execute, 'INSERT CWRType X: X name "in_group"')
+
+ def test_validation_unique_constraint(self):
+ with self.admin_access.repo_cnx() as cnx:
+ with self.assertRaises(ValidationError) as cm:
+ cnx.execute('INSERT CWUser X: X login "admin", X upassword "admin"')
+ ex = cm.exception
+ ex.translate(text_type)
+ self.assertIsInstance(ex.entity, int)
+ self.assertEqual(ex.errors,
+ {'': u'some relations violate a unicity constraint',
+ 'login': u'login is part of violated unicity constraint'})
+
+
+if __name__ == '__main__':
+ import unittest
+ unittest.main()
diff -r 1400aee10df4 -r faf279e33298 cubicweb/hooks/test/unittest_integrity.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/hooks/test/unittest_integrity.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,162 @@
+# -*- coding: utf-8 -*-
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""functional tests for integrity hooks"""
+
+from cubicweb import ValidationError
+from cubicweb.devtools.testlib import CubicWebTC
+
+class CoreHooksTC(CubicWebTC):
+
+ def test_delete_internal_entities(self):
+ with self.admin_access.repo_cnx() as cnx:
+ self.assertRaises(ValidationError, cnx.execute,
+ 'DELETE CWEType X WHERE X name "CWEType"')
+ cnx.rollback()
+ self.assertRaises(ValidationError, cnx.execute,
+ 'DELETE CWRType X WHERE X name "relation_type"')
+ cnx.rollback()
+ self.assertRaises(ValidationError, cnx.execute,
+ 'DELETE CWGroup X WHERE X name "owners"')
+
+ def test_delete_required_relations_subject(self):
+ with self.admin_access.repo_cnx() as cnx:
+ cnx.execute('INSERT CWUser X: X login "toto", X upassword "hop", X in_group Y '
+ 'WHERE Y name "users"')
+ cnx.commit()
+ cnx.execute('DELETE X in_group Y WHERE X login "toto", Y name "users"')
+ self.assertRaises(ValidationError, cnx.commit)
+ cnx.rollback()
+ cnx.execute('DELETE X in_group Y WHERE X login "toto"')
+ cnx.execute('SET X in_group Y WHERE X login "toto", Y name "guests"')
+ cnx.commit()
+
+ def test_static_vocabulary_check(self):
+ with self.admin_access.repo_cnx() as cnx:
+ self.assertRaises(ValidationError,
+ cnx.execute,
+ 'SET X composite "whatever" WHERE X from_entity FE, FE name "CWUser", '
+ 'X relation_type RT, RT name "in_group"')
+
+ def test_missing_required_relations_subject_inline(self):
+ with self.admin_access.repo_cnx() as cnx:
+ # missing in_group relation
+ cnx.execute('INSERT CWUser X: X login "toto", X upassword "hop"')
+ self.assertRaises(ValidationError, cnx.commit)
+
+ def test_composite_1(self):
+ with self.admin_access.repo_cnx() as cnx:
+ cnx.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"')
+ cnx.execute('INSERT EmailPart X: X content_format "text/plain", X ordernum 1, '
+ 'X content "this is a test"')
+ cnx.execute('INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, '
+ 'X recipients Y, X parts P '
+ 'WHERE Y is EmailAddress, P is EmailPart')
+ self.assertTrue(cnx.execute('Email X WHERE X sender Y'))
+ cnx.commit()
+ cnx.execute('DELETE Email X')
+ rset = cnx.execute('Any X WHERE X is EmailPart')
+ self.assertEqual(len(rset), 0)
+ cnx.commit()
+ rset = cnx.execute('Any X WHERE X is EmailPart')
+ self.assertEqual(len(rset), 0)
+
+ def test_composite_2(self):
+ with self.admin_access.repo_cnx() as cnx:
+ cnx.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"')
+ cnx.execute('INSERT EmailPart X: X content_format "text/plain", X ordernum 1, '
+ 'X content "this is a test"')
+ cnx.execute('INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, '
+ 'X recipients Y, X parts P '
+ 'WHERE Y is EmailAddress, P is EmailPart')
+ cnx.commit()
+ cnx.execute('DELETE Email X')
+ cnx.execute('DELETE EmailPart X')
+ cnx.commit()
+ rset = cnx.execute('Any X WHERE X is EmailPart')
+ self.assertEqual(len(rset), 0)
+
+ def test_composite_redirection(self):
+ with self.admin_access.repo_cnx() as cnx:
+ cnx.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"')
+ cnx.execute('INSERT EmailPart X: X content_format "text/plain", X ordernum 1, '
+ 'X content "this is a test"')
+ cnx.execute('INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, '
+ 'X recipients Y, X parts P '
+ 'WHERE Y is EmailAddress, P is EmailPart')
+ cnx.execute('INSERT Email X: X messageid "<2345>", X subject "test2", X sender Y, '
+ 'X recipients Y '
+ 'WHERE Y is EmailAddress')
+ cnx.commit()
+ cnx.execute('DELETE X parts Y WHERE X messageid "<1234>"')
+ cnx.execute('SET X parts Y WHERE X messageid "<2345>"')
+ cnx.commit()
+ rset = cnx.execute('Any X WHERE X is EmailPart')
+ self.assertEqual(len(rset), 1)
+ self.assertEqual(rset.get_entity(0, 0).reverse_parts[0].messageid, '<2345>')
+
+ def test_composite_object_relation_deletion(self):
+ with self.admin_access.repo_cnx() as cnx:
+ root = cnx.create_entity('Folder', name=u'root')
+ a = cnx.create_entity('Folder', name=u'a', parent=root)
+ cnx.create_entity('Folder', name=u'b', parent=a)
+ cnx.create_entity('Folder', name=u'c', parent=root)
+ cnx.commit()
+ cnx.execute('DELETE Folder F WHERE F name "a"')
+ cnx.execute('DELETE F parent R WHERE R name "root"')
+ cnx.commit()
+ self.assertEqual([['root'], ['c']],
+ cnx.execute('Any NF WHERE F is Folder, F name NF').rows)
+ self.assertEqual([], cnx.execute('Any NF,NP WHERE F parent P, F name NF, P name NP').rows)
+
+ def test_composite_subject_relation_deletion(self):
+ with self.admin_access.repo_cnx() as cnx:
+ root = cnx.create_entity('Folder', name=u'root')
+ a = cnx.create_entity('Folder', name=u'a')
+ b = cnx.create_entity('Folder', name=u'b')
+ c = cnx.create_entity('Folder', name=u'c')
+ root.cw_set(children=(a, c))
+ a.cw_set(children=b)
+ cnx.commit()
+ cnx.execute('DELETE Folder F WHERE F name "a"')
+ cnx.execute('DELETE R children F WHERE R name "root"')
+ cnx.commit()
+ self.assertEqual([['root'], ['c']],
+ cnx.execute('Any NF WHERE F is Folder, F name NF').rows)
+ self.assertEqual([], cnx.execute('Any NF,NP WHERE F parent P, F name NF, P name NP').rows)
+
+ def test_unsatisfied_constraints(self):
+ with self.admin_access.repo_cnx() as cnx:
+ cnx.execute('SET U in_group G WHERE G name "owners", U login "admin"')[0][0]
+ with self.assertRaises(ValidationError) as cm:
+ cnx.commit()
+ self.assertEqual(cm.exception.errors,
+ {'in_group-object': u'RQLConstraint NOT O name "owners" failed'})
+
+ def test_unique_constraint(self):
+ with self.admin_access.repo_cnx() as cnx:
+ entity = cnx.create_entity('CWGroup', name=u'trout')
+ cnx.commit()
+ self.assertRaises(ValidationError, cnx.create_entity, 'CWGroup', name=u'trout')
+ cnx.rollback()
+ cnx.execute('SET X name "trout" WHERE X eid %(x)s', {'x': entity.eid})
+ cnx.commit()
+
+if __name__ == '__main__':
+ from logilab.common.testlib import unittest_main
+ unittest_main()
diff -r 1400aee10df4 -r faf279e33298 cubicweb/hooks/test/unittest_notificationhooks.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/hooks/test/unittest_notificationhooks.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,39 @@
+# copyright 2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""tests for notification hooks"""
+
+from cubicweb.devtools.testlib import CubicWebTC
+
+
+class NotificationHooksTC(CubicWebTC):
+
+ def test_entity_update(self):
+ """Check transaction_data['changes'] filled by "notifentityupdated" hook.
+ """
+ with self.admin_access.repo_cnx() as cnx:
+ root = cnx.create_entity('Folder', name=u'a')
+ cnx.commit()
+ root.cw_set(name=u'b')
+ self.assertIn('changes', cnx.transaction_data)
+ self.assertEqual(cnx.transaction_data['changes'],
+ {root.eid: set([('name', u'a', u'b')])})
+
+
+if __name__ == '__main__':
+ from logilab.common.testlib import unittest_main
+ unittest_main()
diff -r 1400aee10df4 -r faf279e33298 cubicweb/hooks/test/unittest_security.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/hooks/test/unittest_security.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,56 @@
+# copyright 2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+
+from cubicweb.devtools.testlib import CubicWebTC
+from cubicweb.server import hook
+from cubicweb.predicates import is_instance
+
+
+class SecurityHooksTC(CubicWebTC):
+ def setup_database(self):
+ with self.admin_access.repo_cnx() as cnx:
+ self.add_eid = cnx.create_entity('EmailAddress',
+ address=u'hop@perdu.com',
+ reverse_use_email=cnx.user.eid).eid
+ cnx.commit()
+
+ def test_inlined_cw_edited_relation(self):
+ """modification of cw_edited to add an inlined relation shouldn't trigger a security error.
+
+ Test for https://www.cubicweb.org/ticket/5477315
+ """
+ sender = self.repo.schema['Email'].rdef('sender')
+ with self.temporary_permissions((sender, {'add': ()})):
+
+ class MyHook(hook.Hook):
+ __regid__ = 'test.pouet'
+ __select__ = hook.Hook.__select__ & is_instance('Email')
+ events = ('before_add_entity',)
+
+ def __call__(self):
+ self.entity.cw_edited['sender'] = self._cw.user.primary_email[0].eid
+
+ with self.temporary_appobjects(MyHook):
+ with self.admin_access.repo_cnx() as cnx:
+ email = cnx.create_entity('Email', messageid=u'1234')
+ cnx.commit()
+ self.assertEqual(email.sender[0].eid, self.add_eid)
+
+if __name__ == '__main__':
+ from logilab.common.testlib import unittest_main
+ unittest_main()
diff -r 1400aee10df4 -r faf279e33298 cubicweb/hooks/test/unittest_synccomputed.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/hooks/test/unittest_synccomputed.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,146 @@
+# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""unit tests for computed attributes/relations hooks"""
+
+from unittest import TestCase
+
+from yams.buildobjs import EntityType, String, Int, SubjectRelation
+
+from cubicweb.devtools.testlib import CubicWebTC
+from cubicweb.schema import build_schema_from_namespace
+
+
+class FormulaDependenciesMatrixTC(TestCase):
+
+ def simple_schema(self):
+ THISYEAR = 2014
+
+ class Person(EntityType):
+ name = String()
+ salary = Int()
+ birth_year = Int(required=True)
+ works_for = SubjectRelation('Company')
+ age = Int(formula='Any %d - D WHERE X birth_year D' % THISYEAR)
+
+ class Company(EntityType):
+ name = String()
+ total_salary = Int(formula='Any SUM(SA) GROUPBY X WHERE P works_for X, P salary SA')
+
+ schema = build_schema_from_namespace(vars().items())
+ return schema
+
+ def setUp(self):
+ from cubicweb.hooks.synccomputed import _FormulaDependenciesMatrix
+ self.schema = self.simple_schema()
+ self.dependencies = _FormulaDependenciesMatrix(self.schema)
+
+ def test_computed_attributes_by_etype(self):
+ comp_by_etype = self.dependencies.computed_attribute_by_etype
+ self.assertEqual(len(comp_by_etype), 2)
+ values = comp_by_etype['Person']
+ self.assertEqual(len(values), 1)
+ self.assertEqual(values[0].rtype, 'age')
+ values = comp_by_etype['Company']
+ self.assertEqual(len(values), 1)
+ self.assertEqual(values[0].rtype, 'total_salary')
+
+ def test_computed_attribute_by_relation(self):
+ comp_by_rdef = self.dependencies.computed_attribute_by_relation
+ self.assertEqual(len(comp_by_rdef), 1)
+ key, values = next(iter(comp_by_rdef.items()))
+ self.assertEqual(key.rtype, 'works_for')
+ self.assertEqual(len(values), 1)
+ self.assertEqual(values[0].rtype, 'total_salary')
+
+ def test_computed_attribute_by_etype_attrs(self):
+ comp_by_attr = self.dependencies.computed_attribute_by_etype_attrs
+ self.assertEqual(len(comp_by_attr), 1)
+ values = comp_by_attr['Person']
+ self.assertEqual(len(values), 2)
+ values = set((rdef.formula, tuple(v))
+ for rdef, v in values.items())
+ self.assertEquals(values,
+ set((('Any 2014 - D WHERE X birth_year D', tuple(('birth_year',))),
+ ('Any SUM(SA) GROUPBY X WHERE P works_for X, P salary SA', tuple(('salary',)))))
+ )
+
+
+class ComputedAttributeTC(CubicWebTC):
+ appid = 'data-computed'
+
+ def setup_entities(self, req):
+ self.societe = req.create_entity('Societe', nom=u'Foo')
+ req.create_entity('Person', name=u'Titi', salaire=1000,
+ travaille=self.societe, birth_year=2001)
+ self.tata = req.create_entity('Person', name=u'Tata', salaire=2000,
+ travaille=self.societe, birth_year=1990)
+
+
+ def test_update_on_add_remove_relation(self):
+ """check the rewriting of a computed attribute"""
+ with self.admin_access.web_request() as req:
+ self.setup_entities(req)
+ req.cnx.commit()
+ rset = req.execute('Any S WHERE X salaire_total S, X nom "Foo"')
+ self.assertEqual(rset[0][0], 3000)
+ # Add relation.
+ toto = req.create_entity('Person', name=u'Toto', salaire=1500,
+ travaille=self.societe, birth_year=1988)
+ req.cnx.commit()
+ rset = req.execute('Any S WHERE X salaire_total S, X nom "Foo"')
+ self.assertEqual(rset[0][0], 4500)
+ # Delete relation.
+ toto.cw_set(travaille=None)
+ req.cnx.commit()
+ rset = req.execute('Any S WHERE X salaire_total S, X nom "Foo"')
+ self.assertEqual(rset[0][0], 3000)
+
+ def test_recompute_on_attribute_update(self):
+ """check the modification of an attribute triggers the update of the
+ computed attributes that depend on it"""
+ with self.admin_access.web_request() as req:
+ self.setup_entities(req)
+ req.cnx.commit()
+ rset = req.execute('Any S WHERE X salaire_total S, X nom "Foo"')
+ self.assertEqual(rset[0][0], 3000)
+ # Update attribute.
+ self.tata.cw_set(salaire=1000)
+ req.cnx.commit()
+ rset = req.execute('Any S WHERE X salaire_total S, X nom "Foo"')
+ self.assertEqual(rset[0][0], 2000)
+
+ def test_init_on_entity_creation(self):
+ """check the computed attribute is initialized on entity creation"""
+ with self.admin_access.web_request() as req:
+ p = req.create_entity('Person', name=u'Tata', salaire=2000,
+ birth_year=1990)
+ req.cnx.commit()
+ rset = req.execute('Any A, X WHERE X age A, X name "Tata"')
+ self.assertEqual(rset[0][0], 2014 - 1990)
+
+
+ def test_recompute_on_ambiguous_relation(self):
+ # check we don't end up with TypeResolverException as in #4901163
+ with self.admin_access.client_cnx() as cnx:
+ societe = cnx.create_entity('Societe', nom=u'Foo')
+ cnx.create_entity('MirrorEntity', mirror_of=societe, extid=u'1')
+ cnx.commit()
+
+if __name__ == '__main__':
+ from logilab.common.testlib import unittest_main
+ unittest_main()
diff -r 1400aee10df4 -r faf279e33298 cubicweb/hooks/test/unittest_syncschema.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/hooks/test/unittest_syncschema.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,406 @@
+# copyright 2003-2016 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+"""cubicweb.server.hooks.syncschema unit and functional tests"""
+
+from yams.constraints import BoundaryConstraint
+
+from cubicweb import ValidationError, Binary
+from cubicweb.schema import META_RTYPES
+from cubicweb.devtools import startpgcluster, stoppgcluster, PostgresApptestConfiguration
+from cubicweb.devtools.testlib import CubicWebTC
+from cubicweb.server.sqlutils import SQL_PREFIX
+from cubicweb.devtools.repotest import schema_eids_idx
+
+
+def setUpModule():
+ startpgcluster(__file__)
+
+
+def tearDownModule(*args):
+ stoppgcluster(__file__)
+ del SchemaModificationHooksTC.schema_eids
+
+
+class SchemaModificationHooksTC(CubicWebTC):
+ configcls = PostgresApptestConfiguration
+
+ def setUp(self):
+ super(SchemaModificationHooksTC, self).setUp()
+ self.repo.set_schema(self.repo.deserialize_schema(), resetvreg=False)
+ self.__class__.schema_eids = schema_eids_idx(self.repo.schema)
+
+ def index_exists(self, cnx, etype, attr, unique=False):
+ dbhelper = self.repo.system_source.dbhelper
+ sqlcursor = cnx.cnxset.cu
+ return dbhelper.index_exists(sqlcursor,
+ SQL_PREFIX + etype,
+ SQL_PREFIX + attr,
+ unique=unique)
+
+ def _set_perms(self, cnx, eid):
+ cnx.execute('SET X read_permission G WHERE X eid %(x)s, G is CWGroup',
+ {'x': eid})
+ cnx.execute('SET X add_permission G WHERE X eid %(x)s, G is CWGroup, '
+ 'G name "managers"', {'x': eid})
+ cnx.execute('SET X delete_permission G WHERE X eid %(x)s, G is CWGroup, '
+ 'G name "owners"', {'x': eid})
+
+ def _set_attr_perms(self, cnx, eid):
+ cnx.execute('SET X read_permission G WHERE X eid %(x)s, G is CWGroup',
+ {'x': eid})
+ cnx.execute('SET X update_permission G WHERE X eid %(x)s, G is CWGroup, G name "managers"',
+ {'x': eid})
+
+ def test_base(self):
+ with self.admin_access.repo_cnx() as cnx:
+ schema = self.repo.schema
+ self.assertFalse(schema.has_entity('Societe2'))
+ self.assertFalse(schema.has_entity('concerne2'))
+ # schema should be updated on insertion (after commit)
+ eeid = cnx.execute('INSERT CWEType X: X name "Societe2", '
+ 'X description "", X final FALSE')[0][0]
+ self._set_perms(cnx, eeid)
+ cnx.execute('INSERT CWRType X: X name "concerne2", X description "", '
+ 'X final FALSE, X symmetric FALSE')
+ self.assertFalse(schema.has_entity('Societe2'))
+ self.assertFalse(schema.has_entity('concerne2'))
+ # have to commit before adding definition relations
+ cnx.commit()
+ self.assertTrue(schema.has_entity('Societe2'))
+ self.assertTrue(schema.has_relation('concerne2'))
+ attreid = cnx.execute('INSERT CWAttribute X: X cardinality "11", '
+ 'X defaultval %(default)s, X indexed TRUE, '
+ 'X relation_type RT, X from_entity E, X to_entity F '
+ 'WHERE RT name "name", E name "Societe2", '
+ 'F name "String"',
+ {'default': Binary.zpickle('noname')})[0][0]
+ self._set_attr_perms(cnx, attreid)
+ concerne2_rdef_eid = cnx.execute(
+ 'INSERT CWRelation X: X cardinality "**", X relation_type RT, '
+ 'X from_entity E, X to_entity E '
+ 'WHERE RT name "concerne2", E name "Societe2"')[0][0]
+ self._set_perms(cnx, concerne2_rdef_eid)
+ self.assertNotIn('name', schema['Societe2'].subject_relations())
+ self.assertNotIn('concerne2', schema['Societe2'].subject_relations())
+ self.assertFalse(self.index_exists(cnx, 'Societe2', 'name'))
+ cnx.commit()
+ self.assertIn('name', schema['Societe2'].subject_relations())
+ self.assertIn('concerne2', schema['Societe2'].subject_relations())
+ self.assertTrue(self.index_exists(cnx, 'Societe2', 'name'))
+ # now we should be able to insert and query Societe2
+ s2eid = cnx.execute('INSERT Societe2 X: X name "logilab"')[0][0]
+ cnx.execute('Societe2 X WHERE X name "logilab"')
+ cnx.execute('SET X concerne2 X WHERE X name "logilab"')
+ rset = cnx.execute('Any X WHERE X concerne2 Y')
+ self.assertEqual(rset.rows, [[s2eid]])
+ # check that when a relation definition is deleted, existing relations are deleted
+ rdefeid = cnx.execute('INSERT CWRelation X: X cardinality "**", X relation_type RT, '
+ ' X from_entity E, X to_entity E '
+ 'WHERE RT name "concerne2", E name "CWUser"')[0][0]
+ self._set_perms(cnx, rdefeid)
+ cnx.commit()
+ cnx.execute('DELETE CWRelation X WHERE X eid %(x)s', {'x': concerne2_rdef_eid})
+ cnx.commit()
+ self.assertIn('concerne2', schema['CWUser'].subject_relations())
+ self.assertNotIn('concerne2', schema['Societe2'].subject_relations())
+ self.assertFalse(cnx.execute('Any X WHERE X concerne2 Y'))
+ # schema should be cleaned on delete (after commit)
+ cnx.execute('DELETE CWEType X WHERE X name "Societe2"')
+ cnx.execute('DELETE CWRType X WHERE X name "concerne2"')
+ self.assertTrue(self.index_exists(cnx, 'Societe2', 'name'))
+ self.assertTrue(schema.has_entity('Societe2'))
+ self.assertTrue(schema.has_relation('concerne2'))
+ cnx.commit()
+ self.assertFalse(self.index_exists(cnx, 'Societe2', 'name'))
+ self.assertFalse(schema.has_entity('Societe2'))
+ self.assertFalse(schema.has_entity('concerne2'))
+ self.assertNotIn('concerne2', schema['CWUser'].subject_relations())
+
+ def test_metartype_with_nordefs(self):
+ with self.admin_access.repo_cnx() as cnx:
+ META_RTYPES.add('custom_meta')
+ cnx.execute('INSERT CWRType X: X name "custom_meta", X description "", '
+ 'X final FALSE, X symmetric FALSE')
+ cnx.commit()
+ eeid = cnx.execute('INSERT CWEType X: X name "NEWEtype", '
+ 'X description "", X final FALSE')[0][0]
+ self._set_perms(cnx, eeid)
+ cnx.commit()
+ META_RTYPES.remove('custom_meta')
+
+ def test_metartype_with_somerdefs(self):
+ with self.admin_access.repo_cnx() as cnx:
+ META_RTYPES.add('custom_meta')
+ cnx.execute('INSERT CWRType X: X name "custom_meta", X description "", '
+ 'X final FALSE, X symmetric FALSE')
+ cnx.commit()
+ rdefeid = cnx.execute('INSERT CWRelation X: X cardinality "**", X relation_type RT, '
+ ' X from_entity E, X to_entity E '
+ 'WHERE RT name "custom_meta", E name "CWUser"')[0][0]
+ self._set_perms(cnx, rdefeid)
+ cnx.commit()
+ eeid = cnx.execute('INSERT CWEType X: X name "NEWEtype", '
+ 'X description "", X final FALSE')[0][0]
+ self._set_perms(cnx, eeid)
+ cnx.commit()
+ META_RTYPES.remove('custom_meta')
+
+ def test_is_instance_of_insertions(self):
+ with self.admin_access.repo_cnx() as cnx:
+ seid = cnx.execute('INSERT Transition T: T name "subdiv"')[0][0]
+ is_etypes = [etype for etype, in cnx.execute('Any ETN WHERE X eid %s, '
+ 'X is ET, ET name ETN' % seid)]
+ self.assertEqual(is_etypes, ['Transition'])
+ instanceof_etypes = [etype
+ for etype, in cnx.execute('Any ETN WHERE X eid %s, '
+ 'X is_instance_of ET, ET name ETN'
+ % seid)]
+ self.assertEqual(sorted(instanceof_etypes), ['BaseTransition', 'Transition'])
+ snames = [name for name, in cnx.execute('Any N WHERE S is BaseTransition, S name N')]
+ self.assertNotIn('subdiv', snames)
+ snames = [name for name, in cnx.execute('Any N WHERE S is_instance_of BaseTransition, '
+ 'S name N')]
+ self.assertIn('subdiv', snames)
+
+ def test_perms_synchronization_1(self):
+ with self.admin_access.repo_cnx() as cnx:
+ schema = self.repo.schema
+ self.assertEqual(schema['CWUser'].get_groups('read'), set(('managers', 'users')))
+ self.assertTrue(cnx.execute('Any X, Y WHERE X is CWEType, X name "CWUser", '
+ 'Y is CWGroup, Y name "users"')[0])
+ cnx.execute('DELETE X read_permission Y '
+ 'WHERE X is CWEType, X name "CWUser", Y name "users"')
+ self.assertEqual(schema['CWUser'].get_groups('read'), set(('managers', 'users', )))
+ cnx.commit()
+ self.assertEqual(schema['CWUser'].get_groups('read'), set(('managers',)))
+ cnx.execute('SET X read_permission Y WHERE X is CWEType, '
+ 'X name "CWUser", Y name "users"')
+ cnx.commit()
+ self.assertEqual(schema['CWUser'].get_groups('read'),
+ set(('managers', 'users',)))
+
+ def test_perms_synchronization_2(self):
+ with self.admin_access.repo_cnx() as cnx:
+ schema = self.repo.schema['in_group'].rdefs[('CWUser', 'CWGroup')]
+ self.assertEqual(schema.get_groups('read'),
+ set(('managers', 'users', 'guests')))
+ cnx.execute('DELETE X read_permission Y WHERE X relation_type RT, '
+ 'RT name "in_group", Y name "guests"')
+ self.assertEqual(schema.get_groups('read'),
+ set(('managers', 'users', 'guests')))
+ cnx.commit()
+ self.assertEqual(schema.get_groups('read'),
+ set(('managers', 'users')))
+ cnx.execute('SET X read_permission Y WHERE X relation_type RT, '
+ 'RT name "in_group", Y name "guests"')
+ self.assertEqual(schema.get_groups('read'),
+ set(('managers', 'users')))
+ cnx.commit()
+ self.assertEqual(schema.get_groups('read'),
+ set(('managers', 'users', 'guests')))
+
+ def test_nonregr_user_edit_itself(self):
+ with self.admin_access.repo_cnx() as cnx:
+ ueid = cnx.user.eid
+ groupeids = [eid for eid, in cnx.execute('CWGroup G WHERE G name '
+ 'in ("managers", "users")')]
+ cnx.execute('DELETE X in_group Y WHERE X eid %s' % ueid)
+ cnx.execute('SET X surname "toto" WHERE X eid %s' % ueid)
+ cnx.execute('SET X in_group Y WHERE X eid %s, Y name "managers"' % ueid)
+ cnx.commit()
+ eeid = cnx.execute('Any X WHERE X is CWEType, X name "CWEType"')[0][0]
+ cnx.execute('DELETE X read_permission Y WHERE X eid %s' % eeid)
+ cnx.execute('SET X final FALSE WHERE X eid %s' % eeid)
+ cnx.execute('SET X read_permission Y WHERE X eid %s, Y eid in (%s, %s)'
+ % (eeid, groupeids[0], groupeids[1]))
+ cnx.commit()
+ cnx.execute('Any X WHERE X is CWEType, X name "CWEType"')
+
+ # schema modification hooks tests #########################################
+
+ def test_uninline_relation(self):
+ with self.admin_access.repo_cnx() as cnx:
+ try:
+ self.assertTrue(self.schema['state_of'].inlined)
+ cnx.execute('SET X inlined FALSE WHERE X name "state_of"')
+ self.assertTrue(self.schema['state_of'].inlined)
+ cnx.commit()
+ self.assertFalse(self.schema['state_of'].inlined)
+ self.assertFalse(self.index_exists(cnx, 'State', 'state_of'))
+ rset = cnx.execute('Any X, Y WHERE X state_of Y')
+ self.assertEqual(len(rset), 2) # user states
+ finally:
+ cnx.execute('SET X inlined TRUE WHERE X name "state_of"')
+ self.assertFalse(self.schema['state_of'].inlined)
+ cnx.commit()
+ self.assertTrue(self.schema['state_of'].inlined)
+ self.assertTrue(self.index_exists(cnx, 'State', 'state_of'))
+ rset = cnx.execute('Any X, Y WHERE X state_of Y')
+ self.assertEqual(len(rset), 2)
+
+ def test_indexed_change(self):
+ with self.admin_access.repo_cnx() as cnx:
+ try:
+ cnx.execute('SET X indexed FALSE WHERE X relation_type R, R name "name"')
+ self.assertTrue(self.schema['name'].rdef('Workflow', 'String').indexed)
+ self.assertTrue(self.index_exists(cnx, 'Workflow', 'name'))
+ cnx.commit()
+ self.assertFalse(self.schema['name'].rdef('Workflow', 'String').indexed)
+ self.assertFalse(self.index_exists(cnx, 'Workflow', 'name'))
+ finally:
+ cnx.execute('SET X indexed TRUE WHERE X relation_type R, R name "name"')
+ self.assertFalse(self.schema['name'].rdef('Workflow', 'String').indexed)
+ self.assertFalse(self.index_exists(cnx, 'Workflow', 'name'))
+ cnx.commit()
+ self.assertTrue(self.schema['name'].rdef('Workflow', 'String').indexed)
+ self.assertTrue(self.index_exists(cnx, 'Workflow', 'name'))
+
+ def test_unique_change(self):
+ with self.admin_access.repo_cnx() as cnx:
+ try:
+ eid = cnx.execute('INSERT CWConstraint X: X cstrtype CT, X value "{}", '
+ ' DEF constrained_by X '
+ 'WHERE CT name "UniqueConstraint", DEF relation_type RT, '
+ 'DEF from_entity E, RT name "name", '
+ 'E name "Workflow"').rows[0][0]
+ self.assertFalse(self.schema['Workflow'].has_unique_values('name'))
+ self.assertFalse(self.index_exists(cnx, 'Workflow', 'name', unique=True))
+ cnx.commit()
+ self.assertTrue(self.schema['Workflow'].has_unique_values('name'))
+ self.assertTrue(self.index_exists(cnx, 'Workflow', 'name', unique=True))
+ finally:
+ cnx.execute('DELETE CWConstraint C WHERE C eid %(eid)s', {'eid': eid})
+ cnx.commit()
+ self.assertFalse(self.schema['Workflow'].has_unique_values('name'))
+ self.assertFalse(self.index_exists(cnx, 'Workflow', 'name', unique=True))
+
+ def test_required_change_1(self):
+ with self.admin_access.repo_cnx() as cnx:
+ cnx.execute('SET DEF cardinality "?1" '
+ 'WHERE DEF relation_type RT, DEF from_entity E,'
+ 'RT name "title", E name "Bookmark"')
+ cnx.commit()
+ # should now be able to add bookmark without title
+ cnx.execute('INSERT Bookmark X: X path "/view"')
+ cnx.commit()
+
+ def test_required_change_2(self):
+ with self.admin_access.repo_cnx() as cnx:
+ cnx.execute('SET DEF cardinality "11" '
+ 'WHERE DEF relation_type RT, DEF from_entity E,'
+ 'RT name "surname", E name "CWUser"')
+ cnx.execute('SET U surname "Doe" WHERE U surname NULL')
+ cnx.commit()
+ # should not be able anymore to add cwuser without surname
+ self.assertRaises(ValidationError, self.create_user, cnx, "toto")
+ cnx.rollback()
+ cnx.execute('SET DEF cardinality "?1" '
+ 'WHERE DEF relation_type RT, DEF from_entity E,'
+ 'RT name "surname", E name "CWUser"')
+ cnx.commit()
+
+ def test_add_attribute_to_base_class(self):
+ with self.admin_access.repo_cnx() as cnx:
+ attreid = cnx.execute(
+ 'INSERT CWAttribute X: X cardinality "11", X defaultval %(default)s, '
+ 'X indexed TRUE, X relation_type RT, X from_entity E, X to_entity F '
+ 'WHERE RT name "messageid", E name "BaseTransition", F name "String"',
+ {'default': Binary.zpickle('noname')})[0][0]
+ assert cnx.execute('SET X read_permission Y WHERE X eid %(x)s, Y name "managers"',
+ {'x': attreid})
+ cnx.commit()
+ self.schema.rebuild_infered_relations()
+ self.assertIn('Transition', self.schema['messageid'].subjects())
+ self.assertIn('WorkflowTransition', self.schema['messageid'].subjects())
+ cnx.execute('Any X WHERE X is_instance_of BaseTransition, X messageid "hop"')
+
+ def test_change_fulltextindexed(self):
+ with self.admin_access.repo_cnx() as cnx:
+ target = cnx.create_entity(u'Email', messageid=u'1234',
+ subject=u'rick.roll@dance.com')
+ cnx.commit()
+ rset = cnx.execute('Any X WHERE X has_text "rick.roll"')
+ self.assertIn(target.eid, [item[0] for item in rset])
+ assert cnx.execute('SET A fulltextindexed FALSE '
+ 'WHERE E is CWEType, E name "Email", A is CWAttribute,'
+ 'A from_entity E, A relation_type R, R name "subject"')
+ cnx.commit()
+ rset = cnx.execute('Any X WHERE X has_text "rick.roll"')
+ self.assertFalse(rset)
+ assert cnx.execute('SET A fulltextindexed TRUE '
+ 'WHERE A from_entity E, A relation_type R, '
+ 'E name "Email", R name "subject"')
+ cnx.commit()
+ rset = cnx.execute('Any X WHERE X has_text "rick.roll"')
+ self.assertIn(target.eid, [item[0] for item in rset])
+
+ def test_change_fulltext_container(self):
+ with self.admin_access.repo_cnx() as cnx:
+ target = cnx.create_entity(u'EmailAddress', address=u'rick.roll@dance.com')
+ target.cw_set(reverse_use_email=cnx.user)
+ cnx.commit()
+ rset = cnx.execute('Any X WHERE X has_text "rick.roll"')
+ self.assertIn(cnx.user.eid, [item[0] for item in rset])
+ assert cnx.execute('SET R fulltext_container NULL '
+ 'WHERE R name "use_email"')
+ cnx.commit()
+ rset = cnx.execute('Any X WHERE X has_text "rick.roll"')
+ self.assertIn(target.eid, [item[0] for item in rset])
+ assert cnx.execute('SET R fulltext_container "subject" '
+ 'WHERE R name "use_email"')
+ cnx.commit()
+ rset = cnx.execute('Any X WHERE X has_text "rick.roll"')
+ self.assertIn(cnx.user.eid, [item[0] for item in rset])
+
+ def test_update_constraint(self):
+ with self.admin_access.repo_cnx() as cnx:
+ rdef = self.schema['Transition'].rdef('type')
+ cstr = rdef.constraint_by_type('StaticVocabularyConstraint')
+ cnx.execute('SET X value %(v)s WHERE X eid %(x)s',
+ {'x': cstr.eid, 'v': u"u'normal', u'auto', u'new'"})
+ cnx.execute('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, '
+ 'EDEF constrained_by X WHERE CT name %(ct)s, EDEF eid %(x)s',
+ {'ct': 'SizeConstraint', 'value': u'max=10', 'x': rdef.eid})
+ cnx.commit()
+ cstr = rdef.constraint_by_type('StaticVocabularyConstraint')
+ self.assertEqual(cstr.values, (u'normal', u'auto', u'new'))
+ cnx.execute('INSERT Transition T: T name "hop", T type "new"')
+
+ def test_add_constraint(self):
+ with self.admin_access.repo_cnx() as cnx:
+ rdef = self.schema['EmailPart'].rdef('ordernum')
+ cstr = BoundaryConstraint('>=', 0)
+ cnx.execute('INSERT CWConstraint X: X value %(v)s, X cstrtype CT, '
+ 'EDEF constrained_by X WHERE CT name %(ct)s, EDEF eid %(x)s',
+ {'ct': cstr.__class__.__name__, 'v': cstr.serialize(), 'x': rdef.eid})
+ cnx.commit()
+ cstr2 = rdef.constraint_by_type('BoundaryConstraint')
+ self.assertEqual(cstr, cstr2)
+ cstr3 = BoundaryConstraint('<=', 1000)
+ cnx.execute('INSERT CWConstraint X: X value %(v)s, X cstrtype CT, '
+ 'EDEF constrained_by X WHERE CT name %(ct)s, EDEF eid %(x)s',
+ {'ct': cstr3.__class__.__name__, 'v': cstr3.serialize(), 'x': rdef.eid})
+ cnx.commit()
+ # Do not use assertCountEqual as it does "strange" equality
+ # comparison on Python 2.
+ self.assertEqual(set(rdef.constraints), set([cstr, cstr3]))
+
+
+if __name__ == '__main__':
+ import unittest
+ unittest.main()
diff -r 1400aee10df4 -r faf279e33298 cubicweb/hooks/test/unittest_syncsession.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/hooks/test/unittest_syncsession.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,135 @@
+# -*- coding: utf-8 -*-
+# copyright 2003-2016 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+"""functional tests for core hooks
+
+Note:
+ syncschema.py hooks are mostly tested in server/test/unittest_migrations.py
+"""
+
+from six import text_type
+
+from cubicweb import ValidationError
+from cubicweb.devtools.testlib import CubicWebTC
+
+
+class CWPropertyHooksTC(CubicWebTC):
+
+ def test_unexistant_cwproperty(self):
+ with self.admin_access.web_request() as req:
+ with self.assertRaises(ValidationError) as cm:
+ req.execute('INSERT CWProperty X: X pkey "bla.bla", '
+ 'X value "hop", X for_user U')
+ cm.exception.translate(text_type)
+ self.assertEqual(cm.exception.errors,
+ {'pkey-subject': 'unknown property key bla.bla'})
+
+ with self.assertRaises(ValidationError) as cm:
+ req.execute('INSERT CWProperty X: X pkey "bla.bla", X value "hop"')
+ cm.exception.translate(text_type)
+ self.assertEqual(cm.exception.errors,
+ {'pkey-subject': 'unknown property key bla.bla'})
+
+ def test_site_wide_cwproperty(self):
+ with self.admin_access.web_request() as req:
+ with self.assertRaises(ValidationError) as cm:
+ req.execute('INSERT CWProperty X: X pkey "ui.site-title", '
+ 'X value "hop", X for_user U')
+ self.assertEqual(cm.exception.errors,
+ {'for_user-subject': "site-wide property can't be set for user"})
+
+ def test_system_cwproperty(self):
+ with self.admin_access.web_request() as req:
+ with self.assertRaises(ValidationError) as cm:
+ req.execute('INSERT CWProperty X: X pkey "system.version.cubicweb", '
+ 'X value "hop", X for_user U')
+ self.assertEqual(cm.exception.errors,
+ {'for_user-subject': "site-wide property can't be set for user"})
+
+ def test_bad_type_cwproperty(self):
+ with self.admin_access.web_request() as req:
+ with self.assertRaises(ValidationError) as cm:
+ req.execute('INSERT CWProperty X: X pkey "ui.language", '
+ 'X value "hop", X for_user U')
+ self.assertEqual(cm.exception.errors,
+ {'value-subject': u'unauthorized value'})
+ with self.assertRaises(ValidationError) as cm:
+ req.execute('INSERT CWProperty X: X pkey "ui.language", X value "hop"')
+ self.assertEqual(cm.exception.errors, {'value-subject': u'unauthorized value'})
+
+ def test_vreg_propertyvalues_update(self):
+ self.vreg.register_property(
+ 'test.int', type='Int', help='', sitewide=True)
+ with self.admin_access.repo_cnx() as cnx:
+ cnx.execute('INSERT CWProperty X: X pkey "test.int", X value "42"')
+ cnx.commit()
+ self.assertEqual(self.vreg.property_value('test.int'), 42)
+
+ def test_sync_user_props(self):
+ with self.admin_access.client_cnx() as cnx:
+ self.assertNotIn('ui.language', cnx.user.properties)
+ cnx.user.set_property(u'ui.language', u'fr')
+ self.assertNotIn('ui.language', cnx.user.properties)
+ cnx.commit()
+ self.assertEqual(cnx.user.properties['ui.language'], 'fr')
+ cnx.user.set_property(u'ui.language', u'en')
+ self.assertEqual(cnx.user.properties['ui.language'], 'fr')
+ cnx.commit()
+ self.assertEqual(cnx.user.properties['ui.language'], 'en')
+ cnx.execute('DELETE CWProperty X WHERE X for_user U, U eid %(u)s',
+ {'u': cnx.user.eid})
+ self.assertEqual(cnx.user.properties['ui.language'], 'en')
+ cnx.commit()
+ self.assertNotIn('ui.language', cnx.user.properties)
+
+ def test_sync_sitewide_props(self):
+ with self.admin_access.client_cnx() as cnx:
+ self.assertNotIn('ui.language', cnx.vreg['propertyvalues'])
+ cwprop = cnx.create_entity('CWProperty', pkey=u'ui.language', value=u'fr')
+ self.assertNotIn('ui.language', cnx.vreg['propertyvalues'])
+ cnx.commit()
+ self.assertEqual(cnx.vreg['propertyvalues']['ui.language'], 'fr')
+ cwprop.cw_set(value=u'en')
+ self.assertEqual(cnx.vreg['propertyvalues']['ui.language'], 'fr')
+ cnx.commit()
+ self.assertEqual(cnx.vreg['propertyvalues']['ui.language'], 'en')
+ cwprop.cw_delete()
+ self.assertEqual(cnx.vreg['propertyvalues']['ui.language'], 'en')
+ cnx.commit()
+ self.assertNotIn('ui.language', cnx.vreg['propertyvalues'])
+
+
+class UserGroupsSyncTC(CubicWebTC):
+
+ def test_sync_groups(self):
+ with self.admin_access.client_cnx() as cnx:
+ cnx.execute('SET U in_group G WHERE G name "users", U eid %(u)s',
+ {'u': cnx.user.eid})
+ self.assertEqual(cnx.user.groups, set(['managers']))
+ cnx.commit()
+ self.assertEqual(cnx.user.groups, set(['managers', 'users']))
+ cnx.execute('DELETE U in_group G WHERE G name "users", U eid %(u)s',
+ {'u': cnx.user.eid})
+ self.assertEqual(cnx.user.groups, set(['managers', 'users']))
+ cnx.commit()
+ self.assertEqual(cnx.user.groups, set(['managers']))
+
+
+if __name__ == '__main__':
+ import unittest
+ unittest.main()
diff -r 1400aee10df4 -r faf279e33298 cubicweb/hooks/workflow.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/hooks/workflow.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,357 @@
+# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+"""Core hooks: workflow related hooks"""
+
+__docformat__ = "restructuredtext en"
+from cubicweb import _
+
+from datetime import datetime
+
+
+from cubicweb import RepositoryError, validation_error
+from cubicweb.predicates import is_instance, adaptable
+from cubicweb.server import hook
+
+
+def _change_state(cnx, x, oldstate, newstate):
+ nocheck = cnx.transaction_data.setdefault('skip-security', set())
+ nocheck.add((x, 'in_state', oldstate))
+ nocheck.add((x, 'in_state', newstate))
+ # delete previous state first
+ cnx.delete_relation(x, 'in_state', oldstate)
+ cnx.add_relation(x, 'in_state', newstate)
+
+
+# operations ###################################################################
+
+class _SetInitialStateOp(hook.Operation):
+ """make initial state be a default state"""
+ eid = None # make pylint happy
+
+ def precommit_event(self):
+ cnx = self.cnx
+ entity = cnx.entity_from_eid(self.eid)
+ iworkflowable = entity.cw_adapt_to('IWorkflowable')
+ # if there is an initial state and the entity's state is not set,
+ # use the initial state as a default state
+ if not (cnx.deleted_in_transaction(entity.eid) or entity.in_state) \
+ and iworkflowable.current_workflow:
+ state = iworkflowable.current_workflow.initial
+ if state:
+ cnx.add_relation(self.eid, 'in_state', state.eid)
+ _FireAutotransitionOp(cnx, eid=self.eid)
+
+class _FireAutotransitionOp(hook.Operation):
+ """try to fire auto transition after state changes"""
+ eid = None # make pylint happy
+
+ def precommit_event(self):
+ entity = self.cnx.entity_from_eid(self.eid)
+ iworkflowable = entity.cw_adapt_to('IWorkflowable')
+ autotrs = list(iworkflowable.possible_transitions('auto'))
+ if autotrs:
+ assert len(autotrs) == 1
+ iworkflowable.fire_transition(autotrs[0])
+
+
+class _WorkflowChangedOp(hook.Operation):
+ """fix entity current state when changing its workflow"""
+ eid = wfeid = None # make pylint happy
+
+ def precommit_event(self):
+ # notice that enforcing that the new workflow applies to the entity's type
+ # is done by schema rule, no need to check it here
+ cnx = self.cnx
+ pendingeids = cnx.transaction_data.get('pendingeids', ())
+ if self.eid in pendingeids:
+ return
+ entity = cnx.entity_from_eid(self.eid)
+ iworkflowable = entity.cw_adapt_to('IWorkflowable')
+ # check custom workflow has not been rechanged to another one in the same
+ # transaction
+ mainwf = iworkflowable.main_workflow
+ if mainwf.eid == self.wfeid:
+ deststate = mainwf.initial
+ if not deststate:
+ msg = _('workflow has no initial state')
+ raise validation_error(entity, {('custom_workflow', 'subject'): msg})
+ if mainwf.state_by_eid(iworkflowable.current_state.eid):
+ # nothing to do
+ return
+ # if there is no history, simply go to new workflow's initial state
+ if not iworkflowable.workflow_history:
+ if iworkflowable.current_state.eid != deststate.eid:
+ _change_state(cnx, entity.eid,
+ iworkflowable.current_state.eid, deststate.eid)
+ _FireAutotransitionOp(cnx, eid=entity.eid)
+ return
+ msg = cnx._('workflow changed to "%s"')
+ msg %= cnx._(mainwf.name)
+ cnx.transaction_data[(entity.eid, 'customwf')] = self.wfeid
+ iworkflowable.change_state(deststate, msg, u'text/plain')
+
+
+class _CheckTrExitPoint(hook.Operation):
+ treid = None # make pylint happy
+
+ def precommit_event(self):
+ tr = self.cnx.entity_from_eid(self.treid)
+ outputs = set()
+ for ep in tr.subworkflow_exit:
+ if ep.subwf_state.eid in outputs:
+ msg = _("can't have multiple exits on the same state")
+ raise validation_error(self.treid, {('subworkflow_exit', 'subject'): msg})
+ outputs.add(ep.subwf_state.eid)
+
+
+class _SubWorkflowExitOp(hook.Operation):
+ foreid = trinfo = None # make pylint happy
+
+ def precommit_event(self):
+ cnx = self.cnx
+ forentity = cnx.entity_from_eid(self.foreid)
+ iworkflowable = forentity.cw_adapt_to('IWorkflowable')
+ trinfo = self.trinfo
+ # we're in a subworkflow, check if we've reached an exit point
+ wftr = iworkflowable.subworkflow_input_transition()
+ if wftr is None:
+ # inconsistency detected
+ msg = _("state doesn't belong to entity's current workflow")
+ raise validation_error(self.trinfo, {('to_state', 'subject'): msg})
+ tostate = wftr.get_exit_point(forentity, trinfo.cw_attr_cache['to_state'])
+ if tostate is not None:
+ # reached an exit point
+ msg = _('exiting from subworkflow %s')
+ msg %= cnx._(iworkflowable.current_workflow.name)
+ cnx.transaction_data[(forentity.eid, 'subwfentrytr')] = True
+ iworkflowable.change_state(tostate, msg, u'text/plain', tr=wftr)
+
+
+# hooks ########################################################################
+
+class WorkflowHook(hook.Hook):
+ __abstract__ = True
+ category = 'metadata'
+
+
+class SetInitialStateHook(WorkflowHook):
+ __regid__ = 'wfsetinitial'
+ __select__ = WorkflowHook.__select__ & adaptable('IWorkflowable')
+ events = ('after_add_entity',)
+
+ def __call__(self):
+ _SetInitialStateOp(self._cw, eid=self.entity.eid)
+
+
+class FireTransitionHook(WorkflowHook):
+ """check the transition is allowed and add missing information into the
+ TrInfo entity.
+
+ Expect that:
+ * wf_info_for inlined relation is set
+ * by_transition or to_state (managers only) inlined relation is set
+
+ Check for automatic transition to be fired at the end
+ """
+ __regid__ = 'wffiretransition'
+ __select__ = WorkflowHook.__select__ & is_instance('TrInfo')
+ events = ('before_add_entity',)
+
+ def __call__(self):
+ cnx = self._cw
+ entity = self.entity
+ # first retrieve the entity to which the state change applies
+ try:
+ foreid = entity.cw_attr_cache['wf_info_for']
+ except KeyError:
+ msg = _('mandatory relation')
+ raise validation_error(entity, {('wf_info_for', 'subject'): msg})
+ forentity = cnx.entity_from_eid(foreid)
+ # see comment in the TrInfo entity definition
+ entity.cw_edited['tr_count']=len(forentity.reverse_wf_info_for)
+ iworkflowable = forentity.cw_adapt_to('IWorkflowable')
+ # then check it has a workflow set, unless we're in the process of changing
+ # entity's workflow
+ if cnx.transaction_data.get((forentity.eid, 'customwf')):
+ wfeid = cnx.transaction_data[(forentity.eid, 'customwf')]
+ wf = cnx.entity_from_eid(wfeid)
+ else:
+ wf = iworkflowable.current_workflow
+ if wf is None:
+ msg = _('related entity has no workflow set')
+ raise validation_error(entity, {None: msg})
+ # then check it has a state set
+ fromstate = iworkflowable.current_state
+ if fromstate is None:
+ msg = _('related entity has no state')
+ raise validation_error(entity, {None: msg})
+ # True if we are coming back from subworkflow
+ swtr = cnx.transaction_data.pop((forentity.eid, 'subwfentrytr'), None)
+ cowpowers = (cnx.user.is_in_group('managers')
+ or not cnx.write_security)
+ # now investigate the requested state change...
+ try:
+ treid = entity.cw_attr_cache['by_transition']
+ except KeyError:
+ # no transition set, check user is a manager and destination state
+ # is specified (and valid)
+ if not cowpowers:
+ msg = _('mandatory relation')
+ raise validation_error(entity, {('by_transition', 'subject'): msg})
+ deststateeid = entity.cw_attr_cache.get('to_state')
+ if not deststateeid:
+ msg = _('mandatory relation')
+ raise validation_error(entity, {('by_transition', 'subject'): msg})
+ deststate = wf.state_by_eid(deststateeid)
+ if deststate is None:
+ msg = _("state doesn't belong to entity's workflow")
+ raise validation_error(entity, {('to_state', 'subject'): msg})
+ else:
+ # check transition is valid and allowed, unless we're coming back
+ # from subworkflow
+ tr = cnx.entity_from_eid(treid)
+ if swtr is None:
+ qname = ('by_transition', 'subject')
+ if tr is None:
+ msg = _("transition doesn't belong to entity's workflow")
+ raise validation_error(entity, {qname: msg})
+ if not tr.has_input_state(fromstate):
+ msg = _("transition %(tr)s isn't allowed from %(st)s")
+ raise validation_error(entity, {qname: msg}, {
+ 'tr': tr.name, 'st': fromstate.name}, ['tr', 'st'])
+ if not tr.may_be_fired(foreid):
+ msg = _("transition may not be fired")
+ raise validation_error(entity, {qname: msg})
+ deststateeid = entity.cw_attr_cache.get('to_state')
+ if deststateeid is not None:
+ if not cowpowers and deststateeid != tr.destination(forentity).eid:
+ msg = _("transition isn't allowed")
+ raise validation_error(entity, {('by_transition', 'subject'): msg})
+ if swtr is None:
+ deststate = cnx.entity_from_eid(deststateeid)
+ if not cowpowers and deststate is None:
+ msg = _("state doesn't belong to entity's workflow")
+ raise validation_error(entity, {('to_state', 'subject'): msg})
+ else:
+ deststateeid = tr.destination(forentity).eid
+ # everything is ok, add missing information on the trinfo entity
+ entity.cw_edited['from_state'] = fromstate.eid
+ entity.cw_edited['to_state'] = deststateeid
+ nocheck = cnx.transaction_data.setdefault('skip-security', set())
+ nocheck.add((entity.eid, 'from_state', fromstate.eid))
+ nocheck.add((entity.eid, 'to_state', deststateeid))
+ _FireAutotransitionOp(cnx, eid=forentity.eid)
+
+
+class FiredTransitionHook(WorkflowHook):
+ """change related entity state and handle exit of subworkflow"""
+ __regid__ = 'wffiretransition'
+ __select__ = WorkflowHook.__select__ & is_instance('TrInfo')
+ events = ('after_add_entity',)
+
+ def __call__(self):
+ trinfo = self.entity
+ rcache = trinfo.cw_attr_cache
+ _change_state(self._cw, rcache['wf_info_for'], rcache['from_state'],
+ rcache['to_state'])
+ forentity = self._cw.entity_from_eid(rcache['wf_info_for'])
+ iworkflowable = forentity.cw_adapt_to('IWorkflowable')
+ assert iworkflowable.current_state.eid == rcache['to_state']
+ if iworkflowable.main_workflow.eid != iworkflowable.current_workflow.eid:
+ _SubWorkflowExitOp(self._cw, foreid=forentity.eid, trinfo=trinfo)
+
+
+class CheckInStateChangeAllowed(WorkflowHook):
+ """check state apply, in case of direct in_state change using unsafe execute
+ """
+ __regid__ = 'wfcheckinstate'
+ __select__ = WorkflowHook.__select__ & hook.match_rtype('in_state')
+ events = ('before_add_relation',)
+ category = 'integrity'
+
+ def __call__(self):
+ cnx = self._cw
+ nocheck = cnx.transaction_data.get('skip-security', ())
+ if (self.eidfrom, 'in_state', self.eidto) in nocheck:
+ # state changed through TrInfo insertion, so we already know it's ok
+ return
+ entity = cnx.entity_from_eid(self.eidfrom)
+ iworkflowable = entity.cw_adapt_to('IWorkflowable')
+ mainwf = iworkflowable.main_workflow
+ if mainwf is None:
+ msg = _('entity has no workflow set')
+ raise validation_error(entity, {None: msg})
+ for wf in mainwf.iter_workflows():
+ if wf.state_by_eid(self.eidto):
+ break
+ else:
+ msg = _("state doesn't belong to entity's workflow. You may "
+ "want to set a custom workflow for this entity first.")
+ raise validation_error(self.eidfrom, {('in_state', 'subject'): msg})
+ if iworkflowable.current_workflow and wf.eid != iworkflowable.current_workflow.eid:
+ msg = _("state doesn't belong to entity's current workflow")
+ raise validation_error(self.eidfrom, {('in_state', 'subject'): msg})
+
+
+class SetModificationDateOnStateChange(WorkflowHook):
+ """update entity's modification date after changing its state"""
+ __regid__ = 'wfsyncmdate'
+ __select__ = WorkflowHook.__select__ & hook.match_rtype('in_state')
+ events = ('after_add_relation',)
+
+ def __call__(self):
+ if self._cw.added_in_transaction(self.eidfrom):
+ # new entity, not needed
+ return
+ entity = self._cw.entity_from_eid(self.eidfrom)
+ try:
+ entity.cw_set(modification_date=datetime.utcnow())
+ except RepositoryError as ex:
+ # usually occurs if entity is coming from a read-only source
+ # (eg ldap user)
+ self.warning('cant change modification date for %s: %s', entity, ex)
+
+
+class CheckWorkflowTransitionExitPoint(WorkflowHook):
+ """check that there is no multiple exits from the same state"""
+ __regid__ = 'wfcheckwftrexit'
+ __select__ = WorkflowHook.__select__ & hook.match_rtype('subworkflow_exit')
+ events = ('after_add_relation',)
+
+ def __call__(self):
+ _CheckTrExitPoint(self._cw, treid=self.eidfrom)
+
+
+class SetCustomWorkflow(WorkflowHook):
+ __regid__ = 'wfsetcustom'
+ __select__ = WorkflowHook.__select__ & hook.match_rtype('custom_workflow')
+ events = ('after_add_relation',)
+
+ def __call__(self):
+ _WorkflowChangedOp(self._cw, eid=self.eidfrom, wfeid=self.eidto)
+
+
+class DelCustomWorkflow(SetCustomWorkflow):
+ __regid__ = 'wfdelcustom'
+ events = ('after_delete_relation',)
+
+ def __call__(self):
+ entity = self._cw.entity_from_eid(self.eidfrom)
+ typewf = entity.cw_adapt_to('IWorkflowable').cwetype_workflow()
+ if typewf is not None:
+ _WorkflowChangedOp(self._cw, eid=self.eidfrom, wfeid=typewf.eid)
diff -r 1400aee10df4 -r faf279e33298 cubicweb/hooks/zmq.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/hooks/zmq.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+
+from cubicweb.server import hook
+
+class ZMQStopHook(hook.Hook):
+ __regid__ = 'zmqstop'
+ events = ('server_shutdown',)
+
+ def __call__(self):
+ self.repo.app_instances_bus.stop()
+
+class ZMQStartHook(hook.Hook):
+ __regid__ = 'zmqstart'
+ events = ('server_startup',)
+ order = -1
+
+ def __call__(self):
+ config = self.repo.config
+ address_pub = config.get('zmq-address-pub')
+ address_sub = config.get('zmq-address-sub')
+ if not address_pub and not address_sub:
+ return
+ from cubicweb.server import cwzmq
+ self.repo.app_instances_bus = cwzmq.ZMQComm()
+ if address_pub:
+ self.repo.app_instances_bus.add_publisher(address_pub)
+ def clear_cache_callback(msg):
+ self.debug('clear_caches: %s', ' '.join(msg))
+ self.repo.clear_caches(msg[1:])
+ self.repo.app_instances_bus.add_subscription('delete', clear_cache_callback)
+ for address in address_sub:
+ self.repo.app_instances_bus.add_subscriber(address)
+ self.repo.app_instances_bus.start()
diff -r 1400aee10df4 -r faf279e33298 cubicweb/i18n.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/i18n.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,117 @@
+# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""Some i18n/gettext utilities."""
+from __future__ import print_function
+
+__docformat__ = "restructuredtext en"
+
+import re
+import os
+from os.path import join, basename, splitext, exists
+from glob import glob
+
+from six import PY2
+
+from cubicweb.toolsutils import create_dir
+
+def extract_from_tal(files, output_file):
+ """extract i18n strings from tal and write them into the given output file
+ using standard python gettext marker (_)
+ """
+ output = open(output_file, 'w')
+ for filepath in files:
+ for match in re.finditer('i18n:(content|replace)="([^"]+)"', open(filepath).read()):
+ output.write('_("%s")' % match.group(2))
+ output.close()
+
+
+def add_msg(w, msgid, msgctx=None):
+ """write an empty pot msgid definition"""
+ if PY2 and isinstance(msgid, unicode):
+ msgid = msgid.encode('utf-8')
+ if msgctx:
+ if PY2 and isinstance(msgctx, unicode):
+ msgctx = msgctx.encode('utf-8')
+ w('msgctxt "%s"\n' % msgctx)
+ msgid = msgid.replace('"', r'\"').splitlines()
+ if len(msgid) > 1:
+ w('msgid ""\n')
+ for line in msgid:
+ w('"%s"' % line.replace('"', r'\"'))
+ else:
+ w('msgid "%s"\n' % msgid[0])
+ w('msgstr ""\n\n')
+
+def execute2(args):
+ # XXX replace this with check_output in Python 2.7
+ from subprocess import Popen, PIPE, CalledProcessError
+ p = Popen(args, stdout=PIPE, stderr=PIPE)
+ out, err = p.communicate()
+ if p.returncode != 0:
+ exc = CalledProcessError(p.returncode, args[0])
+ exc.cmd = args
+ exc.data = (out, err)
+ raise exc
+
+def available_catalogs(i18ndir=None):
+ if i18ndir is None:
+ wildcard = '*.po'
+ else:
+ wildcard = join(i18ndir, '*.po')
+ for popath in glob(wildcard):
+ lang = splitext(basename(popath))[0]
+ yield lang, popath
+
+
+def compile_i18n_catalogs(sourcedirs, destdir, langs):
+ """generate .mo files for a set of languages into the `destdir` i18n directory
+ """
+ from subprocess import CalledProcessError
+ from logilab.common.fileutils import ensure_fs_mode
+ print('-> compiling message catalogs to %s' % destdir)
+ errors = []
+ for lang in langs:
+ langdir = join(destdir, lang, 'LC_MESSAGES')
+ if not exists(langdir):
+ create_dir(langdir)
+ pofiles = [join(path, '%s.po' % lang) for path in sourcedirs]
+ pofiles = [pof for pof in pofiles if exists(pof)]
+ mergedpo = join(destdir, '%s_merged.po' % lang)
+ try:
+ # merge instance/cubes messages catalogs with the stdlib's one
+ cmd = ['msgcat', '--use-first', '--sort-output', '--strict',
+ '-o', mergedpo] + pofiles
+ execute2(cmd)
+ # make sure the .mo file is writeable and compiles with *msgfmt*
+ applmo = join(destdir, lang, 'LC_MESSAGES', 'cubicweb.mo')
+ try:
+ ensure_fs_mode(applmo)
+ except OSError:
+ pass # suppose not exists
+ execute2(['msgfmt', mergedpo, '-o', applmo])
+ except CalledProcessError as exc:
+ errors.append(u'while handling language %s:\ncmd:\n%s\nstdout:\n%s\nstderr:\n%s\n' %
+ (lang, exc.cmd, repr(exc.data[0]), repr(exc.data[1])))
+ except Exception as exc:
+ errors.append(u'while handling language %s: %s' % (lang, exc))
+ try:
+ # clean everything
+ os.unlink(mergedpo)
+ except Exception:
+ continue
+ return errors
diff -r 1400aee10df4 -r faf279e33298 cubicweb/i18n/de.po
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/i18n/de.po Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,4708 @@
+# cubicweb i18n catalog
+# Copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# Logilab
+msgid ""
+msgstr ""
+"Project-Id-Version: 2.0\n"
+"POT-Creation-Date: 2006-01-12 17:35+CET\n"
+"PO-Revision-Date: 2010-09-15 14:55+0200\n"
+"Last-Translator: Dr. Leo \n"
+"Language-Team: English \n"
+"Language: de\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: pygettext.py 1.5\n"
+"Plural-Forms: nplurals=2; plural=(n > 1);\n"
+
+#, python-format
+msgid ""
+"\n"
+"%(user)s changed status from <%(previous_state)s> to <%(current_state)s> for "
+"entity\n"
+"'%(title)s'\n"
+"\n"
+"%(comment)s\n"
+"\n"
+"url: %(url)s\n"
+msgstr ""
+"\n"
+"%(user)s hat den Zustand geändert von <%(previous_state)s> in <"
+"%(current_state)s> für die Entität\n"
+"'%(title)s'\n"
+"\n"
+"%(comment)s\n"
+"\n"
+"url: %(url)s\n"
+
+#, python-format
+msgid " from state %(fromstate)s to state %(tostate)s\n"
+msgstr " aus dem Zustand %(fromstate)s in den Zustand %(tostate)s\n"
+
+msgid " :"
+msgstr " :"
+
+#, python-format
+msgid "\"action\" must be specified in options; allowed values are %s"
+msgstr ""
+
+msgid "\"role=subject\" or \"role=object\" must be specified in options"
+msgstr ""
+
+#, python-format
+msgid "%(KEY-cstr)s constraint failed for value %(KEY-value)r"
+msgstr ""
+
+#, python-format
+msgid "%(KEY-rtype)s is part of violated unicity constraint"
+msgstr ""
+
+#, python-format
+msgid "%(KEY-value)r doesn't match the %(KEY-regexp)r regular expression"
+msgstr ""
+
+#, python-format
+msgid "%(attr)s set to %(newvalue)s"
+msgstr "%(attr)s geändert in %(newvalue)s"
+
+#, python-format
+msgid "%(attr)s updated from %(oldvalue)s to %(newvalue)s"
+msgstr "%(attr)s geändert von %(oldvalue)s in %(newvalue)s"
+
+#, python-format
+msgid "%(etype)s by %(author)s"
+msgstr ""
+
+#, python-format
+msgid "%(firstname)s %(surname)s"
+msgstr "%(firstname)s %(surname)s"
+
+#, python-format
+msgid "%(subject)s %(etype)s #%(eid)s (%(login)s)"
+msgstr "%(subject)s %(etype)s #%(eid)s (%(login)s)"
+
+#, python-format
+msgid "%d days"
+msgstr "%d Tage"
+
+#, python-format
+msgid "%d hours"
+msgstr "%d Stunden"
+
+#, python-format
+msgid "%d minutes"
+msgstr "%d Minuten"
+
+#, python-format
+msgid "%d months"
+msgstr "%d Monate"
+
+#, python-format
+msgid "%d seconds"
+msgstr "%d Sekunden"
+
+#, python-format
+msgid "%d weeks"
+msgstr "%d Wochen"
+
+#, python-format
+msgid "%d years"
+msgstr "%d Jahre"
+
+#, python-format
+msgid "%s could be supported"
+msgstr ""
+
+#, python-format
+msgid "%s error report"
+msgstr "%s Fehlerbericht"
+
+#, python-format
+msgid "%s software version of the database"
+msgstr "Software-Version der Datenbank %s"
+
+#, python-format
+msgid "%s updated"
+msgstr "%s aktualisiert"
+
+#, python-format
+msgid "'%s' action doesn't take any options"
+msgstr ""
+
+#, python-format
+msgid ""
+"'%s' action for in_state relation should at least have 'linkattr=name' option"
+msgstr ""
+
+#, python-format
+msgid "'%s' action requires 'linkattr' option"
+msgstr ""
+
+msgid "(UNEXISTANT EID)"
+msgstr "(EID nicht gefunden)"
+
+#, python-format
+msgid "(suppressed) entity #%d"
+msgstr ""
+
+msgid "**"
+msgstr "0..n 0..n"
+
+msgid "*+"
+msgstr "0..n 1..n"
+
+msgid "*1"
+msgstr "0..n 1"
+
+msgid "*?"
+msgstr "0..n 0..1"
+
+msgid "+*"
+msgstr "1..n 0..n"
+
+msgid "++"
+msgstr "1..n 1..n"
+
+msgid "+1"
+msgstr "1..n 1"
+
+msgid "+?"
+msgstr "1..n 0..1"
+
+msgid "1*"
+msgstr "1 0..n"
+
+msgid "1+"
+msgstr "1 1..n"
+
+msgid "11"
+msgstr "1 1"
+
+msgid "1?"
+msgstr "1 0..1"
+
+#, python-format
+msgid "<%s not specified>"
+msgstr "<%s nicht spezifiziert>"
+
+#, python-format
+msgid ""
+"
Este esquema del modelo de datos no incluye los meta-datos, "
+"pero se puede ver a un modelo completo con meta-datos."
+"div>"
+
+msgid ""
+msgstr ""
+
+msgid ""
+msgstr ""
+
+msgid "?*"
+msgstr "0..1 0..n"
+
+msgid "?+"
+msgstr "0..1 1..n"
+
+msgid "?1"
+msgstr "0..1 1"
+
+msgid "??"
+msgstr "0..1 0..1"
+
+msgid "AND"
+msgstr "Y"
+
+msgid "About this site"
+msgstr "Información del Sistema"
+
+#, python-format
+msgid "Added relation : %(entity_from)s %(rtype)s %(entity_to)s"
+msgstr "Relación agregada : %(entity_from)s %(rtype)s %(entity_to)s"
+
+msgid "Attributes permissions:"
+msgstr "Permisos de atributos:"
+
+# schema pot file, generated on 2009-09-16 16:46:55
+#
+# singular and plural forms for each entity type
+msgid "BaseTransition"
+msgstr "Transición (abstracta)"
+
+msgid "BaseTransition_plural"
+msgstr "Transiciones (abstractas)"
+
+msgid "BigInt"
+msgstr "Big integer"
+
+msgid "BigInt_plural"
+msgstr "Big integers"
+
+msgid "Bookmark"
+msgstr "Favorito"
+
+msgid "Bookmark_plural"
+msgstr "Favoritos"
+
+msgid "Boolean"
+msgstr "Booleano"
+
+msgid "Boolean_plural"
+msgstr "Booleanos"
+
+msgid "BoundConstraint"
+msgstr "Restricción de límite"
+
+msgid "BoundaryConstraint"
+msgstr "Restricción de límite"
+
+msgid "Browse by entity type"
+msgstr "Busca por tipo de entidad"
+
+#, python-format
+msgid "By %(user)s on %(dt)s [%(undo_link)s]"
+msgstr "Por %(user)s en %(dt)s [%(undo_link)s]"
+
+msgid "Bytes"
+msgstr "Bytes"
+
+msgid "Bytes_plural"
+msgstr "Bytes"
+
+msgid "CWAttribute"
+msgstr "Atributo"
+
+msgid "CWAttribute_plural"
+msgstr "Atributos"
+
+msgid "CWCache"
+msgstr "Cache"
+
+msgid "CWCache_plural"
+msgstr "Caches"
+
+msgid "CWComputedRType"
+msgstr ""
+
+msgid "CWComputedRType_plural"
+msgstr ""
+
+msgid "CWConstraint"
+msgstr "Restricción"
+
+msgid "CWConstraintType"
+msgstr "Tipo de Restricción"
+
+msgid "CWConstraintType_plural"
+msgstr "Tipos de Restricción"
+
+msgid "CWConstraint_plural"
+msgstr "Restricciones"
+
+msgid "CWDataImport"
+msgstr "Importación de Datos"
+
+msgid "CWDataImport_plural"
+msgstr "Importaciones de Datos"
+
+msgid "CWEType"
+msgstr "Tipo de entidad"
+
+msgctxt "inlined:CWRelation.from_entity.subject"
+msgid "CWEType"
+msgstr "Tipo de entidad"
+
+msgctxt "inlined:CWRelation.to_entity.subject"
+msgid "CWEType"
+msgstr "Tipo de entidad"
+
+msgid "CWEType_plural"
+msgstr "Tipos de entidades"
+
+msgid "CWGroup"
+msgstr "Groupo"
+
+msgid "CWGroup_plural"
+msgstr "Grupos"
+
+msgid "CWProperty"
+msgstr "Propiedad"
+
+msgid "CWProperty_plural"
+msgstr "Propiedades"
+
+msgid "CWRType"
+msgstr "Tipo de relación"
+
+msgctxt "inlined:CWRelation.relation_type.subject"
+msgid "CWRType"
+msgstr "Tipo de relación"
+
+msgid "CWRType_plural"
+msgstr "Tipos de relación"
+
+msgid "CWRelation"
+msgstr "Relación"
+
+msgid "CWRelation_plural"
+msgstr "Relaciones"
+
+msgid "CWSource"
+msgstr "Fuente de datos"
+
+msgid "CWSourceHostConfig"
+msgstr "Configuración de Fuente"
+
+msgid "CWSourceHostConfig_plural"
+msgstr "Configuraciones de fuente"
+
+msgid "CWSourceSchemaConfig"
+msgstr "Configuraciones de Esquema de Fuente"
+
+msgid "CWSourceSchemaConfig_plural"
+msgstr "Configuraciones de Esquema de Fuente"
+
+msgid "CWSource_plural"
+msgstr "Fuentes de Datos"
+
+msgid "CWUniqueTogetherConstraint"
+msgstr "Restricción de Singularidad"
+
+msgid "CWUniqueTogetherConstraint_plural"
+msgstr "Restricciones de Singularidad"
+
+msgid "CWUser"
+msgstr "Usuario"
+
+msgid "CWUser_plural"
+msgstr "Usuarios"
+
+#, python-format
+msgid ""
+"Can't restore %(role)s relation %(rtype)s to entity %(eid)s which is already "
+"linked using this relation."
+msgstr ""
+"No puede restaurar la relación %(role)s %(rtype)s en la entidad %(eid)s pues "
+"ya esta ligada a otra entidad usando esa relación."
+
+#, python-format
+msgid ""
+"Can't restore relation %(rtype)s between %(subj)s and %(obj)s, that relation "
+"does not exists anymore in the schema."
+msgstr ""
+"No puede restaurar la relación %(rtype)s entre %(subj)s y %(obj)s, esta "
+"relación ya no existe en el esquema."
+
+#, python-format
+msgid ""
+"Can't restore relation %(rtype)s, %(role)s entity %(eid)s doesn't exist "
+"anymore."
+msgstr ""
+"No puede restaurar la relación %(rtype)s, la entidad %(role)s %(eid)s ya no "
+"existe."
+
+#, python-format
+msgid ""
+"Can't undo addition of relation %(rtype)s from %(subj)s to %(obj)s, doesn't "
+"exist anymore"
+msgstr ""
+"No puede anular el agregar la relación %(rtype)s de %(subj)s a %(obj)s, esta "
+"relación ya no existe"
+
+#, python-format
+msgid ""
+"Can't undo creation of entity %(eid)s of type %(etype)s, type no more "
+"supported"
+msgstr ""
+"No puede anular la creación de la entidad %(eid)s de tipo %(etype)s, este "
+"tipo ya no existe"
+
+msgid "Click to sort on this column"
+msgstr "Seleccione para ordenar columna"
+
+msgid ""
+"Configuration of the system source goes to the 'sources' file, not in the "
+"database"
+msgstr ""
+"La configuración de la fuente sistema va en el archivo \"Sources\"/Fuentes, "
+"y no en la base de datos."
+
+#, python-format
+msgid "Created %(etype)s : %(entity)s"
+msgstr "Se creó %(etype)s : %(entity)s"
+
+msgid "DEBUG"
+msgstr "DEPURAR"
+
+msgid "Date"
+msgstr "Fecha"
+
+msgid "Date_plural"
+msgstr "Fechas"
+
+msgid "Datetime"
+msgstr "Fecha y hora"
+
+msgid "Datetime_plural"
+msgstr "Fechas y horas"
+
+msgid "Decimal"
+msgstr "Decimal"
+
+msgid "Decimal_plural"
+msgstr "Decimales"
+
+#, python-format
+msgid "Delete relation : %(entity_from)s %(rtype)s %(entity_to)s"
+msgstr "Eliminar relación : %(entity_from)s %(rtype)s %(entity_to)s"
+
+#, python-format
+msgid "Deleted %(etype)s : %(entity)s"
+msgstr "Se eliminó %(etype)s : %(entity)s"
+
+msgid "Detected problems"
+msgstr "Problemas detectados"
+
+msgid "Do you want to delete the following element(s)?"
+msgstr "Desea eliminar el/los elemento(s) a continuación?"
+
+msgid "Download schema as OWL"
+msgstr "Descargar el esquema en formato OWL"
+
+msgid "ERROR"
+msgstr "ERROR"
+
+msgid "EmailAddress"
+msgstr "Correo Electrónico"
+
+msgctxt "inlined:CWUser.use_email.subject"
+msgid "EmailAddress"
+msgstr "Correo Electrónico"
+
+msgid "EmailAddress_plural"
+msgstr "Direcciones de Correo Electrónico"
+
+msgid "Entities"
+msgstr "Entidades"
+
+#, python-format
+msgid ""
+"Entity %(eid)s has changed since you started to edit it. Reload the page and "
+"reapply your changes."
+msgstr ""
+
+msgid "Entity and relation supported by this source"
+msgstr "Entidades y relaciones aceptadas por esta fuente"
+
+msgid "ExternalUri"
+msgstr "Uri externo"
+
+msgid "ExternalUri_plural"
+msgstr "Uris externos"
+
+msgid "FATAL"
+msgstr "FATAL"
+
+msgid "Float"
+msgstr "Número flotante"
+
+msgid "Float_plural"
+msgstr "Números flotantes"
+
+# schema pot file, generated on 2009-12-03 09:22:35
+#
+# singular and plural forms for each entity type
+msgid "FormatConstraint"
+msgstr "Restricción de Formato"
+
+msgid "Garbage collection information"
+msgstr "Recolector de basura en memoria"
+
+msgid "Help"
+msgstr "Ayuda"
+
+msgid "INFO"
+msgstr "INFO"
+
+msgid "Instance"
+msgstr "Instancia"
+
+msgid "Int"
+msgstr "Número entero"
+
+msgid "Int_plural"
+msgstr "Números enteros"
+
+msgid "Interval"
+msgstr "Duración"
+
+msgid "IntervalBoundConstraint"
+msgstr "Restricción de intervalo"
+
+msgid "Interval_plural"
+msgstr "Duraciones"
+
+msgid "Link:"
+msgstr "Liga:"
+
+msgid "Looked up classes"
+msgstr "Clases buscadas"
+
+msgid "Manage"
+msgstr "Administración"
+
+msgid "Manage security"
+msgstr "Gestión de seguridad"
+
+msgid "Message threshold"
+msgstr "Límite de mensajes"
+
+msgid "Most referenced classes"
+msgstr "Clases más referenciadas"
+
+msgid "New BaseTransition"
+msgstr "XXX"
+
+msgid "New Bookmark"
+msgstr "Agregar a Favoritos"
+
+msgid "New CWAttribute"
+msgstr "Nueva definición de relación final"
+
+msgid "New CWCache"
+msgstr "Agregar Caché"
+
+msgid "New CWComputedRType"
+msgstr ""
+
+msgid "New CWConstraint"
+msgstr "Agregar Restricción"
+
+msgid "New CWConstraintType"
+msgstr "Agregar tipo de Restricción"
+
+msgid "New CWDataImport"
+msgstr "Nueva importación de datos"
+
+msgid "New CWEType"
+msgstr "Agregar tipo de entidad"
+
+msgid "New CWGroup"
+msgstr "Nuevo grupo"
+
+msgid "New CWProperty"
+msgstr "Agregar Propiedad"
+
+msgid "New CWRType"
+msgstr "Agregar tipo de relación"
+
+msgid "New CWRelation"
+msgstr "Nueva definición de relación final"
+
+msgid "New CWSource"
+msgstr "Nueva fuente"
+
+msgid "New CWSourceHostConfig"
+msgstr "Nueva configuración de fuente"
+
+msgid "New CWSourceSchemaConfig"
+msgstr "Nueva parte de mapeo de fuente"
+
+msgid "New CWUniqueTogetherConstraint"
+msgstr "Nueva restricción de singularidad"
+
+msgid "New CWUser"
+msgstr "Agregar usuario"
+
+msgid "New EmailAddress"
+msgstr "Agregar Email"
+
+msgid "New ExternalUri"
+msgstr "Agregar Uri externa"
+
+msgid "New RQLExpression"
+msgstr "Agregar expresión rql"
+
+msgid "New State"
+msgstr "Agregar Estado"
+
+msgid "New SubWorkflowExitPoint"
+msgstr "Agregar salida de sub-Workflow"
+
+msgid "New TrInfo"
+msgstr "Agregar Información de Transición"
+
+msgid "New Transition"
+msgstr "Agregar transición"
+
+msgid "New Workflow"
+msgstr "Agregar Workflow"
+
+msgid "New WorkflowTransition"
+msgstr "Agregar transición de Workflow"
+
+msgid "No result matching query"
+msgstr "Ningún resultado corresponde a su búsqueda"
+
+msgid "Non exhaustive list of views that may apply to entities of this type"
+msgstr "Lista no exhaustiva de vistas aplicables a este tipo de entidad"
+
+msgid "OR"
+msgstr "O"
+
+msgid "Ownership"
+msgstr "Propiedad"
+
+msgid "Parent class:"
+msgstr "Clase padre:"
+
+msgid "Password"
+msgstr "Contraseña"
+
+msgid "Password_plural"
+msgstr "Contraseñas"
+
+msgid "Please note that this is only a shallow copy"
+msgstr "Recuerde que sólo es una copia superficial"
+
+msgid "Powered by CubicWeb"
+msgstr "Potenciado en CubicWeb"
+
+msgid "RQLConstraint"
+msgstr "Restricción RQL"
+
+msgid "RQLExpression"
+msgstr "Expresión RQL"
+
+msgid "RQLExpression_plural"
+msgstr "Expresiones RQL"
+
+msgid "RQLUniqueConstraint"
+msgstr "Restricción RQL de Unicidad"
+
+msgid "RQLVocabularyConstraint"
+msgstr "Restricción RQL de Vocabulario"
+
+msgid "RegexpConstraint"
+msgstr "restricción expresión regular"
+
+msgid "Registry's content"
+msgstr "Contenido del registro"
+
+msgid "Relations"
+msgstr "Relaciones"
+
+msgid "Repository"
+msgstr "Repositorio"
+
+#, python-format
+msgid "Schema %s"
+msgstr "Esquema %s"
+
+msgid "Schema's permissions definitions"
+msgstr "Definiciones de permisos del esquema"
+
+msgid "Search for"
+msgstr "Buscar"
+
+msgid "Site information"
+msgstr "Información del Sitio"
+
+msgid "SizeConstraint"
+msgstr "Restricción de tamaño"
+
+msgid ""
+"Source's configuration for a particular host. One key=value per line, "
+"authorized keys depending on the source's type, overriding values defined on "
+"the source."
+msgstr ""
+"Configuración de la fuente por un \"host\" específico. Una clave=valor por "
+"línea, las claves permitidas dependen del tipo de fuente. Estos valores son "
+"prioritarios a los valores definidos en la fuente."
+
+msgid "Startup views"
+msgstr "Vistas de inicio"
+
+msgid "State"
+msgstr "Estado"
+
+msgid "State_plural"
+msgstr "Estados"
+
+msgid "StaticVocabularyConstraint"
+msgstr "Restricción de vocabulario"
+
+msgid "String"
+msgstr "Cadena de caracteres"
+
+msgid "String_plural"
+msgstr "Cadenas de caracteres"
+
+msgid "Sub-classes:"
+msgstr "Clases hijas:"
+
+msgid "SubWorkflowExitPoint"
+msgstr "Salida de sub-workflow"
+
+msgid "SubWorkflowExitPoint_plural"
+msgstr "Salidas de sub-workflow"
+
+msgid "Submit bug report"
+msgstr "Enviar un reporte de error (bug)"
+
+msgid "Submit bug report by mail"
+msgstr "Enviar este reporte por email"
+
+msgid "Synchronization has been requested, refresh this page in a few minutes."
+msgstr ""
+
+msgid "TZDatetime"
+msgstr "Fecha y hora internacional"
+
+msgid "TZDatetime_plural"
+msgstr "Fechas y horas internacionales"
+
+msgid "TZTime"
+msgstr "Hora internacional"
+
+msgid "TZTime_plural"
+msgstr "Horas internacionales"
+
+#, python-format
+msgid "The view %s can not be applied to this query"
+msgstr "La vista %s no puede ser aplicada a esta búsqueda"
+
+#, python-format
+msgid "The view %s could not be found"
+msgstr "La vista %s no ha podido ser encontrada"
+
+msgid "There is no default workflow"
+msgstr "Esta entidad no posee workflow por defecto"
+
+msgid "This BaseTransition:"
+msgstr "Esta transición abstracta:"
+
+msgid "This Bookmark:"
+msgstr "Este favorito:"
+
+msgid "This CWAttribute:"
+msgstr "Esta definición de relación final:"
+
+msgid "This CWCache:"
+msgstr "Este Caché:"
+
+msgid "This CWComputedRType:"
+msgstr ""
+
+msgid "This CWConstraint:"
+msgstr "Esta Restricción:"
+
+msgid "This CWConstraintType:"
+msgstr "Este tipo de Restricción:"
+
+msgid "This CWDataImport:"
+msgstr "Esta importación de datos:"
+
+msgid "This CWEType:"
+msgstr "Este tipo de Entidad:"
+
+msgid "This CWGroup:"
+msgstr "Este grupo:"
+
+msgid "This CWProperty:"
+msgstr "Esta propiedad:"
+
+msgid "This CWRType:"
+msgstr "Este tipo de relación:"
+
+msgid "This CWRelation:"
+msgstr "Esta definición de relación no final:"
+
+msgid "This CWSource:"
+msgstr "Esta fuente:"
+
+msgid "This CWSourceHostConfig:"
+msgstr "Esta configuración de fuente:"
+
+msgid "This CWSourceSchemaConfig:"
+msgstr "Esta parte de mapeo de fuente:"
+
+msgid "This CWUniqueTogetherConstraint:"
+msgstr "Esta restricción de singularidad:"
+
+msgid "This CWUser:"
+msgstr "Este usuario:"
+
+msgid "This EmailAddress:"
+msgstr "Esta dirección electrónica:"
+
+msgid "This ExternalUri:"
+msgstr "Este Uri externo:"
+
+msgid "This RQLExpression:"
+msgstr "Esta expresión RQL:"
+
+msgid "This State:"
+msgstr "Este estado:"
+
+msgid "This SubWorkflowExitPoint:"
+msgstr "Esta Salida de sub-workflow:"
+
+msgid "This TrInfo:"
+msgstr "Esta información de transición:"
+
+msgid "This Transition:"
+msgstr "Esta transición:"
+
+msgid "This Workflow:"
+msgstr "Este Workflow:"
+
+msgid "This WorkflowTransition:"
+msgstr "Esta transición de Workflow:"
+
+msgid ""
+"This action is forbidden. If you think it should be allowed, please contact "
+"the site administrator."
+msgstr ""
+"Esta acción le es prohibida. Si cree que Ud. debería de tener autorización, "
+"favor de contactar al administrador del sitio. "
+
+msgid "This entity type permissions:"
+msgstr "Permisos para este tipo de entidad:"
+
+msgid "Time"
+msgstr "Hora"
+
+msgid "Time_plural"
+msgstr "Horas"
+
+msgid "TrInfo"
+msgstr "Información Transición"
+
+msgid "TrInfo_plural"
+msgstr "Información Transiciones"
+
+msgid "Transition"
+msgstr "Transición"
+
+msgid "Transition_plural"
+msgstr "Transiciones"
+
+msgid "URLs from which content will be imported. You can put one url per line"
+msgstr ""
+"URLs desde el cual el contenido sera importado. Usted puede incluir un URL "
+"por línea."
+
+msgid "Undoable actions"
+msgstr "Acciones irreversibles"
+
+msgid "Undoing"
+msgstr "Deshaciendo"
+
+msgid "UniqueConstraint"
+msgstr "Restricción de Unicidad"
+
+msgid "Unknown source type"
+msgstr "tipo de fuente desconocida"
+
+msgid "Unreachable objects"
+msgstr "Objetos inaccesibles"
+
+#, python-format
+msgid "Updated %(etype)s : %(entity)s"
+msgstr "Se actualizó %(etype)s : %(entity)s"
+
+msgid "Used by:"
+msgstr "Utilizado por :"
+
+msgid "Users and groups management"
+msgstr "Usuarios y grupos de administradores"
+
+msgid "WARNING"
+msgstr "ADVERTENCIA"
+
+msgid "Web server"
+msgstr "Servidor web"
+
+msgid "Workflow"
+msgstr "Workflow"
+
+msgid "Workflow history"
+msgstr "Histórico del Workflow"
+
+msgid "WorkflowTransition"
+msgstr "Transición de Workflow"
+
+msgid "WorkflowTransition_plural"
+msgstr "Transiciones de Workflow"
+
+msgid "Workflow_plural"
+msgstr "work flows"
+
+msgid ""
+"You can either submit a new file using the browse button above, or choose to "
+"remove already uploaded file by checking the \"detach attached file\" check-"
+"box, or edit file content online with the widget below."
+msgstr ""
+"Usted puede proponer un nuevo archivo utilizando el botón\n"
+"\"buscar\" aquí arriba, o eliminar el archivo ya elegido al\n"
+"seleccionar el cuadro \"soltar archivo adjunto\", o editar el contenido\n"
+"del archivo en línea con el componente inferior."
+
+msgid ""
+"You can either submit a new file using the browse button above, or edit file "
+"content online with the widget below."
+msgstr ""
+"Puede proponer un nuevo archivo utilizando el botón buscar \n"
+"\"buscar\" en la parte superior, o editar el contenido del archivo en línea\n"
+"en el campo siguiente."
+
+msgid "You can't change this relation"
+msgstr "Usted no puede modificar esta relación"
+
+msgid "You cannot remove the system source"
+msgstr "Usted no puede eliminar la fuente sistema"
+
+msgid "You cannot rename the system source"
+msgstr "Usted no puede Renombrar la fuente sistema"
+
+msgid ""
+"You have no access to this view or it can not be used to display the current "
+"data."
+msgstr ""
+"No tiene permisos para accesar esta vista o No puede utilizarse para "
+"desplegar los datos seleccionados."
+
+msgid ""
+"You're not authorized to access this page. If you think you should, please "
+"contact the site administrator."
+msgstr ""
+"Usted no esta autorizado a acceder a esta página. Si Usted cree que \n"
+"hay un error, favor de contactar al administrador del Sistema."
+
+#, python-format
+msgid "[%s supervision] changes summary"
+msgstr "[%s supervision] descripción de cambios"
+
+msgid ""
+"a RQL expression which should return some results, else the transition won't "
+"be available. This query may use X and U variables that will respectivly "
+"represents the current entity and the current user."
+msgstr ""
+"una expresión RQL que debe haber enviado resultados, para que la transición "
+"pueda ser realizada. Esta expresión puede utilizar las variables X y U que "
+"representan respectivamente la entidad en transición y el usuario actual. "
+
+msgid "a URI representing an object in external data store"
+msgstr "una URI designando un objeto en un repositorio de datos externo"
+
+msgid "a float is expected"
+msgstr "un número flotante es requerido"
+
+msgid "a number (in seconds) or 20s, 10min, 24h or 4d are expected"
+msgstr "se espera un número (en segundos) ó 20s, 10min, 24h ó 4d "
+
+msgid ""
+"a simple cache entity characterized by a name and a validity date. The "
+"target application is responsible for updating timestamp when necessary to "
+"invalidate the cache (typically in hooks). Also, checkout the AppObject."
+"get_cache() method."
+msgstr ""
+"un caché simple caracterizado por un nombre y una fecha de validez. Es\n"
+"el código de la instancia quién es responsable de actualizar la fecha de\n"
+"validez mientras el caché debe ser invalidado (en general en un hook).\n"
+"Para recuperar un caché, hace falta utilizar el método\n"
+"get_cache(cachename)."
+
+msgid "abstract base class for transitions"
+msgstr "Clase de base abstracta para la transiciones"
+
+msgid "action menu"
+msgstr "acciones"
+
+msgid "action(s) on this selection"
+msgstr "Acción(es) en esta selección"
+
+msgid "actions"
+msgstr "Acciones"
+
+msgid "activate"
+msgstr "Activar"
+
+msgid "activated"
+msgstr "Activado"
+
+msgid "add"
+msgstr "Agregar"
+
+msgid "add Bookmark bookmarked_by CWUser object"
+msgstr "Agregar a los favoritos "
+
+msgid "add CWAttribute add_permission RQLExpression subject"
+msgstr "Expresión RQL de agregación"
+
+msgid "add CWAttribute constrained_by CWConstraint subject"
+msgstr "Restricción"
+
+msgid "add CWAttribute read_permission RQLExpression subject"
+msgstr "Expresión RQL de lectura"
+
+msgid "add CWAttribute relation_type CWRType object"
+msgstr "Definición de atributo"
+
+msgid "add CWAttribute update_permission RQLExpression subject"
+msgstr "Permiso de actualización"
+
+msgid "add CWComputedRType read_permission RQLExpression subject"
+msgstr ""
+
+msgid "add CWEType add_permission RQLExpression subject"
+msgstr "Expresión RQL de agregación"
+
+msgid "add CWEType delete_permission RQLExpression subject"
+msgstr "Expresión RQL de eliminación"
+
+msgid "add CWEType read_permission RQLExpression subject"
+msgstr "Expresión RQL de lectura"
+
+msgid "add CWEType update_permission RQLExpression subject"
+msgstr "Definir una expresión RQL de actualización"
+
+msgid "add CWProperty for_user CWUser object"
+msgstr "Propiedad"
+
+msgid "add CWRelation add_permission RQLExpression subject"
+msgstr "Expresión RQL de agregar"
+
+msgid "add CWRelation constrained_by CWConstraint subject"
+msgstr "Restricción"
+
+msgid "add CWRelation delete_permission RQLExpression subject"
+msgstr "Expresión RQL de supresión"
+
+msgid "add CWRelation read_permission RQLExpression subject"
+msgstr "Expresión RQL de lectura"
+
+msgid "add CWRelation relation_type CWRType object"
+msgstr "Definición de relación"
+
+msgid "add CWSourceHostConfig cw_host_config_of CWSource object"
+msgstr "configuración del host"
+
+msgid "add CWUniqueTogetherConstraint constraint_of CWEType object"
+msgstr "restricción de singularidad"
+
+msgid "add CWUser in_group CWGroup object"
+msgstr "Usuario"
+
+msgid "add CWUser use_email EmailAddress subject"
+msgstr "Email"
+
+msgid "add State allowed_transition Transition object"
+msgstr "Estado en entrada"
+
+msgid "add State allowed_transition Transition subject"
+msgstr "Transición en salida"
+
+msgid "add State allowed_transition WorkflowTransition subject"
+msgstr "Transición workflow en salida"
+
+msgid "add State state_of Workflow object"
+msgstr "Estado"
+
+msgid "add Transition condition RQLExpression subject"
+msgstr "Restricción"
+
+msgid "add Transition destination_state State object"
+msgstr "Transición de entrada"
+
+msgid "add Transition destination_state State subject"
+msgstr "Estado de salida"
+
+msgid "add Transition transition_of Workflow object"
+msgstr "Transición"
+
+msgid "add WorkflowTransition condition RQLExpression subject"
+msgstr "Condición"
+
+msgid "add WorkflowTransition subworkflow_exit SubWorkflowExitPoint subject"
+msgstr "Salida de sub-workflow"
+
+msgid "add WorkflowTransition transition_of Workflow object"
+msgstr "Transición Workflow"
+
+msgid "add a BaseTransition"
+msgstr ""
+
+msgid "add a Bookmark"
+msgstr ""
+
+msgid "add a CWAttribute"
+msgstr ""
+
+msgid "add a CWCache"
+msgstr ""
+
+msgid "add a CWComputedRType"
+msgstr ""
+
+msgid "add a CWConstraint"
+msgstr ""
+
+msgid "add a CWConstraintType"
+msgstr ""
+
+msgid "add a CWDataImport"
+msgstr ""
+
+msgid "add a CWEType"
+msgstr ""
+
+msgctxt "inlined:CWRelation.from_entity.subject"
+msgid "add a CWEType"
+msgstr "Agregar un tipo de entidad"
+
+msgctxt "inlined:CWRelation.to_entity.subject"
+msgid "add a CWEType"
+msgstr "Agregar un tipo de entidad"
+
+msgid "add a CWGroup"
+msgstr ""
+
+msgid "add a CWProperty"
+msgstr ""
+
+msgid "add a CWRType"
+msgstr ""
+
+msgctxt "inlined:CWRelation.relation_type.subject"
+msgid "add a CWRType"
+msgstr "Agregar un tipo de relación"
+
+msgid "add a CWRelation"
+msgstr ""
+
+msgid "add a CWSource"
+msgstr ""
+
+msgid "add a CWSourceHostConfig"
+msgstr ""
+
+msgid "add a CWSourceSchemaConfig"
+msgstr ""
+
+msgid "add a CWUniqueTogetherConstraint"
+msgstr ""
+
+msgid "add a CWUser"
+msgstr ""
+
+msgid "add a EmailAddress"
+msgstr ""
+
+msgctxt "inlined:CWUser.use_email.subject"
+msgid "add a EmailAddress"
+msgstr "Agregar correo electrónico"
+
+msgid "add a ExternalUri"
+msgstr ""
+
+msgid "add a RQLExpression"
+msgstr ""
+
+msgid "add a State"
+msgstr ""
+
+msgid "add a SubWorkflowExitPoint"
+msgstr ""
+
+msgid "add a TrInfo"
+msgstr ""
+
+msgid "add a Transition"
+msgstr ""
+
+msgid "add a Workflow"
+msgstr ""
+
+msgid "add a WorkflowTransition"
+msgstr ""
+
+# subject and object forms for each relation type
+# (no object form for final relation types)
+msgid "add_permission"
+msgstr "Autorización para agregar"
+
+msgctxt "CWAttribute"
+msgid "add_permission"
+msgstr "Permiso de agregar"
+
+# subject and object forms for each relation type
+# (no object form for final relation types)
+msgctxt "CWEType"
+msgid "add_permission"
+msgstr "Permiso de agregar"
+
+msgctxt "CWRelation"
+msgid "add_permission"
+msgstr "Permiso de agregar"
+
+msgid "add_permission_object"
+msgstr "tiene permiso de agregar"
+
+msgctxt "CWGroup"
+msgid "add_permission_object"
+msgstr "tiene permiso de agregar"
+
+msgctxt "RQLExpression"
+msgid "add_permission_object"
+msgstr "tiene permiso de agregar"
+
+msgid "add_relation"
+msgstr "agregar"
+
+#, python-format
+msgid "added %(etype)s #%(eid)s (%(title)s)"
+msgstr "se agregó %(etype)s #%(eid)s (%(title)s)"
+
+#, python-format
+msgid ""
+"added relation %(rtype)s from %(frometype)s #%(eidfrom)s to %(toetype)s #"
+"%(eidto)s"
+msgstr ""
+"la relación %(rtype)s de %(frometype)s #%(eidfrom)s a %(toetype)s #%(eidto)s "
+"ha sido agregada"
+
+msgid "additional type specific properties"
+msgstr "propiedades adicionales específicas al tipo"
+
+msgid "addrelated"
+msgstr "Agregar"
+
+msgid "address"
+msgstr "correo electrónico"
+
+msgctxt "EmailAddress"
+msgid "address"
+msgstr "correo electrónico"
+
+msgid "alias"
+msgstr "alias"
+
+msgctxt "EmailAddress"
+msgid "alias"
+msgstr "alias"
+
+msgid "allow to set a specific workflow for an entity"
+msgstr "permite definir un Workflow específico para una entidad"
+
+msgid "allowed options depends on the source type"
+msgstr "las opciones permitidas dependen del tipo de fuente"
+
+msgid "allowed transitions from this state"
+msgstr "transiciones autorizadas desde este estado"
+
+#, python-format
+msgid "allowed values for \"action\" are %s"
+msgstr "los valores permitidos por \"acción\" son %s"
+
+msgid "allowed_transition"
+msgstr "transiciones autorizadas"
+
+msgctxt "State"
+msgid "allowed_transition"
+msgstr "transiciones autorizadas"
+
+msgid "allowed_transition_object"
+msgstr "Estados de entrada"
+
+msgctxt "BaseTransition"
+msgid "allowed_transition_object"
+msgstr "transición autorizada de"
+
+msgctxt "Transition"
+msgid "allowed_transition_object"
+msgstr "transición autorizada de"
+
+msgctxt "WorkflowTransition"
+msgid "allowed_transition_object"
+msgstr "transición autorizada de"
+
+msgid "an electronic mail address associated to a short alias"
+msgstr "una dirección electrónica asociada a este alias"
+
+msgid "an error occurred"
+msgstr "Ha ocurrido un error"
+
+msgid "an error occurred while processing your request"
+msgstr "un error ocurrió al procesar su demanda"
+
+msgid "an error occurred, the request cannot be fulfilled"
+msgstr "un error ha ocurrido, la búsqueda no ha podido ser realizada"
+
+msgid "an integer is expected"
+msgstr "un número entero es esperado"
+
+msgid "and linked"
+msgstr "y relacionada"
+
+msgid "and/or between different values"
+msgstr "y/o entre los diferentes valores"
+
+msgid "anyrsetview"
+msgstr "vistas rset"
+
+msgid "april"
+msgstr "Abril"
+
+#, python-format
+msgid "archive for %(author)s"
+msgstr "archivo de %(author)s"
+
+#, python-format
+msgid "archive for %(month)s/%(year)s"
+msgstr "archivo del %(month)s/%(year)s"
+
+#, python-format
+msgid "at least one relation %(rtype)s is required on %(etype)s (%(eid)s)"
+msgstr ""
+"La entidad #%(eid)s de tipo %(etype)s debe necesariamente tener al menos una "
+"relación de tipo %(rtype)s"
+
+msgid "attribute"
+msgstr "Atributo"
+
+msgid "august"
+msgstr "Agosto"
+
+msgid "authentication failure"
+msgstr "Usuario o contraseña incorrecta"
+
+msgid "auto"
+msgstr "Automático"
+
+msgid "autocomputed attribute used to ensure transition coherency"
+msgstr ""
+"Atributo automatizado usado para asegurar la coherencia en la transición"
+
+msgid "automatic"
+msgstr "Automático"
+
+#, python-format
+msgid "back to pagination (%s results)"
+msgstr "regresar a paginación (%s resultados)"
+
+msgid "bad value"
+msgstr "Valor erróneo"
+
+msgid "badly formatted url"
+msgstr "url mal formateado"
+
+msgid "base url"
+msgstr "Url de base"
+
+msgid "bookmark has been removed"
+msgstr "Ha sido eliminado de sus favoritos"
+
+msgid "bookmark this page"
+msgstr "Agregar esta página a los favoritos"
+
+msgid "bookmark this search"
+msgstr "Guardar esta búsqueda"
+
+msgid "bookmarked_by"
+msgstr "está en los Favoritos de"
+
+msgctxt "Bookmark"
+msgid "bookmarked_by"
+msgstr "está en los Favoritos de"
+
+msgid "bookmarked_by_object"
+msgstr "tiene como Favoritos"
+
+msgctxt "CWUser"
+msgid "bookmarked_by_object"
+msgstr "tiene como Favoritos"
+
+msgid "bookmarks"
+msgstr "Favoritos"
+
+msgid "bookmarks are used to have user's specific internal links"
+msgstr "los Favoritos son ligas directas a espacios guardados por el usuario"
+
+msgid "boxes"
+msgstr "Cajas"
+
+msgid "bug report sent"
+msgstr "Reporte de error enviado"
+
+msgid "button_apply"
+msgstr "Aplicar"
+
+msgid "button_cancel"
+msgstr "Cancelar"
+
+msgid "button_delete"
+msgstr "Eliminar"
+
+msgid "button_ok"
+msgstr "Validar"
+
+msgid "by"
+msgstr "por"
+
+msgid "by relation"
+msgstr "por la relación"
+
+msgid "by_transition"
+msgstr "transición"
+
+msgctxt "TrInfo"
+msgid "by_transition"
+msgstr "transición"
+
+msgid "by_transition_object"
+msgstr "cambio de estados"
+
+msgctxt "BaseTransition"
+msgid "by_transition_object"
+msgstr "tiene como información"
+
+msgctxt "Transition"
+msgid "by_transition_object"
+msgstr "tiene como información"
+
+msgctxt "WorkflowTransition"
+msgid "by_transition_object"
+msgstr "tiene como información"
+
+msgid "calendar"
+msgstr "mostrar un calendario"
+
+msgid "can not resolve entity types:"
+msgstr "Imposible de interpretar los tipos de entidades:"
+
+msgid "can only have one url"
+msgstr "solo puede tener un URL"
+
+msgid "can't be changed"
+msgstr "No puede ser modificado"
+
+msgid "can't be deleted"
+msgstr "No puede ser eliminado"
+
+msgid "can't change this attribute"
+msgstr "no puede modificar este atributo"
+
+#, python-format
+msgid "can't display data, unexpected error: %s"
+msgstr "imposible de mostrar los datos, a causa del siguiente error: %s"
+
+msgid "can't have multiple exits on the same state"
+msgstr "no puede tener varias salidas en el mismo estado"
+
+#, python-format
+msgid "can't parse %(value)r (expected %(format)s)"
+msgstr "no puede analizar %(value)r (formato requerido : %(format)s)"
+
+#, python-format
+msgid ""
+"can't restore entity %(eid)s of type %(eschema)s, target of %(rtype)s (eid "
+"%(value)s) does not exist any longer"
+msgstr ""
+"no se pudo restaurar la entidad %(eid)s del tipo %(eschema)s, objetivo de "
+"%(rtype)s (eid %(value)s) pues ésta ya no existe"
+
+#, python-format
+msgid ""
+"can't restore relation %(rtype)s of entity %(eid)s, this relation does not "
+"exist in the schema anymore."
+msgstr ""
+"no se pudo restaurar la relación %(rtype)s de la entidad %(eid)s, esta "
+"relación ya no existe en el esquema. "
+
+#, python-format
+msgid "can't restore state of entity %s, it has been deleted inbetween"
+msgstr ""
+"no se puede restaurar el estado de la entidad %s, se ha borrado desde "
+"entonces"
+
+#, python-format
+msgid ""
+"can't set inlined=True, %(stype)s %(rtype)s %(otype)s has cardinality="
+"%(card)s"
+msgstr ""
+"no puede poner 'inlined' = True, %(stype)s %(rtype)s %(otype)s tiene "
+"cardinalidad %(card)s"
+
+msgid "cancel"
+msgstr "anular"
+
+msgid "cancel select"
+msgstr "Cancelar la selección"
+
+msgid "cancel this insert"
+msgstr "Cancelar esta inserción"
+
+msgid "cardinality"
+msgstr "cardinalidad"
+
+msgctxt "CWAttribute"
+msgid "cardinality"
+msgstr "cardinalidad"
+
+msgctxt "CWRelation"
+msgid "cardinality"
+msgstr "cardinalidad"
+
+msgid "category"
+msgstr "categoría"
+
+#, python-format
+msgid "changed state of %(etype)s #%(eid)s (%(title)s)"
+msgstr "Cambiar del estado de %(etype)s #%(eid)s (%(title)s)"
+
+msgid "changes applied"
+msgstr "Cambios realizados"
+
+msgid "click here to see created entity"
+msgstr "Ver la entidad creada"
+
+msgid "click here to see edited entity"
+msgstr "seleccione aquí para ver la entidad modificada"
+
+msgid "click on the box to cancel the deletion"
+msgstr "Seleccione la zona de edición para cancelar la eliminación"
+
+msgid "click to add a value"
+msgstr "seleccione para agregar un valor"
+
+msgid "click to delete this value"
+msgstr "seleccione para eliminar este valor"
+
+msgid "click to edit this field"
+msgstr "seleccione para editar este campo"
+
+msgid "close all"
+msgstr "cerrar todos"
+
+msgid "comment"
+msgstr "Comentario"
+
+msgctxt "TrInfo"
+msgid "comment"
+msgstr "Comentario"
+
+msgid "comment_format"
+msgstr "Formato"
+
+msgctxt "TrInfo"
+msgid "comment_format"
+msgstr "Formato"
+
+msgid "components"
+msgstr "Componentes"
+
+msgid "components_navigation"
+msgstr "Navegación por página"
+
+msgid "components_navigation_description"
+msgstr ""
+"Componente que permite presentar en varias páginas los resultados de "
+"búsqueda cuando son mayores a un número predeterminado "
+
+msgid "components_rqlinput"
+msgstr "Barra RQL"
+
+msgid "components_rqlinput_description"
+msgstr "La barra para realizar consultas en RQL, en el encabezado de página"
+
+msgid "composite"
+msgstr "composite"
+
+msgctxt "CWRelation"
+msgid "composite"
+msgstr "composite"
+
+msgid "condition"
+msgstr "condición"
+
+msgctxt "BaseTransition"
+msgid "condition"
+msgstr "condición"
+
+msgctxt "Transition"
+msgid "condition"
+msgstr "condición"
+
+msgctxt "WorkflowTransition"
+msgid "condition"
+msgstr "condición"
+
+msgid "condition_object"
+msgstr "condición de"
+
+msgctxt "RQLExpression"
+msgid "condition_object"
+msgstr "condición de"
+
+msgid "conditions"
+msgstr "condiciones"
+
+msgid "config"
+msgstr "configuración"
+
+msgctxt "CWSource"
+msgid "config"
+msgstr "configuración"
+
+msgctxt "CWSourceHostConfig"
+msgid "config"
+msgstr "configuración"
+
+msgid "config mode"
+msgstr "Modo de configuración"
+
+msgid "config type"
+msgstr "Tipo de configuración"
+
+msgid "confirm password"
+msgstr "Confirmar contraseña"
+
+msgid "constrained_by"
+msgstr "Restricción impuesta por"
+
+msgctxt "CWAttribute"
+msgid "constrained_by"
+msgstr "Restricción impuesta por"
+
+msgctxt "CWRelation"
+msgid "constrained_by"
+msgstr "Restricción impuesta por"
+
+msgid "constrained_by_object"
+msgstr "Restricción de"
+
+msgctxt "CWConstraint"
+msgid "constrained_by_object"
+msgstr "Restricción de"
+
+msgid "constraint factory"
+msgstr "Fábrica de restricciones"
+
+msgid "constraint_of"
+msgstr "restricción de"
+
+msgctxt "CWUniqueTogetherConstraint"
+msgid "constraint_of"
+msgstr "restricción de"
+
+msgid "constraint_of_object"
+msgstr "restringida por"
+
+msgctxt "CWEType"
+msgid "constraint_of_object"
+msgstr "restringida por"
+
+msgid "constraints"
+msgstr "Restricciones"
+
+msgid "constraints applying on this relation"
+msgstr "Restricciones que se aplican a esta relación"
+
+msgid "content type"
+msgstr "tipo MIME"
+
+msgid "context"
+msgstr "Contexto"
+
+msgid "context where this box should be displayed"
+msgstr "Contexto en el cual la caja debe aparecer en el sistema"
+
+msgid "context where this component should be displayed"
+msgstr "Contexto en el cual el componente debe aparecer en el sistema"
+
+msgid "context where this facet should be displayed, leave empty for both"
+msgstr ""
+"Contexto en el cual esta faceta debe ser mostrada, dejar vacía para ambos"
+
+msgid "control subject entity's relations order"
+msgstr "Controla el orden de relaciones de la entidad sujeto"
+
+msgid "copy"
+msgstr "Copiar"
+
+msgid "core relation indicating a user's groups"
+msgstr ""
+"Relación sistema que indica los grupos a los cuales pertenece un usuario"
+
+msgid ""
+"core relation indicating owners of an entity. This relation implicitly put "
+"the owner into the owners group for the entity"
+msgstr ""
+"Relación sistema que indica el(los) propietario(s) de una entidad. Esta "
+"relación pone de manera implícita al propietario en el grupo de propietarios "
+"de una entidad."
+
+msgid "core relation indicating the original creator of an entity"
+msgstr "Relación sistema que indica el creador de una entidad."
+
+msgid "core relation indicating the type of an entity"
+msgstr "Relación sistema que indica el tipo de entidad."
+
+msgid ""
+"core relation indicating the types (including specialized types) of an entity"
+msgstr ""
+"Relación sistema indicando los tipos (incluídos los tipos padres) de una "
+"entidad"
+
+msgid "could not connect to the SMTP server"
+msgstr "Imposible de conectarse al servidor SMTP"
+
+msgid "create an index for quick search on this attribute"
+msgstr "Crear un índice para acelerar las búsquedas sobre este atributo"
+
+msgid "created on"
+msgstr "creado el"
+
+msgid "created_by"
+msgstr "creado por"
+
+msgid "created_by_object"
+msgstr "ha creado"
+
+msgid "creating Bookmark (Bookmark bookmarked_by CWUser %(linkto)s)"
+msgstr "Creando Favorito"
+
+msgid "creating CWAttribute (CWAttribute relation_type CWRType %(linkto)s)"
+msgstr "Creación del atributo %(linkto)s"
+
+msgid ""
+"creating CWConstraint (CWAttribute %(linkto)s constrained_by CWConstraint)"
+msgstr "Creación condicionada por el atributo %(linkto)s"
+
+msgid ""
+"creating CWConstraint (CWRelation %(linkto)s constrained_by CWConstraint)"
+msgstr "Creación condicionada por la relación %(linkto)s"
+
+msgid "creating CWProperty (CWProperty for_user CWUser %(linkto)s)"
+msgstr "Creación de una propiedad por el usuario %(linkto)s"
+
+msgid "creating CWRelation (CWRelation relation_type CWRType %(linkto)s)"
+msgstr "Creación de la relación %(linkto)s"
+
+msgid ""
+"creating CWSourceHostConfig (CWSourceHostConfig cw_host_config_of CWSource "
+"%(linkto)s)"
+msgstr "creación de una configuración host para la fuente %(linkto)s"
+
+msgid ""
+"creating CWUniqueTogetherConstraint (CWUniqueTogetherConstraint "
+"constraint_of CWEType %(linkto)s)"
+msgstr "creación de una restricción de singularidad en %(linkto)s"
+
+msgid "creating CWUser (CWUser in_group CWGroup %(linkto)s)"
+msgstr "Creación de un usuario para agregar al grupo %(linkto)s"
+
+msgid "creating EmailAddress (CWUser %(linkto)s use_email EmailAddress)"
+msgstr "Creación de una dirección electrónica para el usuario %(linkto)s"
+
+msgid ""
+"creating RQLExpression (CWAttribute %(linkto)s add_permission RQLExpression)"
+msgstr "Creación de una expresión RQL para permitir agregar %(linkto)s"
+
+msgid ""
+"creating RQLExpression (CWAttribute %(linkto)s read_permission RQLExpression)"
+msgstr "creación de una expresión RQL por el derecho de lectura de %(linkto)s"
+
+msgid ""
+"creating RQLExpression (CWAttribute %(linkto)s update_permission "
+"RQLExpression)"
+msgstr ""
+"creación de una expresión RQL por el derecho de actualización de %(linkto)s"
+
+msgid ""
+"creating RQLExpression (CWComputedRType %(linkto)s read_permission "
+"RQLExpression)"
+msgstr ""
+
+msgid ""
+"creating RQLExpression (CWEType %(linkto)s add_permission RQLExpression)"
+msgstr ""
+"Creación de una expresión RQL para la autorización de agregar %(linkto)s"
+
+msgid ""
+"creating RQLExpression (CWEType %(linkto)s delete_permission RQLExpression)"
+msgstr ""
+"Creación de una expresión RQL para la autorización de eliminar %(linkto)s"
+
+msgid ""
+"creating RQLExpression (CWEType %(linkto)s read_permission RQLExpression)"
+msgstr "Creación de una expresión RQL para permitir leer %(linkto)s"
+
+msgid ""
+"creating RQLExpression (CWEType %(linkto)s update_permission RQLExpression)"
+msgstr "Creación de una expresión RQL para permitir actualizar %(linkto)s"
+
+msgid ""
+"creating RQLExpression (CWRelation %(linkto)s add_permission RQLExpression)"
+msgstr "Creación de una expresión RQL para permitir agregar %(linkto)s"
+
+msgid ""
+"creating RQLExpression (CWRelation %(linkto)s delete_permission "
+"RQLExpression)"
+msgstr "Creación de una expresión RQL para permitir eliminar %(linkto)s"
+
+msgid ""
+"creating RQLExpression (CWRelation %(linkto)s read_permission RQLExpression)"
+msgstr "Creación de una expresión RQL para permitir leer %(linkto)s"
+
+msgid "creating RQLExpression (Transition %(linkto)s condition RQLExpression)"
+msgstr "Creación de una expresión RQL para la transición %(linkto)s"
+
+msgid ""
+"creating RQLExpression (WorkflowTransition %(linkto)s condition "
+"RQLExpression)"
+msgstr "Creación de una expresión RQL para la transición Workflow %(linkto)s"
+
+msgid "creating State (State allowed_transition Transition %(linkto)s)"
+msgstr "Creación de un estado que puede ir hacia la transición %(linkto)s"
+
+msgid "creating State (State state_of Workflow %(linkto)s)"
+msgstr "Creando un Estado del Workflow"
+
+msgid "creating State (Transition %(linkto)s destination_state State)"
+msgstr "Creación de un Estado Destinación de la Transición %(linkto)s"
+
+msgid ""
+"creating SubWorkflowExitPoint (WorkflowTransition %(linkto)s "
+"subworkflow_exit SubWorkflowExitPoint)"
+msgstr "creación de un punto de Salida de la Transición Workflow %(linkto)s"
+
+msgid "creating Transition (State %(linkto)s allowed_transition Transition)"
+msgstr "Creación de una transición autorizada desde el Estado %(linkto)s"
+
+msgid "creating Transition (Transition destination_state State %(linkto)s)"
+msgstr "Creación de una transición hacia el Estado %(linkto)s"
+
+msgid "creating Transition (Transition transition_of Workflow %(linkto)s)"
+msgstr "Creación de una Transición Workflow %(linkto)s"
+
+msgid ""
+"creating WorkflowTransition (State %(linkto)s allowed_transition "
+"WorkflowTransition)"
+msgstr ""
+"Creación de una Transición Workflow permitida desde el estado %(linkto)s"
+
+msgid ""
+"creating WorkflowTransition (WorkflowTransition transition_of Workflow "
+"%(linkto)s)"
+msgstr "Creación de una Transición Workflow del Workflow %(linkto)s"
+
+msgid "creation"
+msgstr "Creación"
+
+msgid "creation date"
+msgstr "Fecha de Creación"
+
+msgid "creation time of an entity"
+msgstr "Fecha de creación de una entidad"
+
+msgid "creation_date"
+msgstr "Fecha de Creación"
+
+msgid "cstrtype"
+msgstr "Tipo de restricción"
+
+msgctxt "CWConstraint"
+msgid "cstrtype"
+msgstr "Tipo"
+
+msgid "cstrtype_object"
+msgstr "utilizado por"
+
+msgctxt "CWConstraintType"
+msgid "cstrtype_object"
+msgstr "Tipo de restricciones"
+
+msgid "csv export"
+msgstr "Exportar en CSV"
+
+msgid "csv export (entities)"
+msgstr "Exportar a CSV (entidades)"
+
+msgid "ctxcomponents"
+msgstr "Componentes contextuales"
+
+msgid "ctxcomponents_anonuserlink"
+msgstr "Liga usuario"
+
+msgid "ctxcomponents_anonuserlink_description"
+msgstr ""
+"Muestra un enlace hacia el formulario de conexión para los usuarios "
+"anónimos, o una caja que contiene los enlaces del usuario conectado. "
+
+msgid "ctxcomponents_appliname"
+msgstr "Nombre de la aplicación"
+
+msgid "ctxcomponents_appliname_description"
+msgstr "Muestra el nombre de la aplicación en el encabezado de la página"
+
+msgid "ctxcomponents_bookmarks_box"
+msgstr "Caja de Favoritos"
+
+msgid "ctxcomponents_bookmarks_box_description"
+msgstr "Muestra y permite administrar los favoritos del usuario"
+
+msgid "ctxcomponents_breadcrumbs"
+msgstr "Ruta de Navegación"
+
+msgid "ctxcomponents_breadcrumbs_description"
+msgstr "Muestra la ruta que permite localizar la página actual en el Sistema"
+
+msgid "ctxcomponents_download_box"
+msgstr "Configuración de caja de descargas"
+
+msgid "ctxcomponents_download_box_description"
+msgstr "Caja que contiene los elementos descargados"
+
+msgid "ctxcomponents_edit_box"
+msgstr "Caja de Acciones"
+
+msgid "ctxcomponents_edit_box_description"
+msgstr "Muestra las acciones posibles a ejecutar para los datos seleccionados"
+
+msgid "ctxcomponents_facet.filterbox"
+msgstr "Filtros"
+
+msgid "ctxcomponents_facet.filterbox_description"
+msgstr "Muestra los filtros aplicables a una búsqueda realizada"
+
+msgid "ctxcomponents_logo"
+msgstr "logo"
+
+msgid "ctxcomponents_logo_description"
+msgstr "El logo de la aplicación, en el encabezado de página"
+
+msgid "ctxcomponents_metadata"
+msgstr "Metadatos de la Entidad"
+
+msgid "ctxcomponents_metadata_description"
+msgstr "espacio que incluye los metadatos de la entidad actual"
+
+msgid "ctxcomponents_possible_views_box"
+msgstr "Caja de Vistas Posibles"
+
+msgid "ctxcomponents_possible_views_box_description"
+msgstr "Muestra las vistas posibles a aplicar a los datos seleccionados"
+
+msgid "ctxcomponents_prevnext"
+msgstr "Elemento anterior / siguiente"
+
+msgid "ctxcomponents_prevnext_description"
+msgstr ""
+"Muestra las ligas que permiten pasar de una entidad a otra en las entidades "
+"que implementan la interfaz \"anterior/siguiente\"."
+
+msgid "ctxcomponents_rss"
+msgstr "Ícono RSS"
+
+msgid "ctxcomponents_rss_description"
+msgstr "Muestra el ícono RSS para vistas RSS"
+
+msgid "ctxcomponents_search_box"
+msgstr "Caja de búsqueda"
+
+msgid "ctxcomponents_search_box_description"
+msgstr ""
+"Permite realizar una búsqueda simple para cualquier tipo de dato en la "
+"aplicación"
+
+msgid "ctxcomponents_startup_views_box"
+msgstr "Caja Vistas de inicio"
+
+msgid "ctxcomponents_startup_views_box_description"
+msgstr "Muestra las vistas de inicio de la aplicación"
+
+msgid "ctxcomponents_userstatus"
+msgstr "estado del usuario"
+
+msgid "ctxcomponents_userstatus_description"
+msgstr "establece el estado del usuario"
+
+msgid "ctxcomponents_wfhistory"
+msgstr "Histórico del workflow."
+
+msgid "ctxcomponents_wfhistory_description"
+msgstr ""
+"Sección que muestra el reporte histórico de las transiciones del workflow. "
+"Aplica solo en entidades con workflow."
+
+msgid "ctxtoolbar"
+msgstr "Barra de herramientas"
+
+msgid "custom_workflow"
+msgstr "Workflow específico"
+
+msgid "custom_workflow_object"
+msgstr "Workflow de"
+
+msgid "cw.groups-management"
+msgstr "grupos"
+
+msgid "cw.users-management"
+msgstr "usuarios"
+
+msgid "cw_for_source"
+msgstr "fuente"
+
+msgctxt "CWSourceSchemaConfig"
+msgid "cw_for_source"
+msgstr "fuente"
+
+msgid "cw_for_source_object"
+msgstr "elemento de mapeo"
+
+msgctxt "CWSource"
+msgid "cw_for_source_object"
+msgstr "elemento de mapeo"
+
+msgid "cw_host_config_of"
+msgstr "configuración del host de"
+
+msgctxt "CWSourceHostConfig"
+msgid "cw_host_config_of"
+msgstr "configuración del host de"
+
+msgid "cw_host_config_of_object"
+msgstr "tiene la configuración del host"
+
+msgctxt "CWSource"
+msgid "cw_host_config_of_object"
+msgstr "tiene la configuración del host"
+
+msgid "cw_import_of"
+msgstr "fuente"
+
+msgctxt "CWDataImport"
+msgid "cw_import_of"
+msgstr "fuente"
+
+msgid "cw_import_of_object"
+msgstr "importación"
+
+msgctxt "CWSource"
+msgid "cw_import_of_object"
+msgstr "importación"
+
+msgid "cw_schema"
+msgstr "esquema"
+
+msgctxt "CWSourceSchemaConfig"
+msgid "cw_schema"
+msgstr "esquema"
+
+msgid "cw_schema_object"
+msgstr "mapeado por"
+
+msgctxt "CWEType"
+msgid "cw_schema_object"
+msgstr "mapeado por"
+
+msgctxt "CWRType"
+msgid "cw_schema_object"
+msgstr "mapeado por"
+
+msgctxt "CWRelation"
+msgid "cw_schema_object"
+msgstr "mapeado por"
+
+msgid "cw_source"
+msgstr "desde la fuente de datos"
+
+msgid "cw_source_object"
+msgstr "entidades"
+
+msgid "cwetype-box"
+msgstr "Vista \"caja\""
+
+msgid "cwetype-description"
+msgstr "Descripción"
+
+msgid "cwetype-permissions"
+msgstr "Permisos"
+
+msgid "cwetype-views"
+msgstr "Vistas"
+
+msgid "cwetype-workflow"
+msgstr "Workflow"
+
+msgid "cwgroup-main"
+msgstr "Descripción"
+
+msgid "cwgroup-permissions"
+msgstr "Permisos"
+
+msgid "cwrtype-description"
+msgstr "Descripción"
+
+msgid "cwrtype-permissions"
+msgstr "Permisos"
+
+msgid "cwsource-imports"
+msgstr "importación"
+
+msgid "cwsource-main"
+msgstr "descripción"
+
+msgid "cwsource-mapping"
+msgstr "mapeo"
+
+msgid "cwuri"
+msgstr "Uri Interna"
+
+msgid "data directory url"
+msgstr "Url del repertorio de datos"
+
+msgid "data model schema"
+msgstr "Esquema del Sistema"
+
+msgid "data sources"
+msgstr "fuente de datos"
+
+msgid "data sources management"
+msgstr "administración de fuentes de datos"
+
+msgid "date"
+msgstr "Fecha"
+
+msgid "deactivate"
+msgstr "Desactivar"
+
+msgid "deactivated"
+msgstr "Desactivado"
+
+msgid "december"
+msgstr "Diciembre"
+
+msgid "default"
+msgstr "Valor por defecto"
+
+msgid "default text format for rich text fields."
+msgstr ""
+"Formato de texto que se utilizará por defecto para los campos de tipo texto"
+
+msgid "default user workflow"
+msgstr "Workflow por defecto de los usuarios"
+
+msgid "default value"
+msgstr "Valor por defecto"
+
+msgid "default value as gziped pickled python object"
+msgstr "valor por defecto, en la forma de objeto python, al usar pickle y gzip"
+
+msgid "default workflow for an entity type"
+msgstr "Workflow por defecto para un tipo de entidad"
+
+msgid "default_workflow"
+msgstr "Workflow por defecto"
+
+msgctxt "CWEType"
+msgid "default_workflow"
+msgstr "Workflow por defecto"
+
+msgid "default_workflow_object"
+msgstr "Workflow por defecto de"
+
+msgctxt "Workflow"
+msgid "default_workflow_object"
+msgstr "Workflow por defecto de"
+
+msgid "defaultval"
+msgstr "Valor por defecto"
+
+msgctxt "CWAttribute"
+msgid "defaultval"
+msgstr "Valor por defecto"
+
+msgid "define a CubicWeb user"
+msgstr "Define un usuario CubicWeb"
+
+msgid "define a CubicWeb users group"
+msgstr "Define un grupo de usuarios CubicWeb"
+
+msgid ""
+"define a final relation: link a final relation type from a non final entity "
+"to a final entity type. used to build the instance schema"
+msgstr ""
+"Define una relación final: liga un tipo de relación final desde una entidad "
+"NO final hacia un tipo de entidad final. Se usa para crear el esquema de la "
+"instancia."
+
+msgid ""
+"define a non final relation: link a non final relation type from a non final "
+"entity to a non final entity type. used to build the instance schema"
+msgstr ""
+"Define una relación NO final: liga un tipo de relación NO final desde una "
+"entidad NO final hacia un tipo de entidad NO final. Se usa para crear el "
+"esquema de la instancia."
+
+msgid "define a relation type, used to build the instance schema"
+msgstr ""
+"Define un tipo de relación, usado para construir el esquema de la instancia."
+
+msgid "define a rql expression used to define permissions"
+msgstr "Expresión RQL utilizada para definir los derechos de acceso"
+
+msgid "define a schema constraint"
+msgstr "Define una condición de esquema"
+
+msgid "define a schema constraint type"
+msgstr "Define un tipo de condición de esquema"
+
+msgid "define a virtual relation type, used to build the instance schema"
+msgstr ""
+
+msgid "define an entity type, used to build the instance schema"
+msgstr ""
+"Define un tipo de entidad, usado para construir el esquema de la instancia."
+
+msgid "define how we get out from a sub-workflow"
+msgstr "Define como salir de un sub-Workflow"
+
+msgid "defines a sql-level multicolumn unique index"
+msgstr "define un índice SQL único a través de varias columnas"
+
+msgid ""
+"defines what's the property is applied for. You must select this first to be "
+"able to set value"
+msgstr ""
+"Define a qué se aplica la propiedad. Debe seleccionar esto antes de "
+"establecer un valor"
+
+msgid "delete"
+msgstr "Eliminar"
+
+msgid "delete this bookmark"
+msgstr "Eliminar este favorito"
+
+msgid "delete this relation"
+msgstr "Eliminar esta relación"
+
+msgid "delete_permission"
+msgstr "Permiso de eliminar"
+
+msgctxt "CWEType"
+msgid "delete_permission"
+msgstr "Permiso de eliminar"
+
+msgctxt "CWRelation"
+msgid "delete_permission"
+msgstr "Permiso de eliminar"
+
+msgid "delete_permission_object"
+msgstr "posee permiso para eliminar"
+
+msgctxt "CWGroup"
+msgid "delete_permission_object"
+msgstr "puede eliminar"
+
+msgctxt "RQLExpression"
+msgid "delete_permission_object"
+msgstr "puede eliminar"
+
+#, python-format
+msgid "deleted %(etype)s #%(eid)s (%(title)s)"
+msgstr "Eliminación de la entidad %(etype)s #%(eid)s (%(title)s)"
+
+#, python-format
+msgid ""
+"deleted relation %(rtype)s from %(frometype)s #%(eidfrom)s to %(toetype)s #"
+"%(eidto)s"
+msgstr ""
+"La relación %(rtype)s de %(frometype)s #%(eidfrom)s a %(toetype)s #%(eidto)s "
+"ha sido suprimida."
+
+msgid "depends on the constraint type"
+msgstr "Depende del tipo de restricción"
+
+msgid "description"
+msgstr "Descripción"
+
+msgctxt "BaseTransition"
+msgid "description"
+msgstr "Descripción"
+
+msgctxt "CWAttribute"
+msgid "description"
+msgstr "Descripción"
+
+msgctxt "CWComputedRType"
+msgid "description"
+msgstr ""
+
+msgctxt "CWEType"
+msgid "description"
+msgstr "Descripción"
+
+msgctxt "CWRType"
+msgid "description"
+msgstr "Descripción"
+
+msgctxt "CWRelation"
+msgid "description"
+msgstr "Descripción"
+
+msgctxt "State"
+msgid "description"
+msgstr "Descripción"
+
+msgctxt "Transition"
+msgid "description"
+msgstr "Descripción"
+
+msgctxt "Workflow"
+msgid "description"
+msgstr "Descripción"
+
+msgctxt "WorkflowTransition"
+msgid "description"
+msgstr "Descripción"
+
+msgid "description_format"
+msgstr "Formato"
+
+msgctxt "BaseTransition"
+msgid "description_format"
+msgstr "Formato"
+
+msgctxt "CWAttribute"
+msgid "description_format"
+msgstr "Formato"
+
+msgctxt "CWComputedRType"
+msgid "description_format"
+msgstr ""
+
+msgctxt "CWEType"
+msgid "description_format"
+msgstr "Formato"
+
+msgctxt "CWRType"
+msgid "description_format"
+msgstr "Formato"
+
+msgctxt "CWRelation"
+msgid "description_format"
+msgstr "Formato"
+
+msgctxt "State"
+msgid "description_format"
+msgstr "Formato"
+
+msgctxt "Transition"
+msgid "description_format"
+msgstr "Formato"
+
+msgctxt "Workflow"
+msgid "description_format"
+msgstr "Formato"
+
+msgctxt "WorkflowTransition"
+msgid "description_format"
+msgstr "Formato"
+
+msgid "destination state for this transition"
+msgstr "Estados accesibles para esta transición"
+
+msgid "destination state must be in the same workflow as our parent transition"
+msgstr ""
+"El estado de destino debe pertenecer al mismo Workflow que la transición "
+"padre."
+
+msgid "destination state of a transition"
+msgstr "Estado destino de una transición"
+
+msgid ""
+"destination state. No destination state means that transition should go back "
+"to the state from which we've entered the subworkflow."
+msgstr ""
+"Estado destino de la transición. Si el Estado destino no ha sido "
+"especificado, la transición regresará hacia el estado que tenía la entidad "
+"al entrar en el Sub-Workflow."
+
+msgid "destination_state"
+msgstr "Estado destino"
+
+msgctxt "SubWorkflowExitPoint"
+msgid "destination_state"
+msgstr "Estado destino"
+
+msgctxt "Transition"
+msgid "destination_state"
+msgstr "Estado destino"
+
+msgid "destination_state_object"
+msgstr "Destino de"
+
+msgctxt "State"
+msgid "destination_state_object"
+msgstr "Estado final de"
+
+msgid "detach attached file"
+msgstr "soltar el archivo existente"
+
+msgid "display order of the box"
+msgstr "Orden de aparición de la caja"
+
+msgid "display order of the component"
+msgstr "Orden de aparición del componente"
+
+msgid "display order of the facet"
+msgstr "Orden de aparición de la faceta"
+
+msgid "display the box or not"
+msgstr "Mostrar o no la caja"
+
+msgid "display the component or not"
+msgstr "Mostrar o no el componente"
+
+msgid "display the facet or not"
+msgstr "Mostrar o no la faceta"
+
+msgid "download"
+msgstr "Descargar"
+
+#, python-format
+msgid "download %s"
+msgstr "Descargar %s"
+
+msgid "download icon"
+msgstr "ícono de descarga"
+
+msgid "download schema as owl"
+msgstr "Descargar esquema en formato OWL"
+
+msgid "edit bookmarks"
+msgstr "Editar favoritos"
+
+msgid "editable-table"
+msgstr "Tabla modificable"
+
+msgid "eid"
+msgstr "eid"
+
+msgid "embedded html"
+msgstr "Html incrustado"
+
+msgid "end_timestamp"
+msgstr "horario final"
+
+msgctxt "CWDataImport"
+msgid "end_timestamp"
+msgstr "horario final"
+
+msgid "entities deleted"
+msgstr "Entidades eliminadas"
+
+msgid "entity and relation types can't be mapped, only attributes or relations"
+msgstr ""
+"los tipos de entidad y relación no pueden ser mapeados, solo los atributos y "
+"las relaciones"
+
+msgid "entity copied"
+msgstr "Entidad copiada"
+
+msgid "entity created"
+msgstr "Entidad creada"
+
+msgid "entity creation"
+msgstr "Creación de entidad"
+
+msgid "entity deleted"
+msgstr "Entidad eliminada"
+
+msgid "entity deletion"
+msgstr "Eliminación de entidad"
+
+msgid "entity edited"
+msgstr "Entidad modificada"
+
+msgid "entity has no workflow set"
+msgstr "La entidad no tiene Workflow"
+
+msgid "entity linked"
+msgstr "Entidad asociada"
+
+msgid "entity type"
+msgstr "Tipo de entidad"
+
+msgid "entity types which may use this workflow"
+msgstr "Tipos de entidades que pueden utilizar este Workflow"
+
+msgid "entity update"
+msgstr "Actualización de la Entidad"
+
+msgid "entityview"
+msgstr "vistas de entidades"
+
+msgid "error"
+msgstr "error"
+
+msgid "error while publishing ReST text"
+msgstr ""
+"Se ha producido un error durante la interpretación del texto en formato ReST"
+
+msgid "exit state must be a subworkflow state"
+msgstr "El estado de salida debe de ser un estado del Sub-Workflow"
+
+msgid "exit_point"
+msgstr "Estado de Salida"
+
+msgid "exit_point_object"
+msgstr "Estado de Salida de"
+
+#, python-format
+msgid "exiting from subworkflow %s"
+msgstr "Salida del subworkflow %s"
+
+msgid "expression"
+msgstr "Expresión"
+
+msgctxt "RQLExpression"
+msgid "expression"
+msgstr "RQL de la expresión"
+
+msgid "exprtype"
+msgstr "Tipo de la expresión"
+
+msgctxt "RQLExpression"
+msgid "exprtype"
+msgstr "Tipo"
+
+msgid "extra_props"
+msgstr "propiedades adicionales"
+
+msgctxt "CWAttribute"
+msgid "extra_props"
+msgstr "propiedades adicionales"
+
+msgid "facet-loading-msg"
+msgstr "procesando, espere por favor"
+
+msgid "facet.filters"
+msgstr "Filtros"
+
+msgid "facetbox"
+msgstr "Caja de facetas"
+
+msgid "facets_created_by-facet"
+msgstr "Faceta \"creada por\""
+
+msgid "facets_created_by-facet_description"
+msgstr "Faceta creada por"
+
+msgid "facets_cw_source-facet"
+msgstr "faceta \"fuente de datos\""
+
+msgid "facets_cw_source-facet_description"
+msgstr "fuente de datos"
+
+msgid "facets_cwfinal-facet"
+msgstr "Faceta \"final\""
+
+msgid "facets_cwfinal-facet_description"
+msgstr "Faceta para las entidades \"finales\""
+
+msgid "facets_datafeed.dataimport.status"
+msgstr "estado de la importación"
+
+msgid "facets_datafeed.dataimport.status_description"
+msgstr "Estado de la importación de datos"
+
+msgid "facets_etype-facet"
+msgstr "Faceta \"es de tipo\""
+
+msgid "facets_etype-facet_description"
+msgstr "Faceta es de tipo"
+
+msgid "facets_has_text-facet"
+msgstr "Faceta \"contiene el texto\""
+
+msgid "facets_has_text-facet_description"
+msgstr "Faceta contiene el texto"
+
+msgid "facets_in_group-facet"
+msgstr "Faceta \"forma parte del grupo\""
+
+msgid "facets_in_group-facet_description"
+msgstr "Faceta en grupo"
+
+msgid "facets_in_state-facet"
+msgstr "Faceta \"en el estado\""
+
+msgid "facets_in_state-facet_description"
+msgstr "Faceta en el estado"
+
+msgid "failed"
+msgstr "fallido"
+
+#, python-format
+msgid "failed to uniquify path (%s, %s)"
+msgstr "No se pudo obtener un dato único (%s, %s)"
+
+msgid "february"
+msgstr "Febrero"
+
+msgid "file tree view"
+msgstr "Arborescencia (archivos)"
+
+msgid "final"
+msgstr "Final"
+
+msgctxt "CWEType"
+msgid "final"
+msgstr "Final"
+
+msgctxt "CWRType"
+msgid "final"
+msgstr "Final"
+
+msgid "first name"
+msgstr "Nombre"
+
+msgid "firstname"
+msgstr "Nombre"
+
+msgctxt "CWUser"
+msgid "firstname"
+msgstr "Nombre"
+
+msgid "foaf"
+msgstr "Amigo de un Amigo, FOAF"
+
+msgid "focus on this selection"
+msgstr "muestre esta selección"
+
+msgid "follow"
+msgstr "Seguir la liga"
+
+#, python-format
+msgid "follow this link for more information on this %s"
+msgstr "Seleccione esta liga para obtener mayor información sobre %s"
+
+msgid "for_user"
+msgstr "Para el usuario"
+
+msgctxt "CWProperty"
+msgid "for_user"
+msgstr "Propiedad del Usuario"
+
+msgid "for_user_object"
+msgstr "Utiliza las propiedades"
+
+msgctxt "CWUser"
+msgid "for_user_object"
+msgstr "Tiene como preferencia"
+
+msgid "formula"
+msgstr ""
+
+msgctxt "CWAttribute"
+msgid "formula"
+msgstr ""
+
+msgid "friday"
+msgstr "Viernes"
+
+msgid "from"
+msgstr "De"
+
+#, python-format
+msgid "from %(date)s"
+msgstr "de %(date)s"
+
+msgid "from_entity"
+msgstr "De la entidad"
+
+msgctxt "CWAttribute"
+msgid "from_entity"
+msgstr "Atributo de la entidad"
+
+msgctxt "CWRelation"
+msgid "from_entity"
+msgstr "Relación de la entidad"
+
+msgid "from_entity_object"
+msgstr "Relación sujeto"
+
+msgctxt "CWEType"
+msgid "from_entity_object"
+msgstr "Entidad de"
+
+msgid "from_interval_start"
+msgstr "De"
+
+msgid "from_state"
+msgstr "Del Estado"
+
+msgctxt "TrInfo"
+msgid "from_state"
+msgstr "Estado de Inicio"
+
+msgid "from_state_object"
+msgstr "Transiciones desde este estado"
+
+msgctxt "State"
+msgid "from_state_object"
+msgstr "Estado de Inicio de"
+
+msgid "full text or RQL query"
+msgstr "Texto de búsqueda o demanda RQL"
+
+msgid "fulltext_container"
+msgstr "Contenedor de texto indexado"
+
+msgctxt "CWRType"
+msgid "fulltext_container"
+msgstr "Objeto a indexar"
+
+msgid "fulltextindexed"
+msgstr "Indexación de texto"
+
+msgctxt "CWAttribute"
+msgid "fulltextindexed"
+msgstr "Texto indexado"
+
+msgid "gc"
+msgstr "fuga de memoria"
+
+msgid "generic plot"
+msgstr "Gráfica Genérica"
+
+msgid "generic relation to link one entity to another"
+msgstr "Relación genérica para ligar entidades"
+
+msgid ""
+"generic relation to specify that an external entity represent the same "
+"object as a local one: http://www.w3.org/TR/owl-ref/#sameAs-def"
+msgstr ""
+"Relación genérica que indicar que una entidad es idéntica a otro recurso web "
+"(ver http://www.w3.org/TR/owl-ref/#sameAs-def)."
+
+msgid "granted to groups"
+msgstr "Otorgado a los grupos"
+
+#, python-format
+msgid "graphical representation of %(appid)s data model"
+msgstr "Representación gráfica del modelo de datos de %(appid)s"
+
+#, python-format
+msgid ""
+"graphical representation of the %(etype)s entity type from %(appid)s data "
+"model"
+msgstr ""
+"Representación gráfica del modelo de datos para el tipo de entidad %(etype)s "
+"de %(appid)s"
+
+#, python-format
+msgid ""
+"graphical representation of the %(rtype)s relation type from %(appid)s data "
+"model"
+msgstr ""
+"Representación gráfica del modelo de datos para el tipo de relación "
+"%(rtype)s de %(appid)s"
+
+msgid "group in which a user should be to be allowed to pass this transition"
+msgstr "Grupo en el cual el usuario debe estar para lograr la transición"
+
+msgid "groups"
+msgstr "Grupos"
+
+msgid "groups allowed to add entities/relations of this type"
+msgstr "grupos autorizados a agregar entidades/relaciones de este tipo"
+
+msgid "groups allowed to delete entities/relations of this type"
+msgstr "grupos autorizados a eliminar entidades/relaciones de este tipo"
+
+msgid "groups allowed to read entities/relations of this type"
+msgstr "grupos autorizados a leer entidades/relaciones de este tipo"
+
+msgid "groups allowed to update entities/relations of this type"
+msgstr "grupos autorizados a actualizar entidades/relaciones de este tipo"
+
+msgid "groups grant permissions to the user"
+msgstr "Los grupos otorgan los permisos al usuario"
+
+msgid "guests"
+msgstr "Invitados"
+
+msgid "hCalendar"
+msgstr "hCalendar"
+
+msgid "has_text"
+msgstr "Contiene el texto"
+
+msgid "header-center"
+msgstr "header - centro"
+
+msgid "header-left"
+msgstr "encabezado (izquierdo)"
+
+msgid "header-right"
+msgstr "encabezado (derecho)"
+
+msgid "hide filter form"
+msgstr "Esconder el filtro"
+
+msgid ""
+"how to format date and time in the ui (see this page for format "
+"description)"
+msgstr ""
+"Formato de fecha y hora que se utilizará por defecto en la interfaz (mayor información del formato)"
+
+msgid ""
+"how to format date in the ui (see this page for format "
+"description)"
+msgstr ""
+"Formato de fecha que se utilizará por defecto en la interfaz (mayor información del formato)"
+
+msgid "how to format float numbers in the ui"
+msgstr ""
+"Formato de números flotantes que se utilizará por defecto en la interfaz"
+
+msgid ""
+"how to format time in the ui (see this page for format "
+"description)"
+msgstr ""
+"Formato de hora que se utilizará por defecto en la interfaz (mayor información del formato)"
+
+msgid "i18n_bookmark_url_fqs"
+msgstr "Parámetros"
+
+msgid "i18n_bookmark_url_path"
+msgstr "Ruta"
+
+msgid "i18n_login_popup"
+msgstr "Identificarse"
+
+msgid "i18ncard_*"
+msgstr "0..n"
+
+msgid "i18ncard_+"
+msgstr "1..n"
+
+msgid "i18ncard_1"
+msgstr "1"
+
+msgid "i18ncard_?"
+msgstr "0..1"
+
+msgid "i18nprevnext_next"
+msgstr "Siguiente"
+
+msgid "i18nprevnext_previous"
+msgstr "Anterior"
+
+msgid "i18nprevnext_up"
+msgstr "Padre"
+
+msgid "iCalendar"
+msgstr "iCalendar"
+
+msgid "id of main template used to render pages"
+msgstr "ID del template principal"
+
+msgid "identical to"
+msgstr "Idéntico a"
+
+msgid "identical_to"
+msgstr "idéntico a"
+
+msgid "identity"
+msgstr "es idéntico a"
+
+msgid "identity_object"
+msgstr "es idéntico a"
+
+msgid ""
+"if full text content of subject/object entity should be added to other side "
+"entity (the container)."
+msgstr ""
+"Si el texto indexado de la entidad sujeto/objeto debe ser agregado a la "
+"entidad al otro extremo de la relación (el contenedor)."
+
+msgid "image"
+msgstr "Imagen"
+
+msgid "in progress"
+msgstr "en progreso"
+
+msgid "in_group"
+msgstr "En el grupo"
+
+msgctxt "CWUser"
+msgid "in_group"
+msgstr "Forma parte del grupo"
+
+msgid "in_group_object"
+msgstr "Miembros"
+
+msgctxt "CWGroup"
+msgid "in_group_object"
+msgstr "Contiene los usuarios"
+
+msgid "in_state"
+msgstr "Estado"
+
+msgid "in_state_object"
+msgstr "Estado de"
+
+msgid "in_synchronization"
+msgstr "sincronizado"
+
+msgctxt "CWSource"
+msgid "in_synchronization"
+msgstr "sincronizado"
+
+msgid "incontext"
+msgstr "En el contexto"
+
+msgid "incorrect captcha value"
+msgstr "Valor del Captcha incorrecto"
+
+#, python-format
+msgid "incorrect value (%(KEY-value)r) for type \"%(KEY-type)s\""
+msgstr "el valor (%(KEY-value)r) es incorrecto para el tipo \"%(KEY-type)s\""
+
+msgid "index this attribute's value in the plain text index"
+msgstr "Indexar el valor de este atributo en el índice de texto simple"
+
+msgid "indexed"
+msgstr "Indexado"
+
+msgctxt "CWAttribute"
+msgid "indexed"
+msgstr "Indexado"
+
+msgid "indicate the current state of an entity"
+msgstr "Indica el estado actual de una entidad"
+
+msgid ""
+"indicate which state should be used by default when an entity using states "
+"is created"
+msgstr ""
+"Indica cual estado deberá ser utilizado por defecto al crear una entidad"
+
+msgid "indifferent"
+msgstr "indiferente"
+
+msgid "info"
+msgstr "Información del Sistema"
+
+msgid "initial state for this workflow"
+msgstr "Estado inicial para este Workflow"
+
+msgid "initial_state"
+msgstr "Estado inicial"
+
+msgctxt "Workflow"
+msgid "initial_state"
+msgstr "Estado inicial"
+
+msgid "initial_state_object"
+msgstr "Estado inicial de"
+
+msgctxt "State"
+msgid "initial_state_object"
+msgstr "Estado inicial de"
+
+msgid "inlined"
+msgstr "Inlined"
+
+msgctxt "CWRType"
+msgid "inlined"
+msgstr "Inlined"
+
+msgid "instance home"
+msgstr "Repertorio de la Instancia"
+
+msgid "internal entity uri"
+msgstr "Uri Interna"
+
+msgid "internationalizable"
+msgstr "Internacionalizable"
+
+msgctxt "CWAttribute"
+msgid "internationalizable"
+msgstr "Internacionalizable"
+
+#, python-format
+msgid "invalid action %r"
+msgstr "Acción %r inválida"
+
+#, python-format
+msgid "invalid value %(KEY-value)s, it must be one of %(KEY-choices)s"
+msgstr "Valor %(KEY-value)s es incorrecto, seleccione entre %(KEY-choices)s"
+
+msgid "is"
+msgstr "es"
+
+msgid "is object of:"
+msgstr "es objeto de:"
+
+msgid "is subject of:"
+msgstr "es sujeto de:"
+
+msgid ""
+"is the subject/object entity of the relation composed of the other ? This "
+"implies that when the composite is deleted, composants are also deleted."
+msgstr ""
+"¿Es la entidad sujeto/objeto de la relación una agregación del otro? De "
+"ser así, destruir el composite destruirá de igual manera sus componentes."
+
+msgid "is this attribute's value translatable"
+msgstr "¿Es el valor de este atributo traducible?"
+
+msgid "is this relation equivalent in both direction ?"
+msgstr "¿Es esta relación equivalente en ambos sentidos?"
+
+msgid ""
+"is this relation physically inlined? you should know what you're doing if "
+"you are changing this!"
+msgstr ""
+"¿Es esta relación estilo INLINED en la base de datos? ¡Usted debe saber "
+"lo que hace si cambia esto!"
+
+msgid "is_instance_of"
+msgstr "es una instancia de"
+
+msgid "is_instance_of_object"
+msgstr "tiene como instancias"
+
+msgid "is_object"
+msgstr "tiene por instancia"
+
+msgid "january"
+msgstr "Enero"
+
+msgid "json-entities-export-view"
+msgstr "Exportación JSON (de entidades)"
+
+msgid "json-export-view"
+msgstr "Exportación JSON"
+
+msgid "july"
+msgstr "Julio"
+
+msgid "june"
+msgstr "Junio"
+
+msgid "language of the user interface"
+msgstr "Idioma que se utilizará por defecto en la interfaz usuario"
+
+msgid "last connection date"
+msgstr "Última conexión"
+
+msgid "last login time"
+msgstr "Última conexión"
+
+msgid "last name"
+msgstr "Apellido"
+
+msgid "last usage"
+msgstr "Último uso"
+
+msgid "last_login_time"
+msgstr "Última fecha de conexión"
+
+msgctxt "CWUser"
+msgid "last_login_time"
+msgstr "Última conexión"
+
+msgid "latest import"
+msgstr "importaciones recientes"
+
+msgid "latest modification time of an entity"
+msgstr "Fecha de la última modificación de una entidad "
+
+msgid "latest synchronization time"
+msgstr "fecha de la última sincronización"
+
+msgid "latest update on"
+msgstr "Actualizado el"
+
+msgid "latest_retrieval"
+msgstr "última sincronización"
+
+msgctxt "CWSource"
+msgid "latest_retrieval"
+msgstr "fecha de la última sincronización de la fuente"
+
+msgid "left"
+msgstr "izquierda"
+
+msgid "line"
+msgstr "línea"
+
+msgid ""
+"link a property to the user which want this property customization. Unless "
+"you're a site manager, this relation will be handled automatically."
+msgstr ""
+"Liga una propiedad al usuario que desea esta personalización. Salvo que "
+"usted sea un administrador del sistema, esta relación será administrada de "
+"forma automática."
+
+msgid "link a relation definition to its object entity type"
+msgstr "Liga una definición de relación a su tipo de entidad objeto"
+
+msgid "link a relation definition to its relation type"
+msgstr "Liga una definición de relación a su tipo de relación"
+
+msgid "link a relation definition to its subject entity type"
+msgstr "Liga una definición de relación a su tipo de entidad"
+
+msgid "link a state to one or more workflow"
+msgstr "Liga un estado a uno o más Workflow"
+
+msgid "link a transition information to its object"
+msgstr "Liga una transición de informacion hacia los objetos asociados"
+
+msgid "link a transition to one or more workflow"
+msgstr "Liga una transición a uno o más Workflow"
+
+msgid "link a workflow to one or more entity type"
+msgstr "Liga un Workflow a uno a más tipos de entidad"
+
+msgid "list"
+msgstr "Lista"
+
+msgid "log"
+msgstr "log"
+
+msgctxt "CWDataImport"
+msgid "log"
+msgstr "log"
+
+msgid "log in"
+msgstr "Acceder"
+
+msgid "login"
+msgstr "Usuario"
+
+msgctxt "CWUser"
+msgid "login"
+msgstr "Usuario"
+
+msgid "login / password"
+msgstr "usuario / contraseña"
+
+msgid "login or email"
+msgstr "Usuario o dirección de correo"
+
+msgid "login_action"
+msgstr "Ingresa tus datos"
+
+msgid "logout"
+msgstr "Desconectarse"
+
+#, python-format
+msgid "loop in %(rel)s relation (%(eid)s)"
+msgstr "loop detectado en %(rel)s de la entidad #%(eid)s"
+
+msgid "main informations"
+msgstr "Informaciones Generales"
+
+msgid "main_tab"
+msgstr "descripción"
+
+msgid "mainvars"
+msgstr "Variables principales"
+
+msgctxt "RQLExpression"
+msgid "mainvars"
+msgstr "Variables principales"
+
+msgid "manage"
+msgstr "Administración Sistema"
+
+msgid "manage bookmarks"
+msgstr "Gestión de favoritos"
+
+msgid "manage permissions"
+msgstr "Gestión de permisos"
+
+msgid "managers"
+msgstr "Administradores"
+
+msgid "mandatory relation"
+msgstr "Relación obligatoria"
+
+msgid "march"
+msgstr "Marzo"
+
+msgid "match_host"
+msgstr "para el host"
+
+msgctxt "CWSourceHostConfig"
+msgid "match_host"
+msgstr "para el host"
+
+msgid "maximum number of characters in short description"
+msgstr "Máximo de caracteres en las descripciones cortas"
+
+msgid "maximum number of entities to display in related combo box"
+msgstr "Máximo de entidades a mostrar en las listas dinámicas"
+
+msgid "maximum number of objects displayed by page of results"
+msgstr "Máximo de elementos mostrados por página de resultados"
+
+msgid "maximum number of related entities to display in the primary view"
+msgstr "Máximo de entidades relacionadas a mostrar en la vista primaria"
+
+msgid "may"
+msgstr "Mayo"
+
+msgid "memory leak debugging"
+msgstr "depuración (debugging) de fuga de memoria"
+
+msgid "message"
+msgstr "mensaje"
+
+#, python-format
+msgid "missing parameters for entity %s"
+msgstr "Parámetros faltantes a la entidad %s"
+
+msgid "modification"
+msgstr "modificación"
+
+msgid "modification_date"
+msgstr "Fecha de modificación"
+
+msgid "modify"
+msgstr "Modificar"
+
+msgid "monday"
+msgstr "Lunes"
+
+msgid "more actions"
+msgstr "Más acciones"
+
+msgid "more info about this workflow"
+msgstr "Más información acerca de este workflow"
+
+msgid "multiple edit"
+msgstr "Edición múltiple"
+
+msgid "my custom search"
+msgstr "Mi búsqueda personalizada"
+
+msgid "name"
+msgstr "Nombre"
+
+msgctxt "BaseTransition"
+msgid "name"
+msgstr "Nombre"
+
+msgctxt "CWCache"
+msgid "name"
+msgstr "Nombre"
+
+msgctxt "CWComputedRType"
+msgid "name"
+msgstr ""
+
+msgctxt "CWConstraintType"
+msgid "name"
+msgstr "Nombre"
+
+msgctxt "CWEType"
+msgid "name"
+msgstr "Nombre"
+
+msgctxt "CWGroup"
+msgid "name"
+msgstr "Nombre"
+
+msgctxt "CWRType"
+msgid "name"
+msgstr "Nombre"
+
+msgctxt "CWSource"
+msgid "name"
+msgstr "nombre"
+
+msgctxt "CWUniqueTogetherConstraint"
+msgid "name"
+msgstr "nombre"
+
+msgctxt "State"
+msgid "name"
+msgstr "nombre"
+
+msgctxt "Transition"
+msgid "name"
+msgstr "Nombre"
+
+msgctxt "Workflow"
+msgid "name"
+msgstr "Nombre"
+
+msgctxt "WorkflowTransition"
+msgid "name"
+msgstr "Nombre"
+
+msgid "name of the cache"
+msgstr "Nombre del Caché"
+
+msgid ""
+"name of the main variables which should be used in the selection if "
+"necessary (comma separated)"
+msgstr ""
+"Nombre de las variables principales que deberían ser utilizadas en la "
+"selección de ser necesario (separarlas con comas)"
+
+msgid "name of the source"
+msgstr "nombre de la fuente"
+
+msgid "navbottom"
+msgstr "Pie de página"
+
+msgid "navcontentbottom"
+msgstr "Pie de página del contenido principal"
+
+msgid "navcontenttop"
+msgstr "Encabezado"
+
+msgid "navigation"
+msgstr "Navegación"
+
+msgid "navigation.combobox-limit"
+msgstr "ComboBox"
+
+msgid "navigation.page-size"
+msgstr "Paginación"
+
+msgid "navigation.related-limit"
+msgstr "Entidades relacionadas"
+
+msgid "navigation.short-line-size"
+msgstr "Descripción corta"
+
+msgid "navtop"
+msgstr "Encabezado del contenido principal"
+
+msgid "new"
+msgstr "Nuevo"
+
+msgid "next page"
+msgstr "página siguiente"
+
+msgid "next_results"
+msgstr "Siguientes resultados"
+
+msgid "no"
+msgstr "No"
+
+msgid "no content next link"
+msgstr "no hay liga siguiente"
+
+msgid "no content prev link"
+msgstr "no existe liga previa"
+
+msgid "no edited fields specified"
+msgstr "ningún campo por editar especificado"
+
+msgid "no log to display"
+msgstr "no arrojó elementos para mostrar"
+
+msgid "no related entity"
+msgstr "No posee entidad asociada"
+
+msgid "no repository sessions found"
+msgstr "Ninguna sesión encontrada"
+
+msgid "no selected entities"
+msgstr "No hay entidades seleccionadas"
+
+#, python-format
+msgid "no such entity type %s"
+msgstr "El tipo de entidad '%s' no existe"
+
+msgid "no version information"
+msgstr "No existe la información de versión"
+
+msgid "no web sessions found"
+msgstr "Ninguna sesión web encontrada"
+
+msgid "normal"
+msgstr "Normal"
+
+msgid "not authorized"
+msgstr "No autorizado"
+
+msgid "not selected"
+msgstr "No seleccionado"
+
+msgid "november"
+msgstr "Noviembre"
+
+msgid "num. users"
+msgstr "Número de Usuarios"
+
+msgid "object"
+msgstr "Objeto"
+
+msgid "object type"
+msgstr "Tipo de Objeto"
+
+msgid "october"
+msgstr "Octubre"
+
+msgid "one month"
+msgstr "Un mes"
+
+msgid "one week"
+msgstr "Una semana"
+
+msgid "oneline"
+msgstr "En una línea"
+
+msgid "only select queries are authorized"
+msgstr "Solo están permitidas consultas de lectura"
+
+msgid "open all"
+msgstr "Abrir todos"
+
+msgid "opened sessions"
+msgstr "Sesiones abiertas"
+
+msgid "opened web sessions"
+msgstr "Sesiones Web abiertas"
+
+msgid "options"
+msgstr "Opciones"
+
+msgctxt "CWSourceSchemaConfig"
+msgid "options"
+msgstr "opciones"
+
+msgid "order"
+msgstr "Orden"
+
+msgid "ordernum"
+msgstr "Orden"
+
+msgctxt "CWAttribute"
+msgid "ordernum"
+msgstr "Número de Orden"
+
+msgctxt "CWRelation"
+msgid "ordernum"
+msgstr "Número de Orden"
+
+msgid "owl"
+msgstr "OWL"
+
+msgid "owlabox"
+msgstr "OWLabox"
+
+msgid "owned_by"
+msgstr "Pertenece a"
+
+msgid "owned_by_object"
+msgstr "Pertenece al objeto"
+
+msgid "owners"
+msgstr "Propietarios"
+
+msgid "ownerships have been changed"
+msgstr "Derechos de propiedad modificados"
+
+msgid "pageid-not-found"
+msgstr "Página no encontrada."
+
+msgid "parser"
+msgstr "analizador (parser)"
+
+msgctxt "CWSource"
+msgid "parser"
+msgstr "analizador (parser)"
+
+msgid "parser to use to extract entities from content retrieved at given URLs."
+msgstr ""
+"analizador (parser) que sirve para extraer entidades y relaciones del "
+"contenido recuperado de las URLs."
+
+msgid "password"
+msgstr "Contraseña"
+
+msgid "password and confirmation don't match"
+msgstr "Su contraseña y confirmación no concuerdan"
+
+msgid "path"
+msgstr "Ruta"
+
+msgctxt "Bookmark"
+msgid "path"
+msgstr "Ruta"
+
+msgid "permalink to this message"
+msgstr "liga permanente a este mensaje"
+
+msgid "permission"
+msgstr "Permiso"
+
+msgid "permissions"
+msgstr "Permisos"
+
+msgid "pick existing bookmarks"
+msgstr "Seleccionar favoritos existentes"
+
+msgid "pkey"
+msgstr "Clave"
+
+msgctxt "CWProperty"
+msgid "pkey"
+msgstr "Código de la Propiedad"
+
+msgid "please correct errors below"
+msgstr "Por favor corregir los errores señalados en la parte inferior"
+
+msgid "please correct the following errors:"
+msgstr "Por favor corregir los siguientes errores:"
+
+msgid "possible views"
+msgstr "Vistas posibles"
+
+msgid "prefered_form"
+msgstr "Forma preferida"
+
+msgctxt "EmailAddress"
+msgid "prefered_form"
+msgstr "Email principal"
+
+msgid "prefered_form_object"
+msgstr "Formato preferido sobre"
+
+msgctxt "EmailAddress"
+msgid "prefered_form_object"
+msgstr "Email principal de"
+
+msgid "preferences"
+msgstr "Preferencias"
+
+msgid "previous page"
+msgstr "página anterior"
+
+msgid "previous_results"
+msgstr "Resultados Anteriores"
+
+msgid "primary"
+msgstr "Primaria"
+
+msgid "primary_email"
+msgstr "Dirección principal de correo electrónico"
+
+msgctxt "CWUser"
+msgid "primary_email"
+msgstr "Dirección principal de correo electrónico"
+
+msgid "primary_email_object"
+msgstr "Dirección de email principal (objeto)"
+
+msgctxt "EmailAddress"
+msgid "primary_email_object"
+msgstr "Dirección principal de correo electrónico de"
+
+msgid "profile"
+msgstr "perfil"
+
+msgid "rdef-description"
+msgstr "Descripción"
+
+msgid "rdef-permissions"
+msgstr "Permisos"
+
+msgid "rdf export"
+msgstr "Exportación RDF"
+
+msgid "read"
+msgstr "Lectura"
+
+msgid "read_permission"
+msgstr "Permiso de lectura"
+
+msgctxt "CWAttribute"
+msgid "read_permission"
+msgstr "Permiso de Lectura"
+
+msgctxt "CWComputedRType"
+msgid "read_permission"
+msgstr ""
+
+msgctxt "CWEType"
+msgid "read_permission"
+msgstr "Permiso de Lectura"
+
+msgctxt "CWRelation"
+msgid "read_permission"
+msgstr "Permiso de Lectura"
+
+msgid "read_permission_object"
+msgstr "Tiene acceso de lectura a"
+
+msgctxt "CWGroup"
+msgid "read_permission_object"
+msgstr "Puede leer"
+
+msgctxt "RQLExpression"
+msgid "read_permission_object"
+msgstr "Puede leer"
+
+msgid "regexp matching host(s) to which this config applies"
+msgstr ""
+"expresión regular de los nombres de hosts a los cuales esta configuración "
+"aplica"
+
+msgid "registry"
+msgstr "Registro"
+
+msgid "related entity has no state"
+msgstr "La entidad relacionada no posee Estado"
+
+msgid "related entity has no workflow set"
+msgstr "La entidad relacionada no posee Workflow definido"
+
+msgid "relation"
+msgstr "relación"
+
+#, python-format
+msgid "relation %(relname)s of %(ent)s"
+msgstr "relación %(relname)s de %(ent)s"
+
+#, python-format
+msgid ""
+"relation %(rtype)s with %(etype)s as %(role)s is supported but no target "
+"type supported"
+msgstr ""
+"la relación %(rtype)s con %(etype)s como %(role)s es aceptada pero ningún "
+"tipo target es aceptado"
+
+#, python-format
+msgid ""
+"relation %(type)s with %(etype)s as %(role)s and target type %(target)s is "
+"mandatory but not supported"
+msgstr ""
+"la relación %(type)s con %(etype)s como %(role)s y tipo objetivo %(target)s "
+"es obligatoria pero no mantenida"
+
+#, python-format
+msgid ""
+"relation %s is supported but none of its definitions matches supported "
+"entities"
+msgstr ""
+"la relación %s es aceptada pero ninguna de sus definiciones corresponden a "
+"los tipos de entidades aceptadas"
+
+msgid "relation add"
+msgstr "Agregar Relación"
+
+msgid "relation removal"
+msgstr "Eliminar Relación"
+
+msgid "relation_type"
+msgstr "Tipo de Relación"
+
+msgctxt "CWAttribute"
+msgid "relation_type"
+msgstr "Tipo de Relación"
+
+msgctxt "CWRelation"
+msgid "relation_type"
+msgstr "Tipo de Relación"
+
+msgid "relation_type_object"
+msgstr "Definición de Relaciones"
+
+msgctxt "CWRType"
+msgid "relation_type_object"
+msgstr "Definición de Relaciones"
+
+msgid "relations"
+msgstr "relaciones"
+
+msgctxt "CWUniqueTogetherConstraint"
+msgid "relations"
+msgstr "relaciones"
+
+msgid "relations deleted"
+msgstr "Relaciones Eliminadas"
+
+msgid "relations_object"
+msgstr "relaciones de"
+
+msgctxt "CWRType"
+msgid "relations_object"
+msgstr "relaciones de"
+
+msgid "relative url of the bookmarked page"
+msgstr "Url relativa de la página"
+
+msgid "remove-inlined-entity-form"
+msgstr "Eliminar"
+
+msgid "require_group"
+msgstr "Requiere el grupo"
+
+msgctxt "BaseTransition"
+msgid "require_group"
+msgstr "Restringida al Grupo"
+
+msgctxt "Transition"
+msgid "require_group"
+msgstr "Restringida al Grupo"
+
+msgctxt "WorkflowTransition"
+msgid "require_group"
+msgstr "Restringida al Grupo"
+
+msgid "require_group_object"
+msgstr "Posee derechos sobre"
+
+msgctxt "CWGroup"
+msgid "require_group_object"
+msgstr "Posee derechos sobre"
+
+msgid "required"
+msgstr "Requerido"
+
+msgid "required attribute"
+msgstr "Atributo requerido"
+
+msgid "required field"
+msgstr "Campo requerido"
+
+msgid "resources usage"
+msgstr "Recursos utilizados"
+
+msgid ""
+"restriction part of a rql query. For entity rql expression, X and U are "
+"predefined respectivly to the current object and to the request user. For "
+"relation rql expression, S, O and U are predefined respectivly to the "
+"current relation'subject, object and to the request user. "
+msgstr ""
+"Parte restrictiva de una consulta RQL. En una expresión ligada a una "
+"entidad, X y U son respectivamente asignadas a la Entidad y el Usuario en "
+"curso. En una expresión ligada a una relación, S, O y U son respectivamente "
+"asignados al Sujeto/Objeto de la relación y al Usuario actual."
+
+msgid "revert changes"
+msgstr "Anular modificación"
+
+msgid "right"
+msgstr "Derecha"
+
+msgid "rql expression allowing to add entities/relations of this type"
+msgstr "Expresión RQL que permite AGREGAR entidades/relaciones de este tipo"
+
+msgid "rql expression allowing to delete entities/relations of this type"
+msgstr "Expresión RQL que permite ELIMINAR entidades/relaciones de este tipo"
+
+msgid "rql expression allowing to read entities/relations of this type"
+msgstr "Expresión RQL que permite LEER entidades/relaciones de este tipo"
+
+msgid "rql expression allowing to update entities/relations of this type"
+msgstr "Expresión RQL que permite ACTUALIZAR entidades/relaciones de este tipo"
+
+msgid "rql expressions"
+msgstr "Expresiones RQL"
+
+msgid "rss export"
+msgstr "Exportación RSS"
+
+msgid "rule"
+msgstr ""
+
+msgctxt "CWComputedRType"
+msgid "rule"
+msgstr ""
+
+msgid "same_as"
+msgstr "Idéntico a"
+
+msgid "sample format"
+msgstr "Ejemplo"
+
+msgid "saturday"
+msgstr "Sábado"
+
+msgid "schema-diagram"
+msgstr "Gráfica"
+
+msgid "schema-entity-types"
+msgstr "Entidades"
+
+msgid "schema-relation-types"
+msgstr "Relaciones"
+
+msgid "search"
+msgstr "Buscar"
+
+msgid "search for association"
+msgstr "Búsqueda por asociación"
+
+msgid "searching for"
+msgstr "Buscando"
+
+msgid "security"
+msgstr "Seguridad"
+
+msgid "see more"
+msgstr "ver más"
+
+msgid "see them all"
+msgstr "Ver todos"
+
+msgid "see_also"
+msgstr "Ver además"
+
+msgid "select"
+msgstr "Seleccionar"
+
+msgid "select a"
+msgstr "Seleccione un"
+
+msgid "select a key first"
+msgstr "Seleccione una clave"
+
+msgid "select a relation"
+msgstr "Seleccione una relación"
+
+msgid "select this entity"
+msgstr "Seleccionar esta entidad"
+
+msgid "selected"
+msgstr "Seleccionado"
+
+msgid "semantic description of this attribute"
+msgstr "Descripción semántica de este atributo"
+
+msgid "semantic description of this entity type"
+msgstr "Descripción semántica de este tipo de entidad"
+
+msgid "semantic description of this relation"
+msgstr "Descripción semántica de esta relación"
+
+msgid "semantic description of this relation type"
+msgstr "Descripción semántica de este tipo de relación"
+
+msgid "semantic description of this state"
+msgstr "Descripción semántica de este estado"
+
+msgid "semantic description of this transition"
+msgstr "Descripcion semántica de esta transición"
+
+msgid "semantic description of this workflow"
+msgstr "Descripcion semántica de este Workflow"
+
+msgid "september"
+msgstr "Septiembre"
+
+msgid "server information"
+msgstr "Información del servidor"
+
+msgid "severity"
+msgstr "severidad"
+
+msgid ""
+"should html fields being edited using fckeditor (a HTML WYSIWYG editor). "
+"You should also select text/html as default text format to actually get "
+"fckeditor."
+msgstr ""
+"Indica si los campos HTML deberán ser editados usando fckeditor "
+"(un\n"
+"editor HTML WYSIWYG). Deberá también elegir text/html\n"
+"como formato de texto por defecto para poder utilizar fckeditor."
+
+#, python-format
+msgid "show %s results"
+msgstr "Mostrar %s resultados"
+
+msgid "show advanced fields"
+msgstr "Mostrar campos avanzados"
+
+msgid "show filter form"
+msgstr "Mostrar el Filtro"
+
+msgid "site configuration"
+msgstr "Configuración Sistema"
+
+msgid "site documentation"
+msgstr "Documentación Sistema"
+
+msgid "site title"
+msgstr "Nombre del Sistema"
+
+msgid "site-wide property can't be set for user"
+msgstr "Una propiedad específica al Sistema no puede ser propia al usuario"
+
+msgid "some later transaction(s) touch entity, undo them first"
+msgstr ""
+"Las transacciones más recientes modificaron esta entidad, anúlelas primero"
+
+msgid "some relations violate a unicity constraint"
+msgstr "algunas relaciones no respetan la restricción de unicidad"
+
+msgid "sorry, the server is unable to handle this query"
+msgstr "Lo sentimos, el servidor no puede manejar esta consulta"
+
+msgid ""
+"source's configuration. One key=value per line, authorized keys depending on "
+"the source's type"
+msgstr ""
+"configuración de fuentes. Una clave=valor por línea, las claves permitidas "
+"dependen del tipo de la fuente."
+
+msgid "sparql xml"
+msgstr "XML Sparql"
+
+msgid "special transition allowing to go through a sub-workflow"
+msgstr "Transición especial que permite ir en un Sub-Workflow"
+
+msgid "specializes"
+msgstr "Deriva de"
+
+msgctxt "CWEType"
+msgid "specializes"
+msgstr "Especializa"
+
+msgid "specializes_object"
+msgstr "Especializado por"
+
+msgctxt "CWEType"
+msgid "specializes_object"
+msgstr "Especializado por"
+
+#, python-format
+msgid "specifying %s is mandatory"
+msgstr "especificar %s es obligatorio"
+
+msgid ""
+"start timestamp of the currently in synchronization, or NULL when no "
+"synchronization in progress."
+msgstr ""
+"horario de inicio de la sincronización en curso, o NULL cuando no existe "
+"sincronización en curso"
+
+msgid "start_timestamp"
+msgstr "horario inicio"
+
+msgctxt "CWDataImport"
+msgid "start_timestamp"
+msgstr "horario inicio"
+
+msgid "startup views"
+msgstr "Vistas de inicio"
+
+msgid "startupview"
+msgstr "Vistas de Inicio"
+
+msgid "state"
+msgstr "Estado"
+
+msgid "state and transition don't belong the the same workflow"
+msgstr "El Estado y la Transición no pertenecen al mismo Workflow"
+
+msgid "state doesn't apply to this entity's type"
+msgstr "Este Estado no aplica a este tipo de Entidad"
+
+msgid "state doesn't belong to entity's current workflow"
+msgstr "El Estado no pertenece al Workflow actual de la Entidad"
+
+msgid "state doesn't belong to entity's workflow"
+msgstr "El Estado no pertenece al Workflow de la Entidad"
+
+msgid ""
+"state doesn't belong to entity's workflow. You may want to set a custom "
+"workflow for this entity first."
+msgstr ""
+"El Estado no pertenece al Workflow Actual de la Entidad. Usted desea quizás "
+"especificar que esta entidad debe utilizar este Workflow"
+
+msgid "state doesn't belong to this workflow"
+msgstr "El Estado no pertenece a este Workflow"
+
+msgid "state_of"
+msgstr "Estado de"
+
+msgctxt "State"
+msgid "state_of"
+msgstr "Estado de"
+
+msgid "state_of_object"
+msgstr "Tiene por Estado"
+
+msgctxt "Workflow"
+msgid "state_of_object"
+msgstr "Tiene por Estado"
+
+msgid "status"
+msgstr "estado"
+
+msgctxt "CWDataImport"
+msgid "status"
+msgstr "estado"
+
+msgid "status change"
+msgstr "Cambio de Estatus"
+
+msgid "status changed"
+msgstr "Estatus cambiado"
+
+#, python-format
+msgid "status will change from %(st1)s to %(st2)s"
+msgstr "El estatus cambiará de %(st1)s a %(st2)s"
+
+msgid "subject"
+msgstr "Sujeto"
+
+msgid "subject type"
+msgstr "Tipo del sujeto"
+
+msgid "subject/object cardinality"
+msgstr "Cardinalidad Sujeto/Objeto"
+
+msgid "subworkflow"
+msgstr "Sub-Workflow"
+
+msgctxt "WorkflowTransition"
+msgid "subworkflow"
+msgstr "Sub-Workflow"
+
+msgid ""
+"subworkflow isn't a workflow for the same types as the transition's workflow"
+msgstr ""
+"El Sub-Workflow no se aplica a los mismos tipos que el Workflow de esta "
+"transición"
+
+msgid "subworkflow state"
+msgstr "Estado de Sub-Workflow"
+
+msgid "subworkflow_exit"
+msgstr "Salida del Sub-Workflow"
+
+msgctxt "WorkflowTransition"
+msgid "subworkflow_exit"
+msgstr "Salida del Sub-Workflow"
+
+msgid "subworkflow_exit_object"
+msgstr "Salida Sub-Workflow de"
+
+msgctxt "SubWorkflowExitPoint"
+msgid "subworkflow_exit_object"
+msgstr "Salida Sub-Workflow de"
+
+msgid "subworkflow_object"
+msgstr "Sub-Workflow de"
+
+msgctxt "Workflow"
+msgid "subworkflow_object"
+msgstr "Sub-Workflow de"
+
+msgid "subworkflow_state"
+msgstr "Estado de Sub-Workflow"
+
+msgctxt "SubWorkflowExitPoint"
+msgid "subworkflow_state"
+msgstr "Estado de Sub-Workflow"
+
+msgid "subworkflow_state_object"
+msgstr "Estado de Salida de"
+
+msgctxt "State"
+msgid "subworkflow_state_object"
+msgstr "Estado de Salida de"
+
+msgid "success"
+msgstr "éxito"
+
+msgid "sunday"
+msgstr "Domingo"
+
+msgid "surname"
+msgstr "Apellido"
+
+msgctxt "CWUser"
+msgid "surname"
+msgstr "Apellido"
+
+msgid "symmetric"
+msgstr "Simétrico"
+
+msgctxt "CWRType"
+msgid "symmetric"
+msgstr "Simétrico"
+
+msgid "synchronization-interval must be greater than 1 minute"
+msgstr "synchronization-interval debe ser mayor a 1 minuto"
+
+msgid "synchronize"
+msgstr ""
+
+msgid "table"
+msgstr "Tabla"
+
+msgid "tablefilter"
+msgstr "Tablero de Filtrado"
+
+msgid "text"
+msgstr "Texto"
+
+msgid "text/cubicweb-page-template"
+msgstr "Usar Page Templates"
+
+msgid "text/html"
+msgstr "Usar HTML"
+
+msgid "text/markdown"
+msgstr ""
+
+msgid "text/plain"
+msgstr "Usar Texto simple"
+
+msgid "text/rest"
+msgstr "Texto en REST"
+
+msgid "the URI of the object"
+msgstr "El URI del Objeto"
+
+msgid "the prefered email"
+msgstr "Dirección principal de email"
+
+msgid "the system source has its configuration stored on the file-system"
+msgstr ""
+"el sistema fuente tiene su configuración almacenada en el sistema de archivos"
+
+msgid "there is no next page"
+msgstr "no existe página siguiente"
+
+msgid "there is no previous page"
+msgstr "no existe página anterior"
+
+#, python-format
+msgid "there is no transaction #%s"
+msgstr "no existe la transacción #%s"
+
+msgid "this action is not reversible!"
+msgstr "¡Esta acción es irreversible!"
+
+msgid "this entity is currently owned by"
+msgstr "Esta Entidad es propiedad de"
+
+msgid "this parser doesn't use a mapping"
+msgstr "este analizador (parser) no utiliza mapeo"
+
+msgid "this resource does not exist"
+msgstr "Este recurso no existe"
+
+msgid "this source doesn't use a mapping"
+msgstr "esta fuente no utiliza mapeo"
+
+msgid "thursday"
+msgstr "Jueves"
+
+msgid "timestamp"
+msgstr "Fecha"
+
+msgctxt "CWCache"
+msgid "timestamp"
+msgstr "Válido desde"
+
+msgid "timetable"
+msgstr "Tablero de tiempos"
+
+msgid "title"
+msgstr "Nombre"
+
+msgctxt "Bookmark"
+msgid "title"
+msgstr "Nombre"
+
+msgid "to"
+msgstr "a"
+
+#, python-format
+msgid "to %(date)s"
+msgstr "a %(date)s"
+
+msgid "to associate with"
+msgstr "Para asociar con"
+
+msgid "to_entity"
+msgstr "Hacia la entidad"
+
+msgctxt "CWAttribute"
+msgid "to_entity"
+msgstr "Por la entidad"
+
+msgctxt "CWRelation"
+msgid "to_entity"
+msgstr "Por la entidad"
+
+msgid "to_entity_object"
+msgstr "Objeto de la Relación"
+
+msgctxt "CWEType"
+msgid "to_entity_object"
+msgstr "Objeto de la Relación"
+
+msgid "to_interval_end"
+msgstr "a"
+
+msgid "to_state"
+msgstr "Hacia el Estado"
+
+msgctxt "TrInfo"
+msgid "to_state"
+msgstr "Hacia el Estado"
+
+msgid "to_state_object"
+msgstr "Transición hacia este Estado"
+
+msgctxt "State"
+msgid "to_state_object"
+msgstr "Transición hacia este Estado"
+
+msgid "toggle check boxes"
+msgstr "Cambiar valor"
+
+msgid "tr_count"
+msgstr "n° de transición"
+
+msgctxt "TrInfo"
+msgid "tr_count"
+msgstr "n° de transición"
+
+msgid "transaction undone"
+msgstr "transacción anulada"
+
+#, python-format
+msgid "transition %(tr)s isn't allowed from %(st)s"
+msgstr "La transición %(tr)s no esta permitida desde el Estado %(st)s"
+
+msgid "transition doesn't belong to entity's workflow"
+msgstr "La transición no pertenece al Workflow de la Entidad"
+
+msgid "transition isn't allowed"
+msgstr "La transición no esta permitida"
+
+msgid "transition may not be fired"
+msgstr "La transición no puede ser lanzada"
+
+msgid "transition_of"
+msgstr "Transición de"
+
+msgctxt "BaseTransition"
+msgid "transition_of"
+msgstr "Transición de"
+
+msgctxt "Transition"
+msgid "transition_of"
+msgstr "Transición de"
+
+msgctxt "WorkflowTransition"
+msgid "transition_of"
+msgstr "Transición de"
+
+msgid "transition_of_object"
+msgstr "Utiliza las transiciones"
+
+msgctxt "Workflow"
+msgid "transition_of_object"
+msgstr "Utiliza las transiciones"
+
+msgid "tree view"
+msgstr "Vista Jerárquica"
+
+msgid "tuesday"
+msgstr "Martes"
+
+msgid "type"
+msgstr "Tipo"
+
+msgctxt "BaseTransition"
+msgid "type"
+msgstr "Tipo"
+
+msgctxt "CWSource"
+msgid "type"
+msgstr "tipo"
+
+msgctxt "Transition"
+msgid "type"
+msgstr "Tipo"
+
+msgctxt "WorkflowTransition"
+msgid "type"
+msgstr "Tipo"
+
+msgid "type here a sparql query"
+msgstr "Escriba aquí su consulta en Sparql"
+
+msgid "type of the source"
+msgstr "tipo de la fuente"
+
+msgid "ui"
+msgstr "Interfaz Genérica"
+
+msgid "ui.date-format"
+msgstr "Formato de Fecha"
+
+msgid "ui.datetime-format"
+msgstr "Formato de Fecha y Hora"
+
+msgid "ui.default-text-format"
+msgstr "Formato de texto"
+
+msgid "ui.encoding"
+msgstr "Codificación"
+
+msgid "ui.fckeditor"
+msgstr "Editor de texto FCK"
+
+msgid "ui.float-format"
+msgstr "Números flotantes"
+
+msgid "ui.language"
+msgstr "Lenguaje"
+
+msgid "ui.main-template"
+msgstr "Plantilla Principal"
+
+msgid "ui.site-title"
+msgstr "Nombre del Sistema"
+
+msgid "ui.time-format"
+msgstr "Formato de hora"
+
+msgid "unable to check captcha, please try again"
+msgstr "Imposible de verificar el Captcha, inténtelo otra vez"
+
+msgid "unaccessible"
+msgstr "Inaccesible"
+
+msgid "unauthorized value"
+msgstr "Valor no permitido"
+
+msgid "undefined user"
+msgstr "usuario indefinido"
+
+msgid "undo"
+msgstr "Anular"
+
+msgid "unique identifier used to connect to the application"
+msgstr "Identificador único utilizado para conectarse al Sistema"
+
+msgid "unknown external entity"
+msgstr "Entidad externa desconocida"
+
+#, python-format
+msgid "unknown options %s"
+msgstr "opciones desconocidas: %s"
+
+#, python-format
+msgid "unknown property key %s"
+msgstr "Clave de Propiedad desconocida: %s"
+
+msgid "unknown vocabulary:"
+msgstr "Vocabulario desconocido: "
+
+msgid "unsupported protocol"
+msgstr "protocolo no soportado"
+
+msgid "upassword"
+msgstr "Contraseña"
+
+msgctxt "CWUser"
+msgid "upassword"
+msgstr "Contraseña"
+
+msgid "update"
+msgstr "Modificación"
+
+msgid "update_permission"
+msgstr "Puede ser modificado por"
+
+msgctxt "CWAttribute"
+msgid "update_permission"
+msgstr "Puede ser modificado por"
+
+msgctxt "CWEType"
+msgid "update_permission"
+msgstr "Puede ser modificado por"
+
+msgid "update_permission_object"
+msgstr "Tiene permiso de modificar"
+
+msgctxt "CWGroup"
+msgid "update_permission_object"
+msgstr "Puede modificar"
+
+msgctxt "RQLExpression"
+msgid "update_permission_object"
+msgstr "Puede modificar"
+
+msgid "update_relation"
+msgstr "Modificar"
+
+msgid "updated"
+msgstr "Actualizado"
+
+#, python-format
+msgid "updated %(etype)s #%(eid)s (%(title)s)"
+msgstr "actualización de la entidad %(etype)s #%(eid)s (%(title)s)"
+
+msgid "uri"
+msgstr "URI"
+
+msgctxt "ExternalUri"
+msgid "uri"
+msgstr "URI"
+
+msgid "url"
+msgstr "url"
+
+msgctxt "CWSource"
+msgid "url"
+msgstr "url"
+
+msgid ""
+"use to define a transition from one or multiple states to a destination "
+"states in workflow's definitions. Transition without destination state will "
+"go back to the state from which we arrived to the current state."
+msgstr ""
+"Se utiliza en una definición de procesos para agregar una transición desde "
+"uno o varios estados hacia un estado destino. Una transición sin Estado "
+"destino regresará al Estado anterior del Estado actual"
+
+msgid "use_email"
+msgstr "Correo electrónico"
+
+msgctxt "CWUser"
+msgid "use_email"
+msgstr "Usa el Correo Electrónico"
+
+msgid "use_email_object"
+msgstr "Email utilizado por"
+
+msgctxt "EmailAddress"
+msgid "use_email_object"
+msgstr "Utilizado por"
+
+msgid ""
+"used for cubicweb configuration. Once a property has been created you can't "
+"change the key."
+msgstr ""
+"Se utiliza para la configuración de CubicWeb. Una vez que la propiedad ha "
+"sido creada no puede cambiar la clave"
+
+msgid ""
+"used to associate simple states to an entity type and/or to define workflows"
+msgstr ""
+"Se utiliza para asociar estados simples a un tipo de entidad y/o para "
+"definir Workflows"
+
+msgid "user"
+msgstr "Usuario"
+
+#, python-format
+msgid ""
+"user %s has made the following change(s):\n"
+"\n"
+msgstr ""
+"El usuario %s ha efectuado los siguentes cambios:\n"
+"\n"
+
+msgid "user interface encoding"
+msgstr "Encoding de la interfaz de usuario"
+
+msgid "user preferences"
+msgstr "Preferencias"
+
+msgid "user's email account"
+msgstr "email del usuario"
+
+msgid "users"
+msgstr "Usuarios"
+
+msgid "users and groups"
+msgstr "usuarios y grupos"
+
+msgid "users using this bookmark"
+msgstr "Usuarios utilizando este Favorito"
+
+msgid "validate modifications on selected items"
+msgstr "Valida modificaciones sobre elementos seleccionados"
+
+msgid "validating..."
+msgstr "Validando ..."
+
+msgid "value"
+msgstr "Valor"
+
+msgctxt "CWConstraint"
+msgid "value"
+msgstr "Valor"
+
+msgctxt "CWProperty"
+msgid "value"
+msgstr "Valor"
+
+#, python-format
+msgid "value %(KEY-value)s must be < %(KEY-boundary)s"
+msgstr ""
+
+#, python-format
+msgid "value %(KEY-value)s must be <= %(KEY-boundary)s"
+msgstr "el valor %(KEY-value)s debe ser <= %(KEY-boundary)s"
+
+#, python-format
+msgid "value %(KEY-value)s must be > %(KEY-boundary)s"
+msgstr ""
+
+#, python-format
+msgid "value %(KEY-value)s must be >= %(KEY-boundary)s"
+msgstr "el valor %(KEY-value)s debe ser >= %(KEY-boundary)s"
+
+msgid "value associated to this key is not editable manually"
+msgstr "El valor asociado a este elemento no es editable manualmente"
+
+#, python-format
+msgid "value should have maximum size of %(KEY-max)s but found %(KEY-size)s"
+msgstr "el valor máximo es %(KEY-max)s y encontramos %(KEY-size)s"
+
+#, python-format
+msgid "value should have minimum size of %(KEY-min)s but found %(KEY-size)s"
+msgstr "el valor mínimo debe ser %(KEY-min)s y encontramos %(KEY-size)s"
+
+msgid "vcard"
+msgstr "vcard"
+
+msgid "versions configuration"
+msgstr "Configuración de Versión"
+
+msgid "view"
+msgstr "Ver"
+
+msgid "view all"
+msgstr "Ver todos"
+
+msgid "view detail for this entity"
+msgstr "Ver a detalle esta entidad"
+
+msgid "view history"
+msgstr "Ver histórico"
+
+msgid "view identifier"
+msgstr "Identificador"
+
+msgid "view title"
+msgstr "Nombre"
+
+msgid "view workflow"
+msgstr "Ver Workflow"
+
+msgid "view_index"
+msgstr "Inicio"
+
+msgid "visible"
+msgstr "Visible"
+
+msgid "warning"
+msgstr "atención"
+
+msgid "we are not yet ready to handle this query"
+msgstr "Aún no podemos manejar este tipo de consulta Sparql"
+
+msgid "wednesday"
+msgstr "Miércoles"
+
+#, python-format
+msgid "welcome %s!"
+msgstr "Bienvenido %s."
+
+msgid "wf_info_for"
+msgstr "Histórico de"
+
+msgid "wf_info_for_object"
+msgstr "Histórico de transiciones"
+
+msgid "wf_tab_info"
+msgstr "Descripción"
+
+msgid "wfgraph"
+msgstr "Gráfica del Workflow"
+
+msgid ""
+"when multiple addresses are equivalent (such as python-projects@logilab.org "
+"and python-projects@lists.logilab.org), set this to indicate which is the "
+"preferred form."
+msgstr ""
+"Cuando varias direcciones email son equivalentes (como python-"
+"projects@logilab.org y python-projects@lists.logilab.org), aquí se indica "
+"cual es la forma preferida."
+
+msgid "workflow"
+msgstr "Workflow"
+
+#, python-format
+msgid "workflow changed to \"%s\""
+msgstr "Workflow cambiado a \"%s\""
+
+msgid "workflow has no initial state"
+msgstr "El Workflow no posee Estado Inicial"
+
+msgid "workflow history item"
+msgstr "Elemento histórico del Workflow"
+
+msgid "workflow isn't a workflow for this type"
+msgstr "El Workflow no se aplica a este Tipo de Entidad"
+
+msgid "workflow to which this state belongs"
+msgstr "Workflow al cual pertenece este estado"
+
+msgid "workflow to which this transition belongs"
+msgstr "Workflow al cual pertenece esta transición"
+
+msgid "workflow_of"
+msgstr "Workflow de"
+
+msgctxt "Workflow"
+msgid "workflow_of"
+msgstr "Workflow de"
+
+msgid "workflow_of_object"
+msgstr "Utiliza el Workflow"
+
+msgctxt "CWEType"
+msgid "workflow_of_object"
+msgstr "Utiliza el Workflow"
+
+#, python-format
+msgid "wrong query parameter line %s"
+msgstr "Parámetro erróneo de consulta línea %s"
+
+msgid "xbel export"
+msgstr "Exportación XBEL"
+
+msgid "xml export"
+msgstr "Exportar XML"
+
+msgid "xml export (entities)"
+msgstr "Exportación XML (entidades)"
+
+msgid "yes"
+msgstr "Sí"
+
+msgid "you have been logged out"
+msgstr "Ha terminado la sesión"
+
+msgid "you should probably delete that property"
+msgstr "probablemente debería suprimir esta propiedad"
+
+#~ msgid "%s relation should not be in mapped"
+#~ msgstr "la relación %s no debería estar mapeada"
+
+#~ msgid "Any"
+#~ msgstr "Cualquiera"
+
+#~ msgid "Browse by category"
+#~ msgstr "Busca por categoría"
+
+#~ msgid "No account? Try public access at %s"
+#~ msgstr "No esta registrado? Use el acceso público en %s"
+
+#~ msgid "anonymous"
+#~ msgstr "anónimo"
+
+#~ msgid "attribute/relation can't be mapped, only entity and relation types"
+#~ msgstr ""
+#~ "los atributos y las relaciones no pueden ser mapeados, solamente los "
+#~ "tipos de entidad y de relación"
+
+#~ msgid "can't connect to source %s, some data may be missing"
+#~ msgstr "no se puede conectar a la fuente %s, algunos datos pueden faltar"
+
+#~ msgid "can't mix dontcross and maycross options"
+#~ msgstr "no puede mezclar las opciones dontcross y maycross"
+
+#~ msgid "can't mix dontcross and write options"
+#~ msgstr "no puede mezclar las opciones dontcross y write"
+
+#~ msgid "components_etypenavigation"
+#~ msgstr "Filtar por tipo"
+
+#~ msgid "components_etypenavigation_description"
+#~ msgstr "Permite filtrar por tipo de entidad los resultados de una búsqueda"
+
+#~ msgid "edit canceled"
+#~ msgstr "Edición cancelada"
+
+#~ msgid "error while querying source %s, some data may be missing"
+#~ msgstr ""
+#~ "Un error ha ocurrido al interrogar %s, es posible que los \n"
+#~ "datos visibles se encuentren incompletos"
+
+#~ msgid "inlined relation %(rtype)s of %(etype)s should be supported"
+#~ msgstr ""
+#~ "la relación %(rtype)s del tipo de entidad %(etype)s debe ser aceptada "
+#~ "('inlined')"
+
+#~ msgid "no edited fields specified for entity %s"
+#~ msgstr "Ningún campo editable especificado para la entidad %s"
+
+#~ msgid "the value \"%s\" is already used, use another one"
+#~ msgstr "El valor \"%s\" ya esta en uso, favor de utilizar otro"
+
+#~ msgid "timeline"
+#~ msgstr "Escala de Tiempo"
+
+#~ msgid "unknown option(s): %s"
+#~ msgstr "opcion(es) desconocida(s): %s"
+
+#~ msgid "value %(KEY-value)s must be %(KEY-op)s %(KEY-boundary)s"
+#~ msgstr "El valor %(KEY-value)s debe ser %(KEY-op)s %(KEY-boundary)s"
+
+#~ msgid "web sessions without CNX"
+#~ msgstr "sesiones web sin conexión asociada"
+
+#~ msgid "workflow already has a state of that name"
+#~ msgstr "el workflow posee ya un estado con ese nombre"
+
+#~ msgid "workflow already has a transition of that name"
+#~ msgstr "El Workflow posee ya una transición con ese nombre"
+
+#~ msgid "you may want to specify something for %s"
+#~ msgstr "usted desea quizás especificar algo para la relación %s"
+
+#~ msgid ""
+#~ "you should un-inline relation %s which is supported and may be crossed "
+#~ msgstr ""
+#~ "usted debe quitar la puesta en línea de la relación %s que es aceptada y "
+#~ "puede ser cruzada"
diff -r 1400aee10df4 -r faf279e33298 cubicweb/i18n/fr.po
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/i18n/fr.po Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,4747 @@
+# cubicweb i18n catalog
+# Copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# Logilab
+msgid ""
+msgstr ""
+"Project-Id-Version: cubicweb 2.46.0\n"
+"PO-Revision-Date: 2014-06-24 13:29+0200\n"
+"Last-Translator: Logilab Team \n"
+"Language-Team: fr \n"
+"Language: \n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+
+#, python-format
+msgid ""
+"\n"
+"%(user)s changed status from <%(previous_state)s> to <%(current_state)s> for "
+"entity\n"
+"'%(title)s'\n"
+"\n"
+"%(comment)s\n"
+"\n"
+"url: %(url)s\n"
+msgstr ""
+"\n"
+"%(user)s a changé l'état de <%(previous_state)s> vers <%(current_state)s> "
+"pour l'entité\n"
+"'%(title)s'\n"
+"\n"
+"%(comment)s\n"
+"\n"
+"url: %(url)s\n"
+
+#, python-format
+msgid " from state %(fromstate)s to state %(tostate)s\n"
+msgstr " de l'état %(fromstate)s vers l'état %(tostate)s\n"
+
+msgid " :"
+msgstr " :"
+
+#, python-format
+msgid "\"action\" must be specified in options; allowed values are %s"
+msgstr ""
+"\"action\" doit être specifié dans les options; les valeurs autorisées "
+"sont : %s"
+
+msgid "\"role=subject\" or \"role=object\" must be specified in options"
+msgstr ""
+"\"role=subject\" ou \"role=object\" doit être specifié dans les options"
+
+#, python-format
+msgid "%(KEY-cstr)s constraint failed for value %(KEY-value)r"
+msgstr "la valeur %(KEY-value)r ne satisfait pas la contrainte %(KEY-cstr)s"
+
+#, python-format
+msgid "%(KEY-rtype)s is part of violated unicity constraint"
+msgstr "%(KEY-rtype)s appartient à une contrainte d'unicité transgressée"
+
+#, python-format
+msgid "%(KEY-value)r doesn't match the %(KEY-regexp)r regular expression"
+msgstr ""
+"%(KEY-value)r ne correspond pas à l'expression régulière %(KEY-regexp)r"
+
+#, python-format
+msgid "%(attr)s set to %(newvalue)s"
+msgstr "%(attr)s modifié à %(newvalue)s"
+
+#, python-format
+msgid "%(attr)s updated from %(oldvalue)s to %(newvalue)s"
+msgstr "%(attr)s modifié de %(oldvalue)s à %(newvalue)s"
+
+#, python-format
+msgid "%(etype)s by %(author)s"
+msgstr "%(etype)s par %(author)s"
+
+#, python-format
+msgid "%(firstname)s %(surname)s"
+msgstr "%(firstname)s %(surname)s"
+
+#, python-format
+msgid "%(subject)s %(etype)s #%(eid)s (%(login)s)"
+msgstr "%(subject)s %(etype)s #%(eid)s (%(login)s)"
+
+#, python-format
+msgid "%d days"
+msgstr "%d jours"
+
+#, python-format
+msgid "%d hours"
+msgstr "%d heures"
+
+#, python-format
+msgid "%d minutes"
+msgstr "%d minutes"
+
+#, python-format
+msgid "%d months"
+msgstr "%d mois"
+
+#, python-format
+msgid "%d seconds"
+msgstr "%d secondes"
+
+#, python-format
+msgid "%d weeks"
+msgstr "%d semaines"
+
+#, python-format
+msgid "%d years"
+msgstr "%d années"
+
+#, python-format
+msgid "%s could be supported"
+msgstr "%s pourrait être supporté"
+
+#, python-format
+msgid "%s error report"
+msgstr "%s rapport d'erreur"
+
+#, python-format
+msgid "%s software version of the database"
+msgstr "version logicielle de la base pour %s"
+
+#, python-format
+msgid "%s updated"
+msgstr "%s mis à jour"
+
+#, python-format
+msgid "'%s' action doesn't take any options"
+msgstr "l'action '%s' ne prend pas d'option"
+
+#, python-format
+msgid ""
+"'%s' action for in_state relation should at least have 'linkattr=name' option"
+msgstr ""
+"l'action '%s' pour la relation in_state doit au moins avoir l'option "
+"'linkattr=name'"
+
+#, python-format
+msgid "'%s' action requires 'linkattr' option"
+msgstr "l'action '%s' nécessite une option 'linkattr'"
+
+msgid "(UNEXISTANT EID)"
+msgstr "(EID INTROUVABLE)"
+
+#, python-format
+msgid "(suppressed) entity #%d"
+msgstr "entité #%d (supprimée)"
+
+msgid "**"
+msgstr "0..n 0..n"
+
+msgid "*+"
+msgstr "0..n 1..n"
+
+msgid "*1"
+msgstr "0..n 1"
+
+msgid "*?"
+msgstr "0..n 0..1"
+
+msgid "+*"
+msgstr "1..n 0..n"
+
+msgid "++"
+msgstr "1..n 1..n"
+
+msgid "+1"
+msgstr "1..n 1"
+
+msgid "+?"
+msgstr "1..n 0..1"
+
+msgid "1*"
+msgstr "1 0..n"
+
+msgid "1+"
+msgstr "1 1..n"
+
+msgid "11"
+msgstr "1 1"
+
+msgid "1?"
+msgstr "1 0..1"
+
+#, python-format
+msgid "<%s not specified>"
+msgstr "<%s non spécifié>"
+
+#, python-format
+msgid ""
+"
Ce schéma du modèle de données exclut les méta-données, mais "
+"vous pouvez afficher un schéma complet.
"
+
+msgid ""
+msgstr ""
+
+msgid ""
+msgstr ""
+
+msgid "?*"
+msgstr "0..1 0..n"
+
+msgid "?+"
+msgstr "0..1 1..n"
+
+msgid "?1"
+msgstr "0..1 1"
+
+msgid "??"
+msgstr "0..1 0..1"
+
+msgid "AND"
+msgstr "ET"
+
+msgid "About this site"
+msgstr "À propos de ce site"
+
+#, python-format
+msgid "Added relation : %(entity_from)s %(rtype)s %(entity_to)s"
+msgstr "Relation ajoutée : %(entity_from)s %(rtype)s %(entity_to)s"
+
+msgid "Attributes permissions:"
+msgstr "Permissions des attributs"
+
+# schema pot file, generated on 2009-09-16 16:46:55
+#
+# singular and plural forms for each entity type
+msgid "BaseTransition"
+msgstr "Transition (abstraite)"
+
+msgid "BaseTransition_plural"
+msgstr "Transitions (abstraites)"
+
+msgid "BigInt"
+msgstr "Entier long"
+
+msgid "BigInt_plural"
+msgstr "Entiers longs"
+
+msgid "Bookmark"
+msgstr "Signet"
+
+msgid "Bookmark_plural"
+msgstr "Signets"
+
+msgid "Boolean"
+msgstr "Booléen"
+
+msgid "Boolean_plural"
+msgstr "Booléen"
+
+msgid "BoundConstraint"
+msgstr "contrainte de bornes"
+
+msgid "BoundaryConstraint"
+msgstr "contrainte de bornes"
+
+msgid "Browse by entity type"
+msgstr "Naviguer par type d'entité"
+
+#, python-format
+msgid "By %(user)s on %(dt)s [%(undo_link)s]"
+msgstr "Par %(user)s le %(dt)s [%(undo_link)s] "
+
+msgid "Bytes"
+msgstr "Donnée binaires"
+
+msgid "Bytes_plural"
+msgstr "Données binaires"
+
+msgid "CWAttribute"
+msgstr "Attribut"
+
+msgid "CWAttribute_plural"
+msgstr "Attributs"
+
+msgid "CWCache"
+msgstr "Cache applicatif"
+
+msgid "CWCache_plural"
+msgstr "Caches applicatifs"
+
+msgid "CWComputedRType"
+msgstr "Relation virtuelle"
+
+msgid "CWComputedRType_plural"
+msgstr "Relations virtuelles"
+
+msgid "CWConstraint"
+msgstr "Contrainte"
+
+msgid "CWConstraintType"
+msgstr "Type de contrainte"
+
+msgid "CWConstraintType_plural"
+msgstr "Types de contrainte"
+
+msgid "CWConstraint_plural"
+msgstr "Contraintes"
+
+msgid "CWDataImport"
+msgstr "Import de données"
+
+msgid "CWDataImport_plural"
+msgstr "Imports de données"
+
+msgid "CWEType"
+msgstr "Type d'entité"
+
+msgctxt "inlined:CWRelation.from_entity.subject"
+msgid "CWEType"
+msgstr "Type d'entité"
+
+msgctxt "inlined:CWRelation.to_entity.subject"
+msgid "CWEType"
+msgstr "Type d'entité"
+
+msgid "CWEType_plural"
+msgstr "Types d'entité"
+
+msgid "CWGroup"
+msgstr "Groupe"
+
+msgid "CWGroup_plural"
+msgstr "Groupes"
+
+msgid "CWProperty"
+msgstr "Propriété"
+
+msgid "CWProperty_plural"
+msgstr "Propriétés"
+
+msgid "CWRType"
+msgstr "Type de relation"
+
+msgctxt "inlined:CWRelation.relation_type.subject"
+msgid "CWRType"
+msgstr "Type de relation"
+
+msgid "CWRType_plural"
+msgstr "Types de relation"
+
+msgid "CWRelation"
+msgstr "Relation"
+
+msgid "CWRelation_plural"
+msgstr "Relations"
+
+msgid "CWSource"
+msgstr "Source de données"
+
+msgid "CWSourceHostConfig"
+msgstr "Configuration de source"
+
+msgid "CWSourceHostConfig_plural"
+msgstr "Configurations de source"
+
+msgid "CWSourceSchemaConfig"
+msgstr "Configuration de schéma de source"
+
+msgid "CWSourceSchemaConfig_plural"
+msgstr "Configurations de schéma de source"
+
+msgid "CWSource_plural"
+msgstr "Source de données"
+
+msgid "CWUniqueTogetherConstraint"
+msgstr "Contrainte d'unicité"
+
+msgid "CWUniqueTogetherConstraint_plural"
+msgstr "Contraintes d'unicité"
+
+msgid "CWUser"
+msgstr "Utilisateur"
+
+msgid "CWUser_plural"
+msgstr "Utilisateurs"
+
+#, python-format
+msgid ""
+"Can't restore %(role)s relation %(rtype)s to entity %(eid)s which is already "
+"linked using this relation."
+msgstr ""
+"Ne peut restaurer la relation %(role)s %(rtype)s vers l'entité %(eid)s qui "
+"est déja lié à une autre entité par cette relation."
+
+#, python-format
+msgid ""
+"Can't restore relation %(rtype)s between %(subj)s and %(obj)s, that relation "
+"does not exists anymore in the schema."
+msgstr ""
+"Ne peut restaurer la relation %(rtype)s entre %(subj)s et %(obj)s, cette "
+"relation n'existe plus dans le schéma."
+
+#, python-format
+msgid ""
+"Can't restore relation %(rtype)s, %(role)s entity %(eid)s doesn't exist "
+"anymore."
+msgstr ""
+"Ne peut restaurer la relation %(rtype)s, l'entité %(role)s %(eid)s n'existe "
+"plus."
+
+#, python-format
+msgid ""
+"Can't undo addition of relation %(rtype)s from %(subj)s to %(obj)s, doesn't "
+"exist anymore"
+msgstr ""
+"Ne peut annuler l'ajout de relation %(rtype)s de %(subj)s vers %(obj)s, "
+"cette relation n'existe plus"
+
+#, python-format
+msgid ""
+"Can't undo creation of entity %(eid)s of type %(etype)s, type no more "
+"supported"
+msgstr ""
+"Ne peut annuler la création de l'entité %(eid)s de type %(etype)s, ce type "
+"n'existe plus"
+
+msgid "Click to sort on this column"
+msgstr "Cliquer pour trier sur cette colonne"
+
+msgid ""
+"Configuration of the system source goes to the 'sources' file, not in the "
+"database"
+msgstr ""
+"La configuration de la source système va dans le fichier 'sources' et non "
+"dans la base de données"
+
+#, python-format
+msgid "Created %(etype)s : %(entity)s"
+msgstr "Entité %(etype)s crée : %(entity)s"
+
+msgid "DEBUG"
+msgstr "DEBUG"
+
+msgid "Date"
+msgstr "Date"
+
+msgid "Date_plural"
+msgstr "Dates"
+
+msgid "Datetime"
+msgstr "Date et heure"
+
+msgid "Datetime_plural"
+msgstr "Dates et heures"
+
+msgid "Decimal"
+msgstr "Nombre décimal"
+
+msgid "Decimal_plural"
+msgstr "Nombres décimaux"
+
+#, python-format
+msgid "Delete relation : %(entity_from)s %(rtype)s %(entity_to)s"
+msgstr "Relation supprimée : %(entity_from)s %(rtype)s %(entity_to)s"
+
+#, python-format
+msgid "Deleted %(etype)s : %(entity)s"
+msgstr "Entité %(etype)s supprimée : %(entity)s"
+
+msgid "Detected problems"
+msgstr "Problèmes détectés"
+
+msgid "Do you want to delete the following element(s)?"
+msgstr "Voulez-vous supprimer le(s) élément(s) suivant(s) ?"
+
+msgid "Download schema as OWL"
+msgstr "Télécharger le schéma au format OWL"
+
+msgid "ERROR"
+msgstr "ERREUR"
+
+msgid "EmailAddress"
+msgstr "Adresse électronique"
+
+msgctxt "inlined:CWUser.use_email.subject"
+msgid "EmailAddress"
+msgstr "Adresse électronique"
+
+msgid "EmailAddress_plural"
+msgstr "Adresses électroniques"
+
+msgid "Entities"
+msgstr "entités"
+
+#, python-format
+msgid ""
+"Entity %(eid)s has changed since you started to edit it. Reload the page and "
+"reapply your changes."
+msgstr ""
+"L'entité %(eid)s a été modifiée depuis votre demande d'édition. Veuillez "
+"recharger cette page et réappliquer vos changements."
+
+msgid "Entity and relation supported by this source"
+msgstr "Entités et relations supportés par cette source"
+
+msgid "ExternalUri"
+msgstr "Uri externe"
+
+msgid "ExternalUri_plural"
+msgstr "Uri externes"
+
+msgid "FATAL"
+msgstr "FATAL"
+
+msgid "Float"
+msgstr "Nombre flottant"
+
+msgid "Float_plural"
+msgstr "Nombres flottants"
+
+# schema pot file, generated on 2009-12-03 09:22:35
+#
+# singular and plural forms for each entity type
+msgid "FormatConstraint"
+msgstr "contrainte de format"
+
+msgid "Garbage collection information"
+msgstr "Information sur le ramasse-miette"
+
+msgid "Help"
+msgstr "Aide"
+
+msgid "INFO"
+msgstr "INFO"
+
+msgid "Instance"
+msgstr "Instance"
+
+msgid "Int"
+msgstr "Nombre entier"
+
+msgid "Int_plural"
+msgstr "Nombres entiers"
+
+msgid "Interval"
+msgstr "Durée"
+
+msgid "IntervalBoundConstraint"
+msgstr "contrainte d'interval"
+
+msgid "Interval_plural"
+msgstr "Durées"
+
+msgid "Link:"
+msgstr "Lien :"
+
+msgid "Looked up classes"
+msgstr "Classes recherchées"
+
+msgid "Manage"
+msgstr "Administration"
+
+msgid "Manage security"
+msgstr "Gestion de la sécurité"
+
+msgid "Message threshold"
+msgstr "Niveau du message"
+
+msgid "Most referenced classes"
+msgstr "Classes les plus référencées"
+
+msgid "New BaseTransition"
+msgstr "XXX"
+
+msgid "New Bookmark"
+msgstr "Nouveau signet"
+
+msgid "New CWAttribute"
+msgstr "Nouvelle définition de relation finale"
+
+msgid "New CWCache"
+msgstr "Nouveau cache applicatif"
+
+msgid "New CWComputedRType"
+msgstr "Nouvelle relation virtuelle"
+
+msgid "New CWConstraint"
+msgstr "Nouvelle contrainte"
+
+msgid "New CWConstraintType"
+msgstr "Nouveau type de contrainte"
+
+msgid "New CWDataImport"
+msgstr "Nouvel import de données"
+
+msgid "New CWEType"
+msgstr "Nouveau type d'entité"
+
+msgid "New CWGroup"
+msgstr "Nouveau groupe"
+
+msgid "New CWProperty"
+msgstr "Nouvelle propriété"
+
+msgid "New CWRType"
+msgstr "Nouveau type de relation"
+
+msgid "New CWRelation"
+msgstr "Nouvelle définition de relation non finale"
+
+msgid "New CWSource"
+msgstr "Nouvelle source"
+
+msgid "New CWSourceHostConfig"
+msgstr "Nouvelle configuration de source"
+
+msgid "New CWSourceSchemaConfig"
+msgstr "Nouvelle partie de mapping de source"
+
+msgid "New CWUniqueTogetherConstraint"
+msgstr "Nouvelle contrainte unique_together"
+
+msgid "New CWUser"
+msgstr "Nouvel utilisateur"
+
+msgid "New EmailAddress"
+msgstr "Nouvelle adresse électronique"
+
+msgid "New ExternalUri"
+msgstr "Nouvelle Uri externe"
+
+msgid "New RQLExpression"
+msgstr "Nouvelle expression rql"
+
+msgid "New State"
+msgstr "Nouvel état"
+
+msgid "New SubWorkflowExitPoint"
+msgstr "Nouvelle sortie de sous-workflow"
+
+msgid "New TrInfo"
+msgstr "Nouvelle information de transition"
+
+msgid "New Transition"
+msgstr "Nouvelle transition"
+
+msgid "New Workflow"
+msgstr "Nouveau workflow"
+
+msgid "New WorkflowTransition"
+msgstr "Nouvelle transition workflow"
+
+msgid "No result matching query"
+msgstr "Aucun résultat ne correspond à la requête"
+
+msgid "Non exhaustive list of views that may apply to entities of this type"
+msgstr "Liste non exhaustive des vues s'appliquant à ce type d'entité"
+
+msgid "OR"
+msgstr "OU"
+
+msgid "Ownership"
+msgstr "Propriété"
+
+msgid "Parent class:"
+msgstr "Classe parente"
+
+msgid "Password"
+msgstr "Mot de passe"
+
+msgid "Password_plural"
+msgstr "Mots de passe"
+
+msgid "Please note that this is only a shallow copy"
+msgstr "Attention, cela n'effectue qu'une copie de surface"
+
+msgid "Powered by CubicWeb"
+msgstr "Construit avec CubicWeb"
+
+msgid "RQLConstraint"
+msgstr "contrainte rql"
+
+msgid "RQLExpression"
+msgstr "Expression RQL"
+
+msgid "RQLExpression_plural"
+msgstr "Expressions RQL"
+
+msgid "RQLUniqueConstraint"
+msgstr "contrainte rql d'unicité"
+
+msgid "RQLVocabularyConstraint"
+msgstr "contrainte rql de vocabulaire"
+
+msgid "RegexpConstraint"
+msgstr "contrainte expression régulière"
+
+msgid "Registry's content"
+msgstr "Contenu du registre"
+
+msgid "Relations"
+msgstr "Relations"
+
+msgid "Repository"
+msgstr "Entrepôt de données"
+
+#, python-format
+msgid "Schema %s"
+msgstr "Schéma %s"
+
+msgid "Schema's permissions definitions"
+msgstr "Permissions définies dans le schéma"
+
+msgid "Search for"
+msgstr "Rechercher"
+
+msgid "Site information"
+msgstr "Information du site"
+
+msgid "SizeConstraint"
+msgstr "contrainte de taille"
+
+msgid ""
+"Source's configuration for a particular host. One key=value per line, "
+"authorized keys depending on the source's type, overriding values defined on "
+"the source."
+msgstr ""
+"Configuration de la source pour un hôte spécifique. Une clé=valeur par "
+"ligne, les clés autorisées dépendantes du type de source. Les valeurs "
+"surchargent celles définies sur la source."
+
+msgid "Startup views"
+msgstr "Vues de départ"
+
+msgid "State"
+msgstr "État"
+
+msgid "State_plural"
+msgstr "États"
+
+msgid "StaticVocabularyConstraint"
+msgstr "contrainte de vocabulaire"
+
+msgid "String"
+msgstr "Chaîne de caractères"
+
+msgid "String_plural"
+msgstr "Chaînes de caractères"
+
+msgid "Sub-classes:"
+msgstr "Classes filles :"
+
+msgid "SubWorkflowExitPoint"
+msgstr "Sortie de sous-workflow"
+
+msgid "SubWorkflowExitPoint_plural"
+msgstr "Sorties de sous-workflow"
+
+msgid "Submit bug report"
+msgstr "Soumettre un rapport de bug"
+
+msgid "Submit bug report by mail"
+msgstr "Soumettre ce rapport par email"
+
+msgid "Synchronization has been requested, refresh this page in a few minutes."
+msgstr ""
+"La demande de synchronisation a été soumise, rafraichir cette page d'ici "
+"quelques minutes"
+
+msgid "TZDatetime"
+msgstr "Date et heure internationale"
+
+msgid "TZDatetime_plural"
+msgstr "Dates et heures internationales"
+
+msgid "TZTime"
+msgstr "Heure internationale"
+
+msgid "TZTime_plural"
+msgstr "Heures internationales"
+
+#, python-format
+msgid "The view %s can not be applied to this query"
+msgstr "La vue %s ne peut être appliquée à cette requête"
+
+#, python-format
+msgid "The view %s could not be found"
+msgstr "La vue %s est introuvable"
+
+msgid "There is no default workflow"
+msgstr "Ce type d'entité n'a pas de workflow par défault"
+
+msgid "This BaseTransition:"
+msgstr "Cette transition abstraite :"
+
+msgid "This Bookmark:"
+msgstr "Ce signet :"
+
+msgid "This CWAttribute:"
+msgstr "Cette définition de relation finale :"
+
+msgid "This CWCache:"
+msgstr "Ce cache applicatif :"
+
+msgid "This CWComputedRType:"
+msgstr "Cette relation virtuelle :"
+
+msgid "This CWConstraint:"
+msgstr "Cette contrainte :"
+
+msgid "This CWConstraintType:"
+msgstr "Ce type de contrainte :"
+
+msgid "This CWDataImport:"
+msgstr "Cet import de données :"
+
+msgid "This CWEType:"
+msgstr "Ce type d'entité :"
+
+msgid "This CWGroup:"
+msgstr "Ce groupe :"
+
+msgid "This CWProperty:"
+msgstr "Cette propriété :"
+
+msgid "This CWRType:"
+msgstr "Ce type de relation :"
+
+msgid "This CWRelation:"
+msgstr "Cette définition de relation :"
+
+msgid "This CWSource:"
+msgstr "Cette source :"
+
+msgid "This CWSourceHostConfig:"
+msgstr "Cette configuration de source :"
+
+msgid "This CWSourceSchemaConfig:"
+msgstr "Cette partie de mapping de source :"
+
+msgid "This CWUniqueTogetherConstraint:"
+msgstr "Cette contrainte unique_together :"
+
+msgid "This CWUser:"
+msgstr "Cet utilisateur :"
+
+msgid "This EmailAddress:"
+msgstr "Cette adresse électronique :"
+
+msgid "This ExternalUri:"
+msgstr "Cette Uri externe :"
+
+msgid "This RQLExpression:"
+msgstr "Cette expression RQL :"
+
+msgid "This State:"
+msgstr "Cet état :"
+
+msgid "This SubWorkflowExitPoint:"
+msgstr "Cette sortie de sous-workflow :"
+
+msgid "This TrInfo:"
+msgstr "Cette information de transition :"
+
+msgid "This Transition:"
+msgstr "Cette transition :"
+
+msgid "This Workflow:"
+msgstr "Ce workflow :"
+
+msgid "This WorkflowTransition:"
+msgstr "Cette transition workflow :"
+
+msgid ""
+"This action is forbidden. If you think it should be allowed, please contact "
+"the site administrator."
+msgstr ""
+"Cette action est interdite. Si toutefois vous pensez qu'elle devrait être "
+"autorisée, veuillez contacter l'administrateur du site."
+
+msgid "This entity type permissions:"
+msgstr "Permissions pour ce type d'entité"
+
+msgid "Time"
+msgstr "Heure"
+
+msgid "Time_plural"
+msgstr "Heures"
+
+msgid "TrInfo"
+msgstr "Information transition"
+
+msgid "TrInfo_plural"
+msgstr "Information transitions"
+
+msgid "Transition"
+msgstr "Transition"
+
+msgid "Transition_plural"
+msgstr "Transitions"
+
+msgid "URLs from which content will be imported. You can put one url per line"
+msgstr ""
+"URLs depuis lesquelles le contenu sera importé. Vous pouvez mettre une URL "
+"par ligne."
+
+msgid "Undoable actions"
+msgstr "Action annulables"
+
+msgid "Undoing"
+msgstr "Annuler"
+
+msgid "UniqueConstraint"
+msgstr "contrainte d'unicité"
+
+msgid "Unknown source type"
+msgstr "Type de source inconnue"
+
+msgid "Unreachable objects"
+msgstr "Objets inaccessibles"
+
+#, python-format
+msgid "Updated %(etype)s : %(entity)s"
+msgstr "Entité %(etype)s mise à jour : %(entity)s"
+
+msgid "Used by:"
+msgstr "Utilisé par :"
+
+msgid "Users and groups management"
+msgstr "Gestion des utilisateurs et groupes"
+
+msgid "WARNING"
+msgstr "AVERTISSEMENT"
+
+msgid "Web server"
+msgstr "Serveur web"
+
+msgid "Workflow"
+msgstr "Workflow"
+
+msgid "Workflow history"
+msgstr "Historique des changements d'état"
+
+msgid "WorkflowTransition"
+msgstr "Transition workflow"
+
+msgid "WorkflowTransition_plural"
+msgstr "Transitions workflow"
+
+msgid "Workflow_plural"
+msgstr "Workflows"
+
+msgid ""
+"You can either submit a new file using the browse button above, or choose to "
+"remove already uploaded file by checking the \"detach attached file\" check-"
+"box, or edit file content online with the widget below."
+msgstr ""
+"Vous pouvez soit soumettre un nouveau fichier en utilisant le bouton\n"
+"\"parcourir\" ci-dessus, soit supprimer le fichier déjà présent en\n"
+"cochant la case \"détacher fichier attaché\", soit éditer le contenu\n"
+"du fichier en ligne avec le champ ci-dessous."
+
+msgid ""
+"You can either submit a new file using the browse button above, or edit file "
+"content online with the widget below."
+msgstr ""
+"Vous pouvez soit soumettre un nouveau fichier en utilisant le bouton\n"
+"\"parcourir\" ci-dessus, soit éditer le contenu du fichier en ligne\n"
+"avec le champ ci-dessous."
+
+msgid "You can't change this relation"
+msgstr "Vous ne pouvez pas modifier cette relation"
+
+msgid "You cannot remove the system source"
+msgstr "Vous ne pouvez pas supprimer la source système"
+
+msgid "You cannot rename the system source"
+msgstr "Vous ne pouvez pas renommer la source système"
+
+msgid ""
+"You have no access to this view or it can not be used to display the current "
+"data."
+msgstr ""
+"Vous n'avez pas accès à cette vue ou elle ne peut pas afficher ces données."
+
+msgid ""
+"You're not authorized to access this page. If you think you should, please "
+"contact the site administrator."
+msgstr ""
+"Vous n'êtes pas autorisé à accéder à cette page. Si toutefois vous pensez\n"
+"que c'est une erreur, veuillez contacter l'administrateur du site."
+
+#, python-format
+msgid "[%s supervision] changes summary"
+msgstr "[%s supervision] description des changements"
+
+msgid ""
+"a RQL expression which should return some results, else the transition won't "
+"be available. This query may use X and U variables that will respectivly "
+"represents the current entity and the current user."
+msgstr ""
+"une expression RQL devant retourner des résultats pour que la transition "
+"puisse être passée. Cette expression peut utiliser les variables X et U qui "
+"représentent respectivement l'entité à laquelle on veut appliquer la "
+"transition et l'utilisateur courant."
+
+msgid "a URI representing an object in external data store"
+msgstr "une Uri désignant un objet dans un entrepôt de données externe"
+
+msgid "a float is expected"
+msgstr "un nombre flottant est attendu"
+
+msgid "a number (in seconds) or 20s, 10min, 24h or 4d are expected"
+msgstr "un nombre (en seconde) ou 20s, 10min, 24h ou 4d sont attendus"
+
+msgid ""
+"a simple cache entity characterized by a name and a validity date. The "
+"target application is responsible for updating timestamp when necessary to "
+"invalidate the cache (typically in hooks). Also, checkout the AppObject."
+"get_cache() method."
+msgstr ""
+"un cache simple caractérisé par un nom et une date de validité. C'est\n"
+"le code de l'instance qui est responsable de mettre à jour la date de\n"
+"validité lorsque le cache doit être invalidé (en général dans un hook).\n"
+"Pour récupérer un cache, il faut utiliser utiliser la méthode\n"
+"get_cache(cachename)."
+
+msgid "abstract base class for transitions"
+msgstr "classe de base abstraite pour les transitions"
+
+msgid "action menu"
+msgstr "actions"
+
+msgid "action(s) on this selection"
+msgstr "action(s) sur cette sélection"
+
+msgid "actions"
+msgstr "actions"
+
+msgid "activate"
+msgstr "activer"
+
+msgid "activated"
+msgstr "activé"
+
+msgid "add"
+msgstr "ajouter"
+
+msgid "add Bookmark bookmarked_by CWUser object"
+msgstr "signet"
+
+msgid "add CWAttribute add_permission RQLExpression subject"
+msgstr "définir une expression RQL d'ajout"
+
+msgid "add CWAttribute constrained_by CWConstraint subject"
+msgstr "contrainte"
+
+msgid "add CWAttribute read_permission RQLExpression subject"
+msgstr "expression rql de lecture"
+
+msgid "add CWAttribute relation_type CWRType object"
+msgstr "définition d'attribut"
+
+msgid "add CWAttribute update_permission RQLExpression subject"
+msgstr "permission de mise à jour"
+
+msgid "add CWComputedRType read_permission RQLExpression subject"
+msgstr "permission de lecture"
+
+msgid "add CWEType add_permission RQLExpression subject"
+msgstr "définir une expression RQL d'ajout"
+
+msgid "add CWEType delete_permission RQLExpression subject"
+msgstr "définir une expression RQL de suppression"
+
+msgid "add CWEType read_permission RQLExpression subject"
+msgstr "définir une expression RQL de lecture"
+
+msgid "add CWEType update_permission RQLExpression subject"
+msgstr "définir une expression RQL de mise à jour"
+
+msgid "add CWProperty for_user CWUser object"
+msgstr "propriété"
+
+msgid "add CWRelation add_permission RQLExpression subject"
+msgstr "expression rql d'ajout"
+
+msgid "add CWRelation constrained_by CWConstraint subject"
+msgstr "contrainte"
+
+msgid "add CWRelation delete_permission RQLExpression subject"
+msgstr "expression rql de suppression"
+
+msgid "add CWRelation read_permission RQLExpression subject"
+msgstr "expression rql de lecture"
+
+msgid "add CWRelation relation_type CWRType object"
+msgstr "définition de relation"
+
+msgid "add CWSourceHostConfig cw_host_config_of CWSource object"
+msgstr "configuration d'hôte"
+
+msgid "add CWUniqueTogetherConstraint constraint_of CWEType object"
+msgstr "contrainte unique_together"
+
+msgid "add CWUser in_group CWGroup object"
+msgstr "utilisateur"
+
+msgid "add CWUser use_email EmailAddress subject"
+msgstr "adresse email"
+
+msgid "add State allowed_transition Transition object"
+msgstr "état en entrée"
+
+msgid "add State allowed_transition Transition subject"
+msgstr "transition en sortie"
+
+msgid "add State allowed_transition WorkflowTransition subject"
+msgstr "transition workflow en sortie"
+
+msgid "add State state_of Workflow object"
+msgstr "état"
+
+msgid "add Transition condition RQLExpression subject"
+msgstr "condition"
+
+msgid "add Transition destination_state State object"
+msgstr "transition en entrée"
+
+msgid "add Transition destination_state State subject"
+msgstr "état de sortie"
+
+msgid "add Transition transition_of Workflow object"
+msgstr "transition"
+
+msgid "add WorkflowTransition condition RQLExpression subject"
+msgstr "condition"
+
+msgid "add WorkflowTransition subworkflow_exit SubWorkflowExitPoint subject"
+msgstr "sortie de sous-workflow"
+
+msgid "add WorkflowTransition transition_of Workflow object"
+msgstr "transition workflow"
+
+msgid "add a BaseTransition"
+msgstr ""
+
+msgid "add a Bookmark"
+msgstr ""
+
+msgid "add a CWAttribute"
+msgstr ""
+
+msgid "add a CWCache"
+msgstr ""
+
+msgid "add a CWComputedRType"
+msgstr ""
+
+msgid "add a CWConstraint"
+msgstr ""
+
+msgid "add a CWConstraintType"
+msgstr ""
+
+msgid "add a CWDataImport"
+msgstr ""
+
+msgid "add a CWEType"
+msgstr ""
+
+msgctxt "inlined:CWRelation.from_entity.subject"
+msgid "add a CWEType"
+msgstr "ajouter un type d'entité sujet"
+
+msgctxt "inlined:CWRelation.to_entity.subject"
+msgid "add a CWEType"
+msgstr "ajouter un type d'entité objet"
+
+msgid "add a CWGroup"
+msgstr ""
+
+msgid "add a CWProperty"
+msgstr ""
+
+msgid "add a CWRType"
+msgstr ""
+
+msgctxt "inlined:CWRelation.relation_type.subject"
+msgid "add a CWRType"
+msgstr "ajouter un type de relation"
+
+msgid "add a CWRelation"
+msgstr ""
+
+msgid "add a CWSource"
+msgstr ""
+
+msgid "add a CWSourceHostConfig"
+msgstr ""
+
+msgid "add a CWSourceSchemaConfig"
+msgstr ""
+
+msgid "add a CWUniqueTogetherConstraint"
+msgstr ""
+
+msgid "add a CWUser"
+msgstr ""
+
+msgid "add a EmailAddress"
+msgstr ""
+
+msgctxt "inlined:CWUser.use_email.subject"
+msgid "add a EmailAddress"
+msgstr "ajouter une adresse électronique"
+
+msgid "add a ExternalUri"
+msgstr ""
+
+msgid "add a RQLExpression"
+msgstr ""
+
+msgid "add a State"
+msgstr ""
+
+msgid "add a SubWorkflowExitPoint"
+msgstr ""
+
+msgid "add a TrInfo"
+msgstr ""
+
+msgid "add a Transition"
+msgstr ""
+
+msgid "add a Workflow"
+msgstr ""
+
+msgid "add a WorkflowTransition"
+msgstr ""
+
+# subject and object forms for each relation type
+# (no object form for final relation types)
+msgid "add_permission"
+msgstr "peut ajouter"
+
+msgctxt "CWAttribute"
+msgid "add_permission"
+msgstr "permission d'ajout"
+
+# subject and object forms for each relation type
+# (no object form for final relation types)
+msgctxt "CWEType"
+msgid "add_permission"
+msgstr "permission d'ajout"
+
+msgctxt "CWRelation"
+msgid "add_permission"
+msgstr "permission d'ajout"
+
+msgid "add_permission_object"
+msgstr "a la permission d'ajouter"
+
+msgctxt "CWGroup"
+msgid "add_permission_object"
+msgstr "a la permission d'ajouter"
+
+msgctxt "RQLExpression"
+msgid "add_permission_object"
+msgstr "a la permission d'ajouter"
+
+msgid "add_relation"
+msgstr "ajouter"
+
+#, python-format
+msgid "added %(etype)s #%(eid)s (%(title)s)"
+msgstr "ajout de l'entité %(etype)s #%(eid)s (%(title)s)"
+
+#, python-format
+msgid ""
+"added relation %(rtype)s from %(frometype)s #%(eidfrom)s to %(toetype)s #"
+"%(eidto)s"
+msgstr ""
+"la relation %(rtype)s de %(frometype)s #%(eidfrom)s vers %(toetype)s #"
+"%(eidto)s a été ajoutée"
+
+msgid "additional type specific properties"
+msgstr "propriétés supplémentaires spécifiques au type"
+
+msgid "addrelated"
+msgstr "ajouter"
+
+msgid "address"
+msgstr "adresse électronique"
+
+msgctxt "EmailAddress"
+msgid "address"
+msgstr "adresse électronique"
+
+msgid "alias"
+msgstr "alias"
+
+msgctxt "EmailAddress"
+msgid "alias"
+msgstr "alias"
+
+msgid "allow to set a specific workflow for an entity"
+msgstr "permet de spécifier un workflow donné pour une entité"
+
+msgid "allowed options depends on the source type"
+msgstr "les options autorisées dépendent du type de la source"
+
+msgid "allowed transitions from this state"
+msgstr "transitions autorisées depuis cet état"
+
+#, python-format
+msgid "allowed values for \"action\" are %s"
+msgstr "les valeurs autorisées pour \"action\" sont %s"
+
+msgid "allowed_transition"
+msgstr "transitions autorisées"
+
+msgctxt "State"
+msgid "allowed_transition"
+msgstr "transitions autorisées"
+
+msgid "allowed_transition_object"
+msgstr "états en entrée"
+
+msgctxt "BaseTransition"
+msgid "allowed_transition_object"
+msgstr "transition autorisée de"
+
+msgctxt "Transition"
+msgid "allowed_transition_object"
+msgstr "transition autorisée de"
+
+msgctxt "WorkflowTransition"
+msgid "allowed_transition_object"
+msgstr "transition autorisée de"
+
+msgid "an electronic mail address associated to a short alias"
+msgstr "une adresse électronique associée à un alias"
+
+msgid "an error occurred"
+msgstr "une erreur est survenue"
+
+msgid "an error occurred while processing your request"
+msgstr "une erreur est survenue pendant le traitement de votre requête"
+
+msgid "an error occurred, the request cannot be fulfilled"
+msgstr "une erreur est survenue, la requête ne peut être complétée"
+
+msgid "an integer is expected"
+msgstr "un nombre entier est attendu"
+
+msgid "and linked"
+msgstr "et liée"
+
+msgid "and/or between different values"
+msgstr "et/ou entre les différentes valeurs"
+
+msgid "anyrsetview"
+msgstr "vues pour tout rset"
+
+msgid "april"
+msgstr "avril"
+
+#, python-format
+msgid "archive for %(author)s"
+msgstr "archive pour l'auteur %(author)s"
+
+#, python-format
+msgid "archive for %(month)s/%(year)s"
+msgstr "archive pour le mois %(month)s/%(year)s"
+
+#, python-format
+msgid "at least one relation %(rtype)s is required on %(etype)s (%(eid)s)"
+msgstr ""
+"l'entité #%(eid)s de type %(etype)s doit nécessairement être reliée à une\n"
+"autre via la relation %(rtype)s"
+
+msgid "attribute"
+msgstr "attribut"
+
+msgid "august"
+msgstr "août"
+
+msgid "authentication failure"
+msgstr "Identifiant ou mot de passe incorrect"
+
+msgid "auto"
+msgstr "automatique"
+
+msgid "autocomputed attribute used to ensure transition coherency"
+msgstr ""
+"attribut calculé automatiquement pour assurer la cohérence de la transition"
+
+msgid "automatic"
+msgstr "automatique"
+
+#, python-format
+msgid "back to pagination (%s results)"
+msgstr "retour à la vue paginée (%s résultats)"
+
+msgid "bad value"
+msgstr "mauvaise valeur"
+
+msgid "badly formatted url"
+msgstr "URL mal formattée"
+
+msgid "base url"
+msgstr "url de base"
+
+msgid "bookmark has been removed"
+msgstr "le signet a été retiré"
+
+msgid "bookmark this page"
+msgstr "poser un signet ici"
+
+msgid "bookmark this search"
+msgstr "mémoriser cette recherche"
+
+msgid "bookmarked_by"
+msgstr "utilisé par"
+
+msgctxt "Bookmark"
+msgid "bookmarked_by"
+msgstr "utilisé par"
+
+msgid "bookmarked_by_object"
+msgstr "utilise le(s) signet(s)"
+
+msgctxt "CWUser"
+msgid "bookmarked_by_object"
+msgstr "utilise le(s) signet(s)"
+
+msgid "bookmarks"
+msgstr "signets"
+
+msgid "bookmarks are used to have user's specific internal links"
+msgstr ""
+"les signets sont utilisés pour gérer des liens internes par utilisateur"
+
+msgid "boxes"
+msgstr "boîtes"
+
+msgid "bug report sent"
+msgstr "rapport d'erreur envoyé"
+
+msgid "button_apply"
+msgstr "appliquer"
+
+msgid "button_cancel"
+msgstr "annuler"
+
+msgid "button_delete"
+msgstr "supprimer"
+
+msgid "button_ok"
+msgstr "valider"
+
+msgid "by"
+msgstr "par"
+
+msgid "by relation"
+msgstr "via la relation"
+
+msgid "by_transition"
+msgstr "transition"
+
+msgctxt "TrInfo"
+msgid "by_transition"
+msgstr "transition"
+
+msgid "by_transition_object"
+msgstr "changement d'états"
+
+msgctxt "BaseTransition"
+msgid "by_transition_object"
+msgstr "a pour information"
+
+msgctxt "Transition"
+msgid "by_transition_object"
+msgstr "a pour information"
+
+msgctxt "WorkflowTransition"
+msgid "by_transition_object"
+msgstr "a pour information"
+
+msgid "calendar"
+msgstr "afficher un calendrier"
+
+msgid "can not resolve entity types:"
+msgstr "impossible d'interpréter les types d'entités :"
+
+msgid "can only have one url"
+msgstr "ne supporte qu'une seule URL"
+
+msgid "can't be changed"
+msgstr "ne peut-être modifié"
+
+msgid "can't be deleted"
+msgstr "ne peut-être supprimé"
+
+msgid "can't change this attribute"
+msgstr "cet attribut ne peut pas être modifié"
+
+#, python-format
+msgid "can't display data, unexpected error: %s"
+msgstr "impossible d'afficher les données à cause de l'erreur suivante: %s"
+
+msgid "can't have multiple exits on the same state"
+msgstr "ne peut avoir plusieurs sorties sur le même état"
+
+#, python-format
+msgid "can't parse %(value)r (expected %(format)s)"
+msgstr "ne peut analyser %(value)r (format attendu : %(format)s)"
+
+#, python-format
+msgid ""
+"can't restore entity %(eid)s of type %(eschema)s, target of %(rtype)s (eid "
+"%(value)s) does not exist any longer"
+msgstr ""
+"impossible de rétablir l'entité %(eid)s de type %(eschema)s, cible de la "
+"relation %(rtype)s (eid %(value)s) n'existe plus"
+
+#, python-format
+msgid ""
+"can't restore relation %(rtype)s of entity %(eid)s, this relation does not "
+"exist in the schema anymore."
+msgstr ""
+"impossible de rétablir la relation %(rtype)s sur l'entité %(eid)s, cette "
+"relation n'existe plus dans le schéma."
+
+#, python-format
+msgid "can't restore state of entity %s, it has been deleted inbetween"
+msgstr ""
+"impossible de rétablir l'état de l'entité %s, elle a été supprimée entre-"
+"temps"
+
+#, python-format
+msgid ""
+"can't set inlined=True, %(stype)s %(rtype)s %(otype)s has cardinality="
+"%(card)s"
+msgstr ""
+"ne peut mettre 'inlined'=Vrai, %(stype)s %(rtype)s %(otype)s a pour "
+"cardinalité %(card)s"
+
+msgid "cancel"
+msgstr "annuler"
+
+msgid "cancel select"
+msgstr "annuler la sélection"
+
+msgid "cancel this insert"
+msgstr "annuler cette insertion"
+
+msgid "cardinality"
+msgstr "cardinalité"
+
+msgctxt "CWAttribute"
+msgid "cardinality"
+msgstr "cardinalité"
+
+msgctxt "CWRelation"
+msgid "cardinality"
+msgstr "cardinalité"
+
+msgid "category"
+msgstr "categorie"
+
+#, python-format
+msgid "changed state of %(etype)s #%(eid)s (%(title)s)"
+msgstr "changement de l'état de %(etype)s #%(eid)s (%(title)s)"
+
+msgid "changes applied"
+msgstr "changements appliqués"
+
+msgid "click here to see created entity"
+msgstr "cliquez ici pour voir l'entité créée"
+
+msgid "click here to see edited entity"
+msgstr "cliquez ici pour voir l'entité modifiée"
+
+msgid "click on the box to cancel the deletion"
+msgstr "cliquez dans la zone d'édition pour annuler la suppression"
+
+msgid "click to add a value"
+msgstr "cliquer pour ajouter une valeur"
+
+msgid "click to delete this value"
+msgstr "cliquer pour supprimer cette valeur"
+
+msgid "click to edit this field"
+msgstr "cliquez pour éditer ce champ"
+
+msgid "close all"
+msgstr "tout fermer"
+
+msgid "comment"
+msgstr "commentaire"
+
+msgctxt "TrInfo"
+msgid "comment"
+msgstr "commentaire"
+
+msgid "comment_format"
+msgstr "format"
+
+msgctxt "TrInfo"
+msgid "comment_format"
+msgstr "format"
+
+msgid "components"
+msgstr "composants"
+
+msgid "components_navigation"
+msgstr "navigation par page"
+
+msgid "components_navigation_description"
+msgstr ""
+"composant permettant de présenter sur plusieurs pages les requêtes renvoyant "
+"plus d'un certain nombre de résultat"
+
+msgid "components_rqlinput"
+msgstr "barre rql"
+
+msgid "components_rqlinput_description"
+msgstr "la barre de requête rql, dans l'en-tête de page"
+
+msgid "composite"
+msgstr "composite"
+
+msgctxt "CWRelation"
+msgid "composite"
+msgstr "composite"
+
+msgid "condition"
+msgstr "condition"
+
+msgctxt "BaseTransition"
+msgid "condition"
+msgstr "condition"
+
+msgctxt "Transition"
+msgid "condition"
+msgstr "condition"
+
+msgctxt "WorkflowTransition"
+msgid "condition"
+msgstr "condition"
+
+msgid "condition_object"
+msgstr "condition de"
+
+msgctxt "RQLExpression"
+msgid "condition_object"
+msgstr "condition de"
+
+msgid "conditions"
+msgstr "conditions"
+
+msgid "config"
+msgstr "configuration"
+
+msgctxt "CWSource"
+msgid "config"
+msgstr "configuration"
+
+msgctxt "CWSourceHostConfig"
+msgid "config"
+msgstr "configuration"
+
+msgid "config mode"
+msgstr "mode de configuration"
+
+msgid "config type"
+msgstr "type de configuration"
+
+msgid "confirm password"
+msgstr "confirmer le mot de passe"
+
+msgid "constrained_by"
+msgstr "contraint par"
+
+msgctxt "CWAttribute"
+msgid "constrained_by"
+msgstr "contraint par"
+
+msgctxt "CWRelation"
+msgid "constrained_by"
+msgstr "contraint par"
+
+msgid "constrained_by_object"
+msgstr "contrainte de"
+
+msgctxt "CWConstraint"
+msgid "constrained_by_object"
+msgstr "contrainte de"
+
+msgid "constraint factory"
+msgstr "fabrique de contraintes"
+
+msgid "constraint_of"
+msgstr "contrainte de"
+
+msgctxt "CWUniqueTogetherConstraint"
+msgid "constraint_of"
+msgstr "contrainte de"
+
+msgid "constraint_of_object"
+msgstr "contraint par"
+
+msgctxt "CWEType"
+msgid "constraint_of_object"
+msgstr "contraint par"
+
+msgid "constraints"
+msgstr "contraintes"
+
+msgid "constraints applying on this relation"
+msgstr "contraintes s'appliquant à cette relation"
+
+msgid "content type"
+msgstr "type MIME"
+
+msgid "context"
+msgstr "contexte"
+
+msgid "context where this box should be displayed"
+msgstr "contexte dans lequel la boite devrait être affichée"
+
+msgid "context where this component should be displayed"
+msgstr "contexte où ce composant doit être affiché"
+
+msgid "context where this facet should be displayed, leave empty for both"
+msgstr ""
+"contexte où cette facette doit être affichée. Laissez ce champ vide pour "
+"l'avoir dans les deux."
+
+msgid "control subject entity's relations order"
+msgstr "contrôle l'ordre des relations de l'entité sujet"
+
+msgid "copy"
+msgstr "copier"
+
+msgid "core relation indicating a user's groups"
+msgstr ""
+"relation système indiquant les groupes auxquels appartient l'utilisateur"
+
+msgid ""
+"core relation indicating owners of an entity. This relation implicitly put "
+"the owner into the owners group for the entity"
+msgstr ""
+"relation système indiquant le(s) propriétaire(s) d'une entité. Cette "
+"relation place implicitement les utilisateurs liés dans le groupe des "
+"propriétaires pour cette entité"
+
+msgid "core relation indicating the original creator of an entity"
+msgstr "relation système indiquant le créateur d'une entité."
+
+msgid "core relation indicating the type of an entity"
+msgstr "relation système indiquant le type de l'entité"
+
+msgid ""
+"core relation indicating the types (including specialized types) of an entity"
+msgstr ""
+"relation système indiquant les types (y compris les types parents) d'une "
+"entité"
+
+msgid "could not connect to the SMTP server"
+msgstr "impossible de se connecter au serveur SMTP"
+
+msgid "create an index for quick search on this attribute"
+msgstr "créer un index pour accélérer les recherches sur cet attribut"
+
+msgid "created on"
+msgstr "créé le"
+
+msgid "created_by"
+msgstr "créé par"
+
+msgid "created_by_object"
+msgstr "a créé"
+
+msgid "creating Bookmark (Bookmark bookmarked_by CWUser %(linkto)s)"
+msgstr "création d'un signet pour %(linkto)s"
+
+msgid "creating CWAttribute (CWAttribute relation_type CWRType %(linkto)s)"
+msgstr "création d'un attribut %(linkto)s"
+
+msgid ""
+"creating CWConstraint (CWAttribute %(linkto)s constrained_by CWConstraint)"
+msgstr "création d'une contrainte pour l'attribut %(linkto)s"
+
+msgid ""
+"creating CWConstraint (CWRelation %(linkto)s constrained_by CWConstraint)"
+msgstr "création d'une contrainte pour la relation %(linkto)s"
+
+msgid "creating CWProperty (CWProperty for_user CWUser %(linkto)s)"
+msgstr "création d'une propriété pour l'utilisateur %(linkto)s"
+
+msgid "creating CWRelation (CWRelation relation_type CWRType %(linkto)s)"
+msgstr "création relation %(linkto)s"
+
+msgid ""
+"creating CWSourceHostConfig (CWSourceHostConfig cw_host_config_of CWSource "
+"%(linkto)s)"
+msgstr "création d'une configuration d'hôte pour la source %(linkto)s"
+
+msgid ""
+"creating CWUniqueTogetherConstraint (CWUniqueTogetherConstraint "
+"constraint_of CWEType %(linkto)s)"
+msgstr "création d'une contrainte unique_together sur %(linkto)s"
+
+msgid "creating CWUser (CWUser in_group CWGroup %(linkto)s)"
+msgstr "création d'un utilisateur à rajouter au groupe %(linkto)s"
+
+msgid "creating EmailAddress (CWUser %(linkto)s use_email EmailAddress)"
+msgstr "création d'une adresse électronique pour l'utilisateur %(linkto)s"
+
+msgid ""
+"creating RQLExpression (CWAttribute %(linkto)s add_permission RQLExpression)"
+msgstr "création d'une expression rql pour le droit d'ajout de %(linkto)s"
+
+msgid ""
+"creating RQLExpression (CWAttribute %(linkto)s read_permission RQLExpression)"
+msgstr "création d'une expression rql pour le droit de lecture de %(linkto)s"
+
+msgid ""
+"creating RQLExpression (CWAttribute %(linkto)s update_permission "
+"RQLExpression)"
+msgstr ""
+"création d'une expression rql pour le droit de mise à jour de %(linkto)s"
+
+msgid ""
+"creating RQLExpression (CWComputedRType %(linkto)s read_permission "
+"RQLExpression)"
+msgstr "création d'une expression rql pour le droit en lecture de %(linkto)s"
+
+msgid ""
+"creating RQLExpression (CWEType %(linkto)s add_permission RQLExpression)"
+msgstr "création d'une expression RQL pour la permission d'ajout de %(linkto)s"
+
+msgid ""
+"creating RQLExpression (CWEType %(linkto)s delete_permission RQLExpression)"
+msgstr ""
+"création d'une expression RQL pour la permission de suppression de %(linkto)s"
+
+msgid ""
+"creating RQLExpression (CWEType %(linkto)s read_permission RQLExpression)"
+msgstr "création d'une expression RQL pour la permission de lire %(linkto)s"
+
+msgid ""
+"creating RQLExpression (CWEType %(linkto)s update_permission RQLExpression)"
+msgstr ""
+"création d'une expression RQL pour la permission de mise à jour de %(linkto)s"
+
+msgid ""
+"creating RQLExpression (CWRelation %(linkto)s add_permission RQLExpression)"
+msgstr "création d'une expression rql pour le droit d'ajout de %(linkto)s"
+
+msgid ""
+"creating RQLExpression (CWRelation %(linkto)s delete_permission "
+"RQLExpression)"
+msgstr ""
+"création d'une expression rql pour le droit de suppression de %(linkto)s"
+
+msgid ""
+"creating RQLExpression (CWRelation %(linkto)s read_permission RQLExpression)"
+msgstr "création d'une expression rql pour le droit de lecture de %(linkto)s"
+
+msgid "creating RQLExpression (Transition %(linkto)s condition RQLExpression)"
+msgstr "création d'une expression RQL pour la transition %(linkto)s"
+
+msgid ""
+"creating RQLExpression (WorkflowTransition %(linkto)s condition "
+"RQLExpression)"
+msgstr "création d'une expression RQL pour la transition workflow %(linkto)s"
+
+msgid "creating State (State allowed_transition Transition %(linkto)s)"
+msgstr "création d'un état pouvant aller vers la transition %(linkto)s"
+
+msgid "creating State (State state_of Workflow %(linkto)s)"
+msgstr "création d'un état du workflow %(linkto)s"
+
+msgid "creating State (Transition %(linkto)s destination_state State)"
+msgstr "création d'un état destination de la transition %(linkto)s"
+
+msgid ""
+"creating SubWorkflowExitPoint (WorkflowTransition %(linkto)s "
+"subworkflow_exit SubWorkflowExitPoint)"
+msgstr "création d'un point de sortie de la transition workflow %(linkto)s"
+
+msgid "creating Transition (State %(linkto)s allowed_transition Transition)"
+msgstr "création d'une transition autorisée depuis l'état %(linkto)s"
+
+msgid "creating Transition (Transition destination_state State %(linkto)s)"
+msgstr "création d'une transition vers l'état %(linkto)s"
+
+msgid "creating Transition (Transition transition_of Workflow %(linkto)s)"
+msgstr "création d'une transition du workflow %(linkto)s"
+
+msgid ""
+"creating WorkflowTransition (State %(linkto)s allowed_transition "
+"WorkflowTransition)"
+msgstr "création d'une transition workflow autorisée depuis l'état %(linkto)s"
+
+msgid ""
+"creating WorkflowTransition (WorkflowTransition transition_of Workflow "
+"%(linkto)s)"
+msgstr "création d'une transition workflow du workflow %(linkto)s"
+
+msgid "creation"
+msgstr "création"
+
+msgid "creation date"
+msgstr "date de création"
+
+msgid "creation time of an entity"
+msgstr "date de création d'une entité"
+
+msgid "creation_date"
+msgstr "date de création"
+
+msgid "cstrtype"
+msgstr "type de contrainte"
+
+msgctxt "CWConstraint"
+msgid "cstrtype"
+msgstr "type"
+
+msgid "cstrtype_object"
+msgstr "utilisé par"
+
+msgctxt "CWConstraintType"
+msgid "cstrtype_object"
+msgstr "type des contraintes"
+
+msgid "csv export"
+msgstr "export CSV"
+
+msgid "csv export (entities)"
+msgstr "export CSV (entités)"
+
+msgid "ctxcomponents"
+msgstr "composants contextuels"
+
+msgid "ctxcomponents_anonuserlink"
+msgstr "lien utilisateur"
+
+msgid "ctxcomponents_anonuserlink_description"
+msgstr ""
+"affiche un lien vers le formulaire d'authentification pour les utilisateurs "
+"anonymes, sinon une boite contenant notamment des liens propres à "
+"l'utilisateur connecté"
+
+msgid "ctxcomponents_appliname"
+msgstr "titre de l'application"
+
+msgid "ctxcomponents_appliname_description"
+msgstr "affiche le titre de l'application dans l'en-tête de page"
+
+msgid "ctxcomponents_bookmarks_box"
+msgstr "boîte signets"
+
+msgid "ctxcomponents_bookmarks_box_description"
+msgstr "boîte contenant les signets de l'utilisateur"
+
+msgid "ctxcomponents_breadcrumbs"
+msgstr "fil d'ariane"
+
+msgid "ctxcomponents_breadcrumbs_description"
+msgstr ""
+"affiche un chemin permettant de localiser la page courante dans le site"
+
+msgid "ctxcomponents_download_box"
+msgstr "boîte de téléchargement"
+
+msgid "ctxcomponents_download_box_description"
+msgstr "boîte contenant un lien permettant de télécharger la ressource"
+
+msgid "ctxcomponents_edit_box"
+msgstr "boîte d'actions"
+
+msgid "ctxcomponents_edit_box_description"
+msgstr ""
+"boîte affichant les différentes actions possibles sur les données affichées"
+
+msgid "ctxcomponents_facet.filterbox"
+msgstr "boîte à facettes"
+
+msgid "ctxcomponents_facet.filterbox_description"
+msgstr ""
+"boîte permettant de filtrer parmi les résultats d'une recherche à l'aide de "
+"facettes"
+
+msgid "ctxcomponents_logo"
+msgstr "logo"
+
+msgid "ctxcomponents_logo_description"
+msgstr "le logo de l'application, dans l'en-tête de page"
+
+msgid "ctxcomponents_metadata"
+msgstr "méta-données de l'entité"
+
+msgid "ctxcomponents_metadata_description"
+msgstr ""
+
+msgid "ctxcomponents_possible_views_box"
+msgstr "boîte des vues possibles"
+
+msgid "ctxcomponents_possible_views_box_description"
+msgstr "boîte affichant les vues possibles pour les données courantes"
+
+msgid "ctxcomponents_prevnext"
+msgstr "élément précédent / suivant"
+
+msgid "ctxcomponents_prevnext_description"
+msgstr ""
+"affiche des liens permettant de passer d'une entité à une autre sur les "
+"entités implémentant l'interface \"précédent/suivant\"."
+
+msgid "ctxcomponents_rss"
+msgstr "icône RSS"
+
+msgid "ctxcomponents_rss_description"
+msgstr "l'icône RSS permettant de récupérer la vue RSS des données affichées"
+
+msgid "ctxcomponents_search_box"
+msgstr "boîte de recherche"
+
+msgid "ctxcomponents_search_box_description"
+msgstr "boîte avec un champ de recherche simple"
+
+msgid "ctxcomponents_startup_views_box"
+msgstr "boîte des vues de départs"
+
+msgid "ctxcomponents_startup_views_box_description"
+msgstr "boîte affichant les vues de départs de l'application"
+
+msgid "ctxcomponents_userstatus"
+msgstr "état de l'utilisateur"
+
+msgid "ctxcomponents_userstatus_description"
+msgstr ""
+
+msgid "ctxcomponents_wfhistory"
+msgstr "historique du workflow."
+
+msgid "ctxcomponents_wfhistory_description"
+msgstr ""
+"section affichant l'historique du workflow pour les entités ayant un "
+"workflow."
+
+msgid "ctxtoolbar"
+msgstr "barre d'outils"
+
+msgid "custom_workflow"
+msgstr "workflow spécifique"
+
+msgid "custom_workflow_object"
+msgstr "workflow de"
+
+msgid "cw.groups-management"
+msgstr "groupes"
+
+msgid "cw.users-management"
+msgstr "utilisateurs"
+
+msgid "cw_for_source"
+msgstr "source"
+
+msgctxt "CWSourceSchemaConfig"
+msgid "cw_for_source"
+msgstr "source"
+
+msgid "cw_for_source_object"
+msgstr "élément de mapping"
+
+msgctxt "CWSource"
+msgid "cw_for_source_object"
+msgstr "élément de mapping"
+
+msgid "cw_host_config_of"
+msgstr "host configuration of"
+
+msgctxt "CWSourceHostConfig"
+msgid "cw_host_config_of"
+msgstr "host configuration of"
+
+msgid "cw_host_config_of_object"
+msgstr "has host configuration"
+
+msgctxt "CWSource"
+msgid "cw_host_config_of_object"
+msgstr "has host configuration"
+
+msgid "cw_import_of"
+msgstr "source"
+
+msgctxt "CWDataImport"
+msgid "cw_import_of"
+msgstr "source"
+
+msgid "cw_import_of_object"
+msgstr "imports"
+
+msgctxt "CWSource"
+msgid "cw_import_of_object"
+msgstr "imports"
+
+msgid "cw_schema"
+msgstr "schéma"
+
+msgctxt "CWSourceSchemaConfig"
+msgid "cw_schema"
+msgstr "schéma"
+
+msgid "cw_schema_object"
+msgstr "mappé par"
+
+msgctxt "CWEType"
+msgid "cw_schema_object"
+msgstr "mappé par"
+
+msgctxt "CWRType"
+msgid "cw_schema_object"
+msgstr "mappé par"
+
+msgctxt "CWRelation"
+msgid "cw_schema_object"
+msgstr "mappé par"
+
+msgid "cw_source"
+msgstr "source"
+
+msgid "cw_source_object"
+msgstr "entités"
+
+msgid "cwetype-box"
+msgstr "vue \"boîte\""
+
+msgid "cwetype-description"
+msgstr "description"
+
+msgid "cwetype-permissions"
+msgstr "permissions"
+
+msgid "cwetype-views"
+msgstr "vues"
+
+msgid "cwetype-workflow"
+msgstr "workflow"
+
+msgid "cwgroup-main"
+msgstr "description"
+
+msgid "cwgroup-permissions"
+msgstr "permissions"
+
+msgid "cwrtype-description"
+msgstr "description"
+
+msgid "cwrtype-permissions"
+msgstr "permissions"
+
+msgid "cwsource-imports"
+msgstr "imports"
+
+msgid "cwsource-main"
+msgstr "description"
+
+msgid "cwsource-mapping"
+msgstr "mapping"
+
+msgid "cwuri"
+msgstr "uri interne"
+
+msgid "data directory url"
+msgstr "url du répertoire de données"
+
+msgid "data model schema"
+msgstr "schéma du modèle de données"
+
+msgid "data sources"
+msgstr "sources de données"
+
+msgid "data sources management"
+msgstr "gestion des sources de données"
+
+msgid "date"
+msgstr "date"
+
+msgid "deactivate"
+msgstr "désactiver"
+
+msgid "deactivated"
+msgstr "désactivé"
+
+msgid "december"
+msgstr "décembre"
+
+msgid "default"
+msgstr "valeur par défaut"
+
+msgid "default text format for rich text fields."
+msgstr "format de texte par défaut pour les champs textes"
+
+msgid "default user workflow"
+msgstr "workflow par défaut des utilisateurs"
+
+msgid "default value"
+msgstr "valeur par défaut"
+
+msgid "default value as gziped pickled python object"
+msgstr "valeur par défaut, sous forme d'objet python picklé zippé"
+
+msgid "default workflow for an entity type"
+msgstr "workflow par défaut pour un type d'entité"
+
+msgid "default_workflow"
+msgstr "workflow par défaut"
+
+msgctxt "CWEType"
+msgid "default_workflow"
+msgstr "workflow par défaut"
+
+msgid "default_workflow_object"
+msgstr "workflow par défaut de"
+
+msgctxt "Workflow"
+msgid "default_workflow_object"
+msgstr "workflow par défaut de"
+
+msgid "defaultval"
+msgstr "valeur par défaut"
+
+msgctxt "CWAttribute"
+msgid "defaultval"
+msgstr "valeur par défaut"
+
+msgid "define a CubicWeb user"
+msgstr "définit un utilisateur CubicWeb"
+
+msgid "define a CubicWeb users group"
+msgstr "définit un groupe d'utilisateurs CubicWeb"
+
+msgid ""
+"define a final relation: link a final relation type from a non final entity "
+"to a final entity type. used to build the instance schema"
+msgstr ""
+"définit une relation finale : lie un type de relation finale depuis une "
+"entité non finale vers un type d'entité final. Utilisé pour construire le "
+"schéma de l'instance"
+
+msgid ""
+"define a non final relation: link a non final relation type from a non final "
+"entity to a non final entity type. used to build the instance schema"
+msgstr ""
+"définit une relation non finale : lie un type de relation non finale depuis "
+"une entité non finale vers un type d'entité non final. Utilisé pour "
+"construire le schéma de l'instance"
+
+msgid "define a relation type, used to build the instance schema"
+msgstr "définit un type de relation"
+
+msgid "define a rql expression used to define permissions"
+msgstr "définit une expression rql donnant une permission"
+
+msgid "define a schema constraint"
+msgstr "définit une contrainte de schéma"
+
+msgid "define a schema constraint type"
+msgstr "définit un type de contrainte de schéma"
+
+msgid "define a virtual relation type, used to build the instance schema"
+msgstr "définit une relation virtuelle"
+
+msgid "define an entity type, used to build the instance schema"
+msgstr "définit un type d'entité"
+
+msgid "define how we get out from a sub-workflow"
+msgstr "définit comment sortir d'un sous-workflow"
+
+msgid "defines a sql-level multicolumn unique index"
+msgstr "définit un index SQL unique sur plusieurs colonnes"
+
+msgid ""
+"defines what's the property is applied for. You must select this first to be "
+"able to set value"
+msgstr ""
+"définit à quoi la propriété est appliquée. Vous devez sélectionner cela "
+"avant de pouvoir fixer une valeur"
+
+msgid "delete"
+msgstr "supprimer"
+
+msgid "delete this bookmark"
+msgstr "supprimer ce signet"
+
+msgid "delete this relation"
+msgstr "supprimer cette relation"
+
+msgid "delete_permission"
+msgstr "permission de supprimer"
+
+msgctxt "CWEType"
+msgid "delete_permission"
+msgstr "permission de supprimer"
+
+msgctxt "CWRelation"
+msgid "delete_permission"
+msgstr "permission de supprimer"
+
+msgid "delete_permission_object"
+msgstr "a la permission de supprimer"
+
+msgctxt "CWGroup"
+msgid "delete_permission_object"
+msgstr "peut supprimer"
+
+msgctxt "RQLExpression"
+msgid "delete_permission_object"
+msgstr "peut supprimer"
+
+#, python-format
+msgid "deleted %(etype)s #%(eid)s (%(title)s)"
+msgstr "suppression de l'entité %(etype)s #%(eid)s (%(title)s)"
+
+#, python-format
+msgid ""
+"deleted relation %(rtype)s from %(frometype)s #%(eidfrom)s to %(toetype)s #"
+"%(eidto)s"
+msgstr ""
+"relation %(rtype)s de %(frometype)s #%(eidfrom)s vers %(toetype)s #%(eidto)s "
+"supprimée"
+
+msgid "depends on the constraint type"
+msgstr "dépend du type de contrainte"
+
+msgid "description"
+msgstr "description"
+
+msgctxt "BaseTransition"
+msgid "description"
+msgstr "description"
+
+msgctxt "CWAttribute"
+msgid "description"
+msgstr "description"
+
+msgctxt "CWComputedRType"
+msgid "description"
+msgstr "description"
+
+msgctxt "CWEType"
+msgid "description"
+msgstr "description"
+
+msgctxt "CWRType"
+msgid "description"
+msgstr "description"
+
+msgctxt "CWRelation"
+msgid "description"
+msgstr "description"
+
+msgctxt "State"
+msgid "description"
+msgstr "description"
+
+msgctxt "Transition"
+msgid "description"
+msgstr "description"
+
+msgctxt "Workflow"
+msgid "description"
+msgstr "description"
+
+msgctxt "WorkflowTransition"
+msgid "description"
+msgstr "description"
+
+msgid "description_format"
+msgstr "format"
+
+msgctxt "BaseTransition"
+msgid "description_format"
+msgstr "format"
+
+msgctxt "CWAttribute"
+msgid "description_format"
+msgstr "format"
+
+msgctxt "CWComputedRType"
+msgid "description_format"
+msgstr "format"
+
+msgctxt "CWEType"
+msgid "description_format"
+msgstr "format"
+
+msgctxt "CWRType"
+msgid "description_format"
+msgstr "format"
+
+msgctxt "CWRelation"
+msgid "description_format"
+msgstr "format"
+
+msgctxt "State"
+msgid "description_format"
+msgstr "format"
+
+msgctxt "Transition"
+msgid "description_format"
+msgstr "format"
+
+msgctxt "Workflow"
+msgid "description_format"
+msgstr "format"
+
+msgctxt "WorkflowTransition"
+msgid "description_format"
+msgstr "format"
+
+msgid "destination state for this transition"
+msgstr "états accessibles par cette transition"
+
+msgid "destination state must be in the same workflow as our parent transition"
+msgstr ""
+"l'état de destination doit être dans le même workflow que la transition "
+"parente"
+
+msgid "destination state of a transition"
+msgstr "état d'arrivée d'une transition"
+
+msgid ""
+"destination state. No destination state means that transition should go back "
+"to the state from which we've entered the subworkflow."
+msgstr ""
+"état de destination de la transition. Si aucun état de destination n'est "
+"spécifié, la transition ira vers l'état depuis lequel l'entité est entrée "
+"dans le sous-workflow."
+
+msgid "destination_state"
+msgstr "état de destination"
+
+msgctxt "SubWorkflowExitPoint"
+msgid "destination_state"
+msgstr "état de destination"
+
+msgctxt "Transition"
+msgid "destination_state"
+msgstr "état de destination"
+
+msgid "destination_state_object"
+msgstr "destination de"
+
+msgctxt "State"
+msgid "destination_state_object"
+msgstr "état final de"
+
+msgid "detach attached file"
+msgstr "détacher le fichier existant"
+
+msgid "display order of the box"
+msgstr "ordre d'affichage de la boîte"
+
+msgid "display order of the component"
+msgstr "ordre d'affichage du composant"
+
+msgid "display order of the facet"
+msgstr "ordre d'affichage de la facette"
+
+msgid "display the box or not"
+msgstr "afficher la boîte ou non"
+
+msgid "display the component or not"
+msgstr "afficher le composant ou non"
+
+msgid "display the facet or not"
+msgstr "afficher la facette ou non"
+
+msgid "download"
+msgstr "télécharger"
+
+#, python-format
+msgid "download %s"
+msgstr "télécharger %s"
+
+msgid "download icon"
+msgstr "icône de téléchargement"
+
+msgid "download schema as owl"
+msgstr "télécharger le schéma OWL"
+
+msgid "edit bookmarks"
+msgstr "éditer les signets"
+
+msgid "editable-table"
+msgstr "table éditable"
+
+msgid "eid"
+msgstr "eid"
+
+msgid "embedded html"
+msgstr "HTML contenu"
+
+msgid "end_timestamp"
+msgstr "horodate de fin"
+
+msgctxt "CWDataImport"
+msgid "end_timestamp"
+msgstr "horodate de fin"
+
+msgid "entities deleted"
+msgstr "entités supprimées"
+
+msgid "entity and relation types can't be mapped, only attributes or relations"
+msgstr ""
+"les types d'entités et de relations ne peuvent être mappés, uniquement les "
+"attributs et les relations"
+
+msgid "entity copied"
+msgstr "entité copiée"
+
+msgid "entity created"
+msgstr "entité créée"
+
+msgid "entity creation"
+msgstr "création d'entité"
+
+msgid "entity deleted"
+msgstr "entité supprimée"
+
+msgid "entity deletion"
+msgstr "suppression d'entité"
+
+msgid "entity edited"
+msgstr "entité éditée"
+
+msgid "entity has no workflow set"
+msgstr "l'entité n'a pas de workflow"
+
+msgid "entity linked"
+msgstr "entité liée"
+
+msgid "entity type"
+msgstr "type d'entité"
+
+msgid "entity types which may use this workflow"
+msgstr "types d'entité pouvant utiliser ce workflow"
+
+msgid "entity update"
+msgstr "mise à jour d'entité"
+
+msgid "entityview"
+msgstr "vues d'entité"
+
+msgid "error"
+msgstr "erreur"
+
+msgid "error while publishing ReST text"
+msgstr ""
+"une erreur s'est produite lors de l'interprétation du texte au format ReST"
+
+msgid "exit state must be a subworkflow state"
+msgstr "l'état de sortie doit être un état du sous-workflow"
+
+msgid "exit_point"
+msgstr "état de sortie"
+
+msgid "exit_point_object"
+msgstr "état de sortie de"
+
+#, python-format
+msgid "exiting from subworkflow %s"
+msgstr "sortie du sous-workflow %s"
+
+msgid "expression"
+msgstr "expression"
+
+msgctxt "RQLExpression"
+msgid "expression"
+msgstr "rql de l'expression"
+
+msgid "exprtype"
+msgstr "type de l'expression"
+
+msgctxt "RQLExpression"
+msgid "exprtype"
+msgstr "type"
+
+msgid "extra_props"
+msgstr ""
+
+msgctxt "CWAttribute"
+msgid "extra_props"
+msgstr "propriétés additionnelles"
+
+msgid "facet-loading-msg"
+msgstr "en cours de traitement, merci de patienter"
+
+msgid "facet.filters"
+msgstr "facettes"
+
+msgid "facetbox"
+msgstr "boîte à facettes"
+
+msgid "facets_created_by-facet"
+msgstr "facette \"créé par\""
+
+msgid "facets_created_by-facet_description"
+msgstr ""
+
+msgid "facets_cw_source-facet"
+msgstr "facette \"source de données\""
+
+msgid "facets_cw_source-facet_description"
+msgstr ""
+
+msgid "facets_cwfinal-facet"
+msgstr "facette \"type d'entité ou de relation final\""
+
+msgid "facets_cwfinal-facet_description"
+msgstr ""
+
+msgid "facets_datafeed.dataimport.status"
+msgstr "état de l'import"
+
+msgid "facets_datafeed.dataimport.status_description"
+msgstr ""
+
+msgid "facets_etype-facet"
+msgstr "facette \"est de type\""
+
+msgid "facets_etype-facet_description"
+msgstr ""
+
+msgid "facets_has_text-facet"
+msgstr "facette \"contient le texte\""
+
+msgid "facets_has_text-facet_description"
+msgstr ""
+
+msgid "facets_in_group-facet"
+msgstr "facette \"fait partie du groupe\""
+
+msgid "facets_in_group-facet_description"
+msgstr ""
+
+msgid "facets_in_state-facet"
+msgstr "facette \"dans l'état\""
+
+msgid "facets_in_state-facet_description"
+msgstr ""
+
+msgid "failed"
+msgstr "échec"
+
+#, python-format
+msgid "failed to uniquify path (%s, %s)"
+msgstr "ne peut obtenir un nom de fichier unique (%s, %s)"
+
+msgid "february"
+msgstr "février"
+
+msgid "file tree view"
+msgstr "arborescence (fichiers)"
+
+msgid "final"
+msgstr "final"
+
+msgctxt "CWEType"
+msgid "final"
+msgstr "final"
+
+msgctxt "CWRType"
+msgid "final"
+msgstr "final"
+
+msgid "first name"
+msgstr "prénom"
+
+msgid "firstname"
+msgstr "prénom"
+
+msgctxt "CWUser"
+msgid "firstname"
+msgstr "prénom"
+
+msgid "foaf"
+msgstr "foaf"
+
+msgid "focus on this selection"
+msgstr "afficher cette sélection"
+
+msgid "follow"
+msgstr "suivre le lien"
+
+#, python-format
+msgid "follow this link for more information on this %s"
+msgstr "suivez ce lien pour plus d'information sur ce %s"
+
+msgid "for_user"
+msgstr "pour l'utilisateur"
+
+msgctxt "CWProperty"
+msgid "for_user"
+msgstr "propriété de l'utilisateur"
+
+msgid "for_user_object"
+msgstr "utilise les propriétés"
+
+msgctxt "CWUser"
+msgid "for_user_object"
+msgstr "a pour préférence"
+
+msgid "formula"
+msgstr "formule"
+
+msgctxt "CWAttribute"
+msgid "formula"
+msgstr "formule"
+
+msgid "friday"
+msgstr "vendredi"
+
+msgid "from"
+msgstr "de"
+
+#, python-format
+msgid "from %(date)s"
+msgstr "du %(date)s"
+
+msgid "from_entity"
+msgstr "de l'entité"
+
+msgctxt "CWAttribute"
+msgid "from_entity"
+msgstr "attribut de l'entité"
+
+msgctxt "CWRelation"
+msgid "from_entity"
+msgstr "relation de l'entité"
+
+msgid "from_entity_object"
+msgstr "relation sujet"
+
+msgctxt "CWEType"
+msgid "from_entity_object"
+msgstr "entité de"
+
+msgid "from_interval_start"
+msgstr "De"
+
+msgid "from_state"
+msgstr "de l'état"
+
+msgctxt "TrInfo"
+msgid "from_state"
+msgstr "état de départ"
+
+msgid "from_state_object"
+msgstr "transitions depuis cet état"
+
+msgctxt "State"
+msgid "from_state_object"
+msgstr "état de départ de"
+
+msgid "full text or RQL query"
+msgstr "texte à rechercher ou requête RQL"
+
+msgid "fulltext_container"
+msgstr "conteneur du texte indexé"
+
+msgctxt "CWRType"
+msgid "fulltext_container"
+msgstr "objet à indexer"
+
+msgid "fulltextindexed"
+msgstr "indexation du texte"
+
+msgctxt "CWAttribute"
+msgid "fulltextindexed"
+msgstr "texte indexé"
+
+msgid "gc"
+msgstr "fuite mémoire"
+
+msgid "generic plot"
+msgstr "tracé de courbes standard"
+
+msgid "generic relation to link one entity to another"
+msgstr "relation générique pour lier une entité à une autre"
+
+msgid ""
+"generic relation to specify that an external entity represent the same "
+"object as a local one: http://www.w3.org/TR/owl-ref/#sameAs-def"
+msgstr ""
+"relation générique permettant d'indiquer qu'une entité est identique à une "
+"autre ressource web (voir http://www.w3.org/TR/owl-ref/#sameAs-def)."
+
+msgid "granted to groups"
+msgstr "accordée aux groupes"
+
+#, python-format
+msgid "graphical representation of %(appid)s data model"
+msgstr "représentation graphique du modèle de données de %(appid)s"
+
+#, python-format
+msgid ""
+"graphical representation of the %(etype)s entity type from %(appid)s data "
+"model"
+msgstr ""
+"représentation graphique du modèle de données pour le type d'entité "
+"%(etype)s de %(appid)s"
+
+#, python-format
+msgid ""
+"graphical representation of the %(rtype)s relation type from %(appid)s data "
+"model"
+msgstr ""
+"représentation graphique du modèle de données pour le type de relation "
+"%(rtype)s de %(appid)s"
+
+msgid "group in which a user should be to be allowed to pass this transition"
+msgstr ""
+"groupe dans lequel l'utilisateur doit être pour pouvoir passer la transition"
+
+msgid "groups"
+msgstr "groupes"
+
+msgid "groups allowed to add entities/relations of this type"
+msgstr "groupes autorisés à ajouter des entités/relations de ce type"
+
+msgid "groups allowed to delete entities/relations of this type"
+msgstr "groupes autorisés à supprimer des entités/relations de ce type"
+
+msgid "groups allowed to read entities/relations of this type"
+msgstr "groupes autorisés à lire des entités/relations de ce type"
+
+msgid "groups allowed to update entities/relations of this type"
+msgstr "groupes autorisés à mettre à jour des entités/relations de ce type"
+
+msgid "groups grant permissions to the user"
+msgstr "les groupes donnent des permissions à l'utilisateur"
+
+msgid "guests"
+msgstr "invités"
+
+msgid "hCalendar"
+msgstr "hCalendar"
+
+msgid "has_text"
+msgstr "contient le texte"
+
+msgid "header-center"
+msgstr "en-tête (centre)"
+
+msgid "header-left"
+msgstr "en-tête (gauche)"
+
+msgid "header-right"
+msgstr "en-tête (droite)"
+
+msgid "hide filter form"
+msgstr "cacher le filtre"
+
+msgid ""
+"how to format date and time in the ui (see this page for format "
+"description)"
+msgstr ""
+"comment formater l'horodate dans l'interface (description du "
+"format)"
+
+msgid ""
+"how to format date in the ui (see this page for format "
+"description)"
+msgstr ""
+"comment formater la date dans l'interface (description du format)"
+
+msgid "how to format float numbers in the ui"
+msgstr "comment formater les nombres flottants dans l'interface"
+
+msgid ""
+"how to format time in the ui (see this page for format "
+"description)"
+msgstr ""
+"comment formater l'heure dans l'interface (description du format)"
+
+msgid "i18n_bookmark_url_fqs"
+msgstr "paramètres"
+
+msgid "i18n_bookmark_url_path"
+msgstr "chemin"
+
+msgid "i18n_login_popup"
+msgstr "s'identifier"
+
+msgid "i18ncard_*"
+msgstr "0..n"
+
+msgid "i18ncard_+"
+msgstr "1..n"
+
+msgid "i18ncard_1"
+msgstr "1"
+
+msgid "i18ncard_?"
+msgstr "0..1"
+
+msgid "i18nprevnext_next"
+msgstr "suivant"
+
+msgid "i18nprevnext_previous"
+msgstr "précédent"
+
+msgid "i18nprevnext_up"
+msgstr "parent"
+
+msgid "iCalendar"
+msgstr "iCalendar"
+
+msgid "id of main template used to render pages"
+msgstr "id du template principal"
+
+msgid "identical to"
+msgstr "identique à"
+
+msgid "identical_to"
+msgstr "identique à"
+
+msgid "identity"
+msgstr "est identique à"
+
+msgid "identity_object"
+msgstr "est identique à"
+
+msgid ""
+"if full text content of subject/object entity should be added to other side "
+"entity (the container)."
+msgstr ""
+"si le texte indexé de l'entité sujet/objet doit être ajouté à l'entité à "
+"l'autre extrémité de la relation (le conteneur)."
+
+msgid "image"
+msgstr "image"
+
+msgid "in progress"
+msgstr "en cours"
+
+msgid "in_group"
+msgstr "dans le groupe"
+
+msgctxt "CWUser"
+msgid "in_group"
+msgstr "fait partie du groupe"
+
+msgid "in_group_object"
+msgstr "membres"
+
+msgctxt "CWGroup"
+msgid "in_group_object"
+msgstr "contient les utilisateurs"
+
+msgid "in_state"
+msgstr "état"
+
+msgid "in_state_object"
+msgstr "état de"
+
+msgid "in_synchronization"
+msgstr "en cours de synchronisation"
+
+msgctxt "CWSource"
+msgid "in_synchronization"
+msgstr "en cours de synchronisation"
+
+msgid "incontext"
+msgstr "dans le contexte"
+
+msgid "incorrect captcha value"
+msgstr "valeur de captcha incorrecte"
+
+#, python-format
+msgid "incorrect value (%(KEY-value)r) for type \"%(KEY-type)s\""
+msgstr "la valeur %(KEY-value)r est incorrecte pour le type \"%(KEY-type)s\""
+
+msgid "index this attribute's value in the plain text index"
+msgstr "indexer la valeur de cet attribut dans l'index plein texte"
+
+msgid "indexed"
+msgstr "index"
+
+msgctxt "CWAttribute"
+msgid "indexed"
+msgstr "indexé"
+
+msgid "indicate the current state of an entity"
+msgstr "indique l'état courant d'une entité"
+
+msgid ""
+"indicate which state should be used by default when an entity using states "
+"is created"
+msgstr ""
+"indique quel état devrait être utilisé par défaut lorsqu'une entité est créée"
+
+msgid "indifferent"
+msgstr "indifférent"
+
+msgid "info"
+msgstr "information"
+
+msgid "initial state for this workflow"
+msgstr "état initial pour ce workflow"
+
+msgid "initial_state"
+msgstr "état initial"
+
+msgctxt "Workflow"
+msgid "initial_state"
+msgstr "état initial"
+
+msgid "initial_state_object"
+msgstr "état initial de"
+
+msgctxt "State"
+msgid "initial_state_object"
+msgstr "état initial de"
+
+msgid "inlined"
+msgstr "mise en ligne"
+
+msgctxt "CWRType"
+msgid "inlined"
+msgstr "mise en ligne"
+
+msgid "instance home"
+msgstr "répertoire de l'instance"
+
+msgid "internal entity uri"
+msgstr "uri interne"
+
+msgid "internationalizable"
+msgstr "internationalisable"
+
+msgctxt "CWAttribute"
+msgid "internationalizable"
+msgstr "internationalisable"
+
+#, python-format
+msgid "invalid action %r"
+msgstr "action %r invalide"
+
+#, python-format
+msgid "invalid value %(KEY-value)s, it must be one of %(KEY-choices)s"
+msgstr ""
+"la valeur %(KEY-value)s est incorrecte, elle doit être parmi %(KEY-choices)s"
+
+msgid "is"
+msgstr "de type"
+
+msgid "is object of:"
+msgstr "est objet de"
+
+msgid "is subject of:"
+msgstr "est sujet de"
+
+msgid ""
+"is the subject/object entity of the relation composed of the other ? This "
+"implies that when the composite is deleted, composants are also deleted."
+msgstr ""
+"Est-ce que l'entité sujet/objet de la relation est une agrégation de "
+"l'autre ? Si c'est le cas, détruire le composite détruira ses composants "
+"également"
+
+msgid "is this attribute's value translatable"
+msgstr "est-ce que la valeur de cet attribut est traduisible ?"
+
+msgid "is this relation equivalent in both direction ?"
+msgstr "est-ce que cette relation est équivalente dans les deux sens ?"
+
+msgid ""
+"is this relation physically inlined? you should know what you're doing if "
+"you are changing this!"
+msgstr ""
+"est-ce que cette relation est mise en ligne dans la base de données ? Vous "
+"devez savoir ce que vous faites si vous changez cela !"
+
+msgid "is_instance_of"
+msgstr "est une instance de"
+
+msgid "is_instance_of_object"
+msgstr "type de"
+
+msgid "is_object"
+msgstr "a pour instance"
+
+msgid "january"
+msgstr "janvier"
+
+msgid "json-entities-export-view"
+msgstr "export JSON (entités)"
+
+msgid "json-export-view"
+msgstr "export JSON"
+
+msgid "july"
+msgstr "juillet"
+
+msgid "june"
+msgstr "juin"
+
+msgid "language of the user interface"
+msgstr "langue pour l'interface utilisateur"
+
+msgid "last connection date"
+msgstr "dernière date de connexion"
+
+msgid "last login time"
+msgstr "dernière date de connexion"
+
+msgid "last name"
+msgstr "nom"
+
+msgid "last usage"
+msgstr "dernier usage"
+
+msgid "last_login_time"
+msgstr "dernière date de connexion"
+
+msgctxt "CWUser"
+msgid "last_login_time"
+msgstr "dernière date de connexion"
+
+msgid "latest import"
+msgstr "dernier import"
+
+msgid "latest modification time of an entity"
+msgstr "date de dernière modification d'une entité"
+
+msgid "latest synchronization time"
+msgstr "date de la dernière synchronisation"
+
+msgid "latest update on"
+msgstr "dernière mise à jour"
+
+msgid "latest_retrieval"
+msgstr "dernière synchronisation"
+
+msgctxt "CWSource"
+msgid "latest_retrieval"
+msgstr "date de la dernière synchronisation de la source."
+
+msgid "left"
+msgstr "gauche"
+
+msgid "line"
+msgstr "ligne"
+
+msgid ""
+"link a property to the user which want this property customization. Unless "
+"you're a site manager, this relation will be handled automatically."
+msgstr ""
+"lie une propriété à l'utilisateur désirant cette personnalisation. A moins "
+"que vous ne soyez gestionnaire du site, cette relation est gérée "
+"automatiquement."
+
+msgid "link a relation definition to its object entity type"
+msgstr "lie une définition de relation à son type d'entité objet"
+
+msgid "link a relation definition to its relation type"
+msgstr "lie une définition de relation à son type d'entité"
+
+msgid "link a relation definition to its subject entity type"
+msgstr "lie une définition de relation à son type d'entité sujet"
+
+msgid "link a state to one or more workflow"
+msgstr "lie un état à un ou plusieurs workflow"
+
+msgid "link a transition information to its object"
+msgstr "lie un enregistrement de transition vers l'objet associé"
+
+msgid "link a transition to one or more workflow"
+msgstr "lie une transition à un ou plusieurs workflow"
+
+msgid "link a workflow to one or more entity type"
+msgstr "lie un workflow à un ou plusieurs types d'entité"
+
+msgid "list"
+msgstr "liste"
+
+msgid "log"
+msgstr "journal"
+
+msgctxt "CWDataImport"
+msgid "log"
+msgstr "journal"
+
+msgid "log in"
+msgstr "s'identifier"
+
+msgid "login"
+msgstr "identifiant"
+
+msgctxt "CWUser"
+msgid "login"
+msgstr "identifiant"
+
+msgid "login / password"
+msgstr "identifiant / mot de passe"
+
+msgid "login or email"
+msgstr "identifiant ou email"
+
+msgid "login_action"
+msgstr "identifiez-vous"
+
+msgid "logout"
+msgstr "se déconnecter"
+
+#, python-format
+msgid "loop in %(rel)s relation (%(eid)s)"
+msgstr "boucle détectée en parcourant la relation %(rel)s de l'entité #%(eid)s"
+
+msgid "main informations"
+msgstr "Informations générales"
+
+msgid "main_tab"
+msgstr "description"
+
+msgid "mainvars"
+msgstr "variables principales"
+
+msgctxt "RQLExpression"
+msgid "mainvars"
+msgstr "variables principales"
+
+msgid "manage"
+msgstr "gestion du site"
+
+msgid "manage bookmarks"
+msgstr "gérer les signets"
+
+msgid "manage permissions"
+msgstr "gestion des permissions"
+
+msgid "managers"
+msgstr "administrateurs"
+
+msgid "mandatory relation"
+msgstr "relation obligatoire"
+
+msgid "march"
+msgstr "mars"
+
+msgid "match_host"
+msgstr "pour l'hôte"
+
+msgctxt "CWSourceHostConfig"
+msgid "match_host"
+msgstr "pour l'hôte"
+
+msgid "maximum number of characters in short description"
+msgstr "nombre maximum de caractères dans les descriptions courtes"
+
+msgid "maximum number of entities to display in related combo box"
+msgstr "nombre maximum d'entités à afficher dans les listes déroulantes"
+
+msgid "maximum number of objects displayed by page of results"
+msgstr "nombre maximum d'entités affichées par pages"
+
+msgid "maximum number of related entities to display in the primary view"
+msgstr "nombre maximum d'entités liées à afficher dans la vue primaire"
+
+msgid "may"
+msgstr "mai"
+
+msgid "memory leak debugging"
+msgstr "Débogage des fuites de mémoire"
+
+msgid "message"
+msgstr "message"
+
+#, python-format
+msgid "missing parameters for entity %s"
+msgstr "paramètres manquants pour l'entité %s"
+
+msgid "modification"
+msgstr "modification"
+
+msgid "modification_date"
+msgstr "date de modification"
+
+msgid "modify"
+msgstr "modifier"
+
+msgid "monday"
+msgstr "lundi"
+
+msgid "more actions"
+msgstr "plus d'actions"
+
+msgid "more info about this workflow"
+msgstr "plus d'information sur ce workflow"
+
+msgid "multiple edit"
+msgstr "édition multiple"
+
+msgid "my custom search"
+msgstr "ma recherche personnalisée"
+
+msgid "name"
+msgstr "nom"
+
+msgctxt "BaseTransition"
+msgid "name"
+msgstr "nom"
+
+msgctxt "CWCache"
+msgid "name"
+msgstr "nom"
+
+msgctxt "CWComputedRType"
+msgid "name"
+msgstr "nom"
+
+msgctxt "CWConstraintType"
+msgid "name"
+msgstr "nom"
+
+msgctxt "CWEType"
+msgid "name"
+msgstr "nom"
+
+msgctxt "CWGroup"
+msgid "name"
+msgstr "nom"
+
+msgctxt "CWRType"
+msgid "name"
+msgstr "nom"
+
+msgctxt "CWSource"
+msgid "name"
+msgstr "nom"
+
+msgctxt "CWUniqueTogetherConstraint"
+msgid "name"
+msgstr "nom"
+
+msgctxt "State"
+msgid "name"
+msgstr "nom"
+
+msgctxt "Transition"
+msgid "name"
+msgstr "nom"
+
+msgctxt "Workflow"
+msgid "name"
+msgstr "nom"
+
+msgctxt "WorkflowTransition"
+msgid "name"
+msgstr "nom"
+
+msgid "name of the cache"
+msgstr "nom du cache applicatif"
+
+msgid ""
+"name of the main variables which should be used in the selection if "
+"necessary (comma separated)"
+msgstr ""
+"nom des variables principales qui devrait être utilisées dans la sélection "
+"si nécessaire (les séparer par des virgules)"
+
+msgid "name of the source"
+msgstr "nom de la source"
+
+msgid "navbottom"
+msgstr "bas de page"
+
+msgid "navcontentbottom"
+msgstr "bas de page du contenu principal"
+
+msgid "navcontenttop"
+msgstr "haut de page"
+
+msgid "navigation"
+msgstr "navigation"
+
+msgid "navigation.combobox-limit"
+msgstr "nombre d'entités dans les listes déroulantes"
+
+msgid "navigation.page-size"
+msgstr "nombre de résultats"
+
+msgid "navigation.related-limit"
+msgstr "nombre d'entités dans la vue primaire"
+
+msgid "navigation.short-line-size"
+msgstr "taille des descriptions courtes"
+
+msgid "navtop"
+msgstr "haut de page du contenu principal"
+
+msgid "new"
+msgstr "nouveau"
+
+msgid "next page"
+msgstr "page suivante"
+
+msgid "next_results"
+msgstr "résultats suivants"
+
+msgid "no"
+msgstr "non"
+
+msgid "no content next link"
+msgstr "pas de lien 'suivant'"
+
+msgid "no content prev link"
+msgstr "pas de lien 'précédent'"
+
+msgid "no edited fields specified"
+msgstr "aucun champ à éditer spécifié"
+
+msgid "no log to display"
+msgstr "rien à afficher"
+
+msgid "no related entity"
+msgstr "pas d'entité liée"
+
+msgid "no repository sessions found"
+msgstr "aucune session trouvée"
+
+msgid "no selected entities"
+msgstr "pas d'entité sélectionnée"
+
+#, python-format
+msgid "no such entity type %s"
+msgstr "le type d'entité '%s' n'existe pas"
+
+msgid "no version information"
+msgstr "pas d'information de version"
+
+msgid "no web sessions found"
+msgstr "aucune session trouvée"
+
+msgid "normal"
+msgstr "normal"
+
+msgid "not authorized"
+msgstr "non autorisé"
+
+msgid "not selected"
+msgstr "non sélectionné"
+
+msgid "november"
+msgstr "novembre"
+
+msgid "num. users"
+msgstr "nombre d'utilisateurs"
+
+msgid "object"
+msgstr "objet"
+
+msgid "object type"
+msgstr "type de l'objet"
+
+msgid "october"
+msgstr "octobre"
+
+msgid "one month"
+msgstr "un mois"
+
+msgid "one week"
+msgstr "une semaine"
+
+msgid "oneline"
+msgstr "une ligne"
+
+msgid "only select queries are authorized"
+msgstr "seules les requêtes de sélections sont autorisées"
+
+msgid "open all"
+msgstr "tout ouvrir"
+
+msgid "opened sessions"
+msgstr "sessions ouvertes"
+
+msgid "opened web sessions"
+msgstr "sessions web ouvertes"
+
+msgid "options"
+msgstr "options"
+
+msgctxt "CWSourceSchemaConfig"
+msgid "options"
+msgstr "options"
+
+msgid "order"
+msgstr "ordre"
+
+msgid "ordernum"
+msgstr "ordre"
+
+msgctxt "CWAttribute"
+msgid "ordernum"
+msgstr "numéro d'ordre"
+
+msgctxt "CWRelation"
+msgid "ordernum"
+msgstr "numéro d'ordre"
+
+msgid "owl"
+msgstr "owl"
+
+msgid "owlabox"
+msgstr "owl ABox"
+
+msgid "owned_by"
+msgstr "appartient à"
+
+msgid "owned_by_object"
+msgstr "possède"
+
+msgid "owners"
+msgstr "propriétaires"
+
+msgid "ownerships have been changed"
+msgstr "les droits de propriété ont été modifiés"
+
+msgid "pageid-not-found"
+msgstr ""
+"des données nécessaires semblent expirées, veuillez recharger la page et "
+"recommencer."
+
+msgid "parser"
+msgstr "parseur"
+
+msgctxt "CWSource"
+msgid "parser"
+msgstr "parseur"
+
+msgid "parser to use to extract entities from content retrieved at given URLs."
+msgstr ""
+"parseur à utiliser pour extraire entités et relations du contenu récupéré "
+"aux URLs données"
+
+msgid "password"
+msgstr "mot de passe"
+
+msgid "password and confirmation don't match"
+msgstr "le mot de passe et la confirmation sont différents"
+
+msgid "path"
+msgstr "chemin"
+
+msgctxt "Bookmark"
+msgid "path"
+msgstr "chemin"
+
+msgid "permalink to this message"
+msgstr "lien permanent vers ce message"
+
+msgid "permission"
+msgstr "permission"
+
+msgid "permissions"
+msgstr "permissions"
+
+msgid "pick existing bookmarks"
+msgstr "récupérer des signets existants"
+
+msgid "pkey"
+msgstr "clé"
+
+msgctxt "CWProperty"
+msgid "pkey"
+msgstr "code de la propriété"
+
+msgid "please correct errors below"
+msgstr "veuillez corriger les erreurs ci-dessous"
+
+msgid "please correct the following errors:"
+msgstr "veuillez corriger les erreurs suivantes :"
+
+msgid "possible views"
+msgstr "vues possibles"
+
+msgid "prefered_form"
+msgstr "forme préférée"
+
+msgctxt "EmailAddress"
+msgid "prefered_form"
+msgstr "forme préférée"
+
+msgid "prefered_form_object"
+msgstr "forme préférée à"
+
+msgctxt "EmailAddress"
+msgid "prefered_form_object"
+msgstr "forme préférée de"
+
+msgid "preferences"
+msgstr "préférences"
+
+msgid "previous page"
+msgstr "page précédente"
+
+msgid "previous_results"
+msgstr "résultats précédents"
+
+msgid "primary"
+msgstr "primaire"
+
+msgid "primary_email"
+msgstr "adresse email principale"
+
+msgctxt "CWUser"
+msgid "primary_email"
+msgstr "email principal"
+
+msgid "primary_email_object"
+msgstr "adresse email principale (object)"
+
+msgctxt "EmailAddress"
+msgid "primary_email_object"
+msgstr "adresse principale de"
+
+msgid "profile"
+msgstr "profil"
+
+msgid "rdef-description"
+msgstr "description"
+
+msgid "rdef-permissions"
+msgstr "permissions"
+
+msgid "rdf export"
+msgstr "export RDF"
+
+msgid "read"
+msgstr "lecture"
+
+msgid "read_permission"
+msgstr "permission de lire"
+
+msgctxt "CWAttribute"
+msgid "read_permission"
+msgstr "permission de lire"
+
+msgctxt "CWComputedRType"
+msgid "read_permission"
+msgstr ""
+
+msgctxt "CWEType"
+msgid "read_permission"
+msgstr "permission de lire"
+
+msgctxt "CWRelation"
+msgid "read_permission"
+msgstr "permission de lire"
+
+msgid "read_permission_object"
+msgstr "a la permission de lire"
+
+msgctxt "CWGroup"
+msgid "read_permission_object"
+msgstr "peut lire"
+
+msgctxt "RQLExpression"
+msgid "read_permission_object"
+msgstr "peut lire"
+
+msgid "regexp matching host(s) to which this config applies"
+msgstr ""
+"expression régulière des noms d'hôtes auxquels cette configuration s'applique"
+
+msgid "registry"
+msgstr "registre"
+
+msgid "related entity has no state"
+msgstr "l'entité liée n'a pas d'état"
+
+msgid "related entity has no workflow set"
+msgstr "l'entité liée n'a pas de workflow"
+
+msgid "relation"
+msgstr "relation"
+
+#, python-format
+msgid "relation %(relname)s of %(ent)s"
+msgstr "relation %(relname)s de %(ent)s"
+
+#, python-format
+msgid ""
+"relation %(rtype)s with %(etype)s as %(role)s is supported but no target "
+"type supported"
+msgstr ""
+"la relation %(rtype)s avec %(etype)s comme %(role)s est supportée mais aucun "
+"type cible n'est supporté"
+
+#, python-format
+msgid ""
+"relation %(type)s with %(etype)s as %(role)s and target type %(target)s is "
+"mandatory but not supported"
+msgstr ""
+"la relation %(type)s avec %(etype)s comme %(role)s est obligatoire mais non "
+"supportée"
+
+#, python-format
+msgid ""
+"relation %s is supported but none of its definitions matches supported "
+"entities"
+msgstr ""
+"la relation %s est supportée mais aucune de ses définitions ne correspond "
+"aux types d'entités supportés"
+
+msgid "relation add"
+msgstr "ajout de relation"
+
+msgid "relation removal"
+msgstr "suppression de relation"
+
+msgid "relation_type"
+msgstr "type de relation"
+
+msgctxt "CWAttribute"
+msgid "relation_type"
+msgstr "type de relation"
+
+msgctxt "CWRelation"
+msgid "relation_type"
+msgstr "type de relation"
+
+msgid "relation_type_object"
+msgstr "définition"
+
+msgctxt "CWRType"
+msgid "relation_type_object"
+msgstr "définition"
+
+msgid "relations"
+msgstr "relations"
+
+msgctxt "CWUniqueTogetherConstraint"
+msgid "relations"
+msgstr "relations"
+
+msgid "relations deleted"
+msgstr "relations supprimées"
+
+msgid "relations_object"
+msgstr "relations de"
+
+msgctxt "CWRType"
+msgid "relations_object"
+msgstr "relations de"
+
+msgid "relative url of the bookmarked page"
+msgstr "url relative de la page"
+
+msgid "remove-inlined-entity-form"
+msgstr "supprimer"
+
+msgid "require_group"
+msgstr "nécessite le groupe"
+
+msgctxt "BaseTransition"
+msgid "require_group"
+msgstr "restreinte au groupe"
+
+msgctxt "Transition"
+msgid "require_group"
+msgstr "restreinte au groupe"
+
+msgctxt "WorkflowTransition"
+msgid "require_group"
+msgstr "restreinte au groupe"
+
+msgid "require_group_object"
+msgstr "a les droits"
+
+msgctxt "CWGroup"
+msgid "require_group_object"
+msgstr "a les droits"
+
+msgid "required"
+msgstr "requis"
+
+msgid "required attribute"
+msgstr "attribut requis"
+
+msgid "required field"
+msgstr "champ requis"
+
+msgid "resources usage"
+msgstr "ressources utilisées"
+
+msgid ""
+"restriction part of a rql query. For entity rql expression, X and U are "
+"predefined respectivly to the current object and to the request user. For "
+"relation rql expression, S, O and U are predefined respectivly to the "
+"current relation'subject, object and to the request user. "
+msgstr ""
+"partie restriction de la requête rql. Pour une expression s'appliquant à une "
+"entité, X et U sont respectivement prédéfinis à l'entité et à l'utilisateur "
+"courant. Pour une expression s'appliquant à une relation, S, O et U sont "
+"respectivement prédéfinis au sujet/objet de la relation et à l'utilisateur "
+"courant."
+
+msgid "revert changes"
+msgstr "annuler les changements"
+
+msgid "right"
+msgstr "droite"
+
+msgid "rql expression allowing to add entities/relations of this type"
+msgstr "expression rql autorisant à ajouter des entités/relations de ce type"
+
+msgid "rql expression allowing to delete entities/relations of this type"
+msgstr "expression rql autorisant à supprimer des entités/relations de ce type"
+
+msgid "rql expression allowing to read entities/relations of this type"
+msgstr "expression rql autorisant à lire des entités/relations de ce type"
+
+msgid "rql expression allowing to update entities/relations of this type"
+msgstr ""
+"expression rql autorisant à mettre à jour des entités/relations de ce type"
+
+msgid "rql expressions"
+msgstr "conditions rql"
+
+msgid "rss export"
+msgstr "export RSS"
+
+msgid "rule"
+msgstr "règle"
+
+msgctxt "CWComputedRType"
+msgid "rule"
+msgstr "règle"
+
+msgid "same_as"
+msgstr "identique à"
+
+msgid "sample format"
+msgstr "exemple"
+
+msgid "saturday"
+msgstr "samedi"
+
+msgid "schema-diagram"
+msgstr "diagramme"
+
+msgid "schema-entity-types"
+msgstr "types d'entités"
+
+msgid "schema-relation-types"
+msgstr "types de relations"
+
+msgid "search"
+msgstr "rechercher"
+
+msgid "search for association"
+msgstr "rechercher pour associer"
+
+msgid "searching for"
+msgstr "Recherche de"
+
+msgid "security"
+msgstr "sécurité"
+
+msgid "see more"
+msgstr "voir plus"
+
+msgid "see them all"
+msgstr "les voir toutes"
+
+msgid "see_also"
+msgstr "voir aussi"
+
+msgid "select"
+msgstr "sélectionner"
+
+msgid "select a"
+msgstr "sélectionner un"
+
+msgid "select a key first"
+msgstr "sélectionnez d'abord une clé"
+
+msgid "select a relation"
+msgstr "sélectionner une relation"
+
+msgid "select this entity"
+msgstr "sélectionner cette entité"
+
+msgid "selected"
+msgstr "sélectionné"
+
+msgid "semantic description of this attribute"
+msgstr "description sémantique de cet attribut"
+
+msgid "semantic description of this entity type"
+msgstr "description sémantique de ce type d'entité"
+
+msgid "semantic description of this relation"
+msgstr "description sémantique de cette relation"
+
+msgid "semantic description of this relation type"
+msgstr "description sémantique de ce type de relation"
+
+msgid "semantic description of this state"
+msgstr "description sémantique de cet état"
+
+msgid "semantic description of this transition"
+msgstr "description sémantique de cette transition"
+
+msgid "semantic description of this workflow"
+msgstr "description sémantique de ce workflow"
+
+msgid "september"
+msgstr "septembre"
+
+msgid "server information"
+msgstr "informations serveur"
+
+msgid "severity"
+msgstr "sévérité"
+
+msgid ""
+"should html fields being edited using fckeditor (a HTML WYSIWYG editor). "
+"You should also select text/html as default text format to actually get "
+"fckeditor."
+msgstr ""
+"indique si les champs HTML doivent être édités avec fckeditor (un\n"
+"éditeur HTML WYSIWYG). Il est également conseillé de choisir text/html\n"
+"comme format de texte par défaut pour pouvoir utiliser fckeditor."
+
+#, python-format
+msgid "show %s results"
+msgstr "montrer %s résultats"
+
+msgid "show advanced fields"
+msgstr "montrer les champs avancés"
+
+msgid "show filter form"
+msgstr "afficher le filtre"
+
+msgid "site configuration"
+msgstr "configuration du site"
+
+msgid "site documentation"
+msgstr "documentation du site"
+
+msgid "site title"
+msgstr "titre du site"
+
+msgid "site-wide property can't be set for user"
+msgstr "une propriété spécifique au site ne peut être propre à un utilisateur"
+
+msgid "some later transaction(s) touch entity, undo them first"
+msgstr ""
+"des transactions plus récentes modifient cette entité, annulez les d'abord"
+
+msgid "some relations violate a unicity constraint"
+msgstr "certaines relations transgressent une contrainte d'unicité"
+
+msgid "sorry, the server is unable to handle this query"
+msgstr "désolé, le serveur ne peut traiter cette requête"
+
+msgid ""
+"source's configuration. One key=value per line, authorized keys depending on "
+"the source's type"
+msgstr ""
+"Configuration de la source. Une clé=valeur par ligne, les clés autorisées "
+"dépendant du type de source. Les valeurs surchargent celles définies sur la "
+"source."
+
+msgid "sparql xml"
+msgstr "XML Sparql"
+
+msgid "special transition allowing to go through a sub-workflow"
+msgstr "transition spéciale permettant d'aller dans un sous-workflow"
+
+msgid "specializes"
+msgstr "dérive de"
+
+msgctxt "CWEType"
+msgid "specializes"
+msgstr "spécialise"
+
+msgid "specializes_object"
+msgstr "parent de"
+
+msgctxt "CWEType"
+msgid "specializes_object"
+msgstr "parent de"
+
+#, python-format
+msgid "specifying %s is mandatory"
+msgstr "spécifier %s est obligatoire"
+
+msgid ""
+"start timestamp of the currently in synchronization, or NULL when no "
+"synchronization in progress."
+msgstr ""
+"horodate de départ de la synchronisation en cours, ou NULL s'il n'y en a pas."
+
+msgid "start_timestamp"
+msgstr "horodate de début"
+
+msgctxt "CWDataImport"
+msgid "start_timestamp"
+msgstr "horodate de début"
+
+msgid "startup views"
+msgstr "vues de départ"
+
+msgid "startupview"
+msgstr "vues de départ"
+
+msgid "state"
+msgstr "état"
+
+msgid "state and transition don't belong the the same workflow"
+msgstr "l'état et la transition n'appartiennent pas au même workflow"
+
+msgid "state doesn't apply to this entity's type"
+msgstr "cet état ne s'applique pas à ce type d'entité"
+
+msgid "state doesn't belong to entity's current workflow"
+msgstr "l'état n'appartient pas au workflow courant de l'entité"
+
+msgid "state doesn't belong to entity's workflow"
+msgstr "l'état n'appartient pas au workflow de l'entité"
+
+msgid ""
+"state doesn't belong to entity's workflow. You may want to set a custom "
+"workflow for this entity first."
+msgstr ""
+"l'état n'appartient pas au workflow courant de l'entité. Vous désirez peut-"
+"être spécifier que cette entité doit utiliser ce workflow."
+
+msgid "state doesn't belong to this workflow"
+msgstr "l'état n'appartient pas à ce workflow"
+
+msgid "state_of"
+msgstr "état de"
+
+msgctxt "State"
+msgid "state_of"
+msgstr "état de"
+
+msgid "state_of_object"
+msgstr "a pour état"
+
+msgctxt "Workflow"
+msgid "state_of_object"
+msgstr "contient les états"
+
+msgid "status"
+msgstr "état"
+
+msgctxt "CWDataImport"
+msgid "status"
+msgstr "état"
+
+msgid "status change"
+msgstr "changer l'état"
+
+msgid "status changed"
+msgstr "changement d'état"
+
+#, python-format
+msgid "status will change from %(st1)s to %(st2)s"
+msgstr "l'entité passera de l'état %(st1)s à l'état %(st2)s"
+
+msgid "subject"
+msgstr "sujet"
+
+msgid "subject type"
+msgstr "type du sujet"
+
+msgid "subject/object cardinality"
+msgstr "cardinalité sujet/objet"
+
+msgid "subworkflow"
+msgstr "sous-workflow"
+
+msgctxt "WorkflowTransition"
+msgid "subworkflow"
+msgstr "sous-workflow"
+
+msgid ""
+"subworkflow isn't a workflow for the same types as the transition's workflow"
+msgstr ""
+"le sous-workflow ne s'applique pas aux mêmes types que le workflow de cette "
+"transition"
+
+msgid "subworkflow state"
+msgstr "état de sous-workflow"
+
+msgid "subworkflow_exit"
+msgstr "sortie de sous-workflow"
+
+msgctxt "WorkflowTransition"
+msgid "subworkflow_exit"
+msgstr "sortie du sous-workflow"
+
+msgid "subworkflow_exit_object"
+msgstr "états de sortie"
+
+msgctxt "SubWorkflowExitPoint"
+msgid "subworkflow_exit_object"
+msgstr "états de sortie"
+
+msgid "subworkflow_object"
+msgstr "utilisé par la transition"
+
+msgctxt "Workflow"
+msgid "subworkflow_object"
+msgstr "sous workflow de"
+
+msgid "subworkflow_state"
+msgstr "état du sous-workflow"
+
+msgctxt "SubWorkflowExitPoint"
+msgid "subworkflow_state"
+msgstr "état"
+
+msgid "subworkflow_state_object"
+msgstr "état de sortie de"
+
+msgctxt "State"
+msgid "subworkflow_state_object"
+msgstr "état de sortie de"
+
+msgid "success"
+msgstr "succès"
+
+msgid "sunday"
+msgstr "dimanche"
+
+msgid "surname"
+msgstr "nom"
+
+msgctxt "CWUser"
+msgid "surname"
+msgstr "nom de famille"
+
+msgid "symmetric"
+msgstr "symétrique"
+
+msgctxt "CWRType"
+msgid "symmetric"
+msgstr "symétrique"
+
+msgid "synchronization-interval must be greater than 1 minute"
+msgstr "synchronization-interval doit être supérieur à 1 minute"
+
+msgid "synchronize"
+msgstr "synchroniser"
+
+msgid "table"
+msgstr "table"
+
+msgid "tablefilter"
+msgstr "filtre de tableau"
+
+msgid "text"
+msgstr "text"
+
+msgid "text/cubicweb-page-template"
+msgstr "contenu dynamique"
+
+msgid "text/html"
+msgstr "html"
+
+msgid "text/markdown"
+msgstr "texte au format Markdown"
+
+msgid "text/plain"
+msgstr "texte"
+
+msgid "text/rest"
+msgstr "texte au format reStructuredText"
+
+msgid "the URI of the object"
+msgstr "l'Uri de l'objet"
+
+msgid "the prefered email"
+msgstr "l'adresse électronique principale"
+
+msgid "the system source has its configuration stored on the file-system"
+msgstr "la source système a sa configuration stockée sur le système de fichier"
+
+msgid "there is no next page"
+msgstr "Il n'y a pas de page suivante"
+
+msgid "there is no previous page"
+msgstr "Il n'y a pas de page précédente"
+
+#, python-format
+msgid "there is no transaction #%s"
+msgstr "Il n'y a pas de transaction #%s"
+
+msgid "this action is not reversible!"
+msgstr ""
+"Attention ! Cette opération va détruire les données de façon irréversible."
+
+msgid "this entity is currently owned by"
+msgstr "cette entité appartient à"
+
+msgid "this parser doesn't use a mapping"
+msgstr "ce parseur n'utilise pas de mapping"
+
+msgid "this resource does not exist"
+msgstr "cette ressource est introuvable"
+
+msgid "this source doesn't use a mapping"
+msgstr "cette source n'utilise pas de mapping"
+
+msgid "thursday"
+msgstr "jeudi"
+
+msgid "timestamp"
+msgstr "date"
+
+msgctxt "CWCache"
+msgid "timestamp"
+msgstr "valide depuis"
+
+msgid "timetable"
+msgstr "emploi du temps"
+
+msgid "title"
+msgstr "titre"
+
+msgctxt "Bookmark"
+msgid "title"
+msgstr "libellé"
+
+msgid "to"
+msgstr "à"
+
+#, python-format
+msgid "to %(date)s"
+msgstr "au %(date)s"
+
+msgid "to associate with"
+msgstr "pour associer à"
+
+msgid "to_entity"
+msgstr "vers l'entité"
+
+msgctxt "CWAttribute"
+msgid "to_entity"
+msgstr "pour l'entité"
+
+msgctxt "CWRelation"
+msgid "to_entity"
+msgstr "pour l'entité"
+
+msgid "to_entity_object"
+msgstr "objet de la relation"
+
+msgctxt "CWEType"
+msgid "to_entity_object"
+msgstr "objet de la relation"
+
+msgid "to_interval_end"
+msgstr "à"
+
+msgid "to_state"
+msgstr "vers l'état"
+
+msgctxt "TrInfo"
+msgid "to_state"
+msgstr "état de destination"
+
+msgid "to_state_object"
+msgstr "transitions vers cet état"
+
+msgctxt "State"
+msgid "to_state_object"
+msgstr "transition vers cet état"
+
+msgid "toggle check boxes"
+msgstr "afficher/masquer les cases à cocher"
+
+msgid "tr_count"
+msgstr "n° de transition"
+
+msgctxt "TrInfo"
+msgid "tr_count"
+msgstr "n° de transition"
+
+msgid "transaction undone"
+msgstr "transaction annulée"
+
+#, python-format
+msgid "transition %(tr)s isn't allowed from %(st)s"
+msgstr "la transition %(tr)s n'est pas autorisée depuis l'état %(st)s"
+
+msgid "transition doesn't belong to entity's workflow"
+msgstr "la transition n'appartient pas au workflow de l'entité"
+
+msgid "transition isn't allowed"
+msgstr "la transition n'est pas autorisée"
+
+msgid "transition may not be fired"
+msgstr "la transition ne peut être déclenchée"
+
+msgid "transition_of"
+msgstr "transition de"
+
+msgctxt "BaseTransition"
+msgid "transition_of"
+msgstr "transition de"
+
+msgctxt "Transition"
+msgid "transition_of"
+msgstr "transition de"
+
+msgctxt "WorkflowTransition"
+msgid "transition_of"
+msgstr "transition de"
+
+msgid "transition_of_object"
+msgstr "a pour transition"
+
+msgctxt "Workflow"
+msgid "transition_of_object"
+msgstr "a pour transition"
+
+msgid "tree view"
+msgstr "arborescence"
+
+msgid "tuesday"
+msgstr "mardi"
+
+msgid "type"
+msgstr "type"
+
+msgctxt "BaseTransition"
+msgid "type"
+msgstr "type"
+
+msgctxt "CWSource"
+msgid "type"
+msgstr "type"
+
+msgctxt "Transition"
+msgid "type"
+msgstr "type"
+
+msgctxt "WorkflowTransition"
+msgid "type"
+msgstr "type"
+
+msgid "type here a sparql query"
+msgstr "Tapez une requête sparql"
+
+msgid "type of the source"
+msgstr "type de la source"
+
+msgid "ui"
+msgstr "propriétés génériques de l'interface"
+
+msgid "ui.date-format"
+msgstr "format de date"
+
+msgid "ui.datetime-format"
+msgstr "format de date et de l'heure"
+
+msgid "ui.default-text-format"
+msgstr "format de texte"
+
+msgid "ui.encoding"
+msgstr "encodage"
+
+msgid "ui.fckeditor"
+msgstr "éditeur du contenu"
+
+msgid "ui.float-format"
+msgstr "format des flottants"
+
+msgid "ui.language"
+msgstr "langue"
+
+msgid "ui.main-template"
+msgstr "gabarit principal"
+
+msgid "ui.site-title"
+msgstr "titre du site"
+
+msgid "ui.time-format"
+msgstr "format de l'heure"
+
+msgid "unable to check captcha, please try again"
+msgstr "impossible de vérifier le captcha, veuillez réessayer"
+
+msgid "unaccessible"
+msgstr "inaccessible"
+
+msgid "unauthorized value"
+msgstr "valeur non autorisée"
+
+msgid "undefined user"
+msgstr "utilisateur inconnu"
+
+msgid "undo"
+msgstr "annuler"
+
+msgid "unique identifier used to connect to the application"
+msgstr "identifiant unique utilisé pour se connecter à l'application"
+
+msgid "unknown external entity"
+msgstr "entité (externe) introuvable"
+
+#, python-format
+msgid "unknown options %s"
+msgstr "options inconnues : %s"
+
+#, python-format
+msgid "unknown property key %s"
+msgstr "clé de propriété inconnue : %s"
+
+msgid "unknown vocabulary:"
+msgstr "vocabulaire inconnu : "
+
+msgid "unsupported protocol"
+msgstr "protocole non supporté"
+
+msgid "upassword"
+msgstr "mot de passe"
+
+msgctxt "CWUser"
+msgid "upassword"
+msgstr "mot de passe"
+
+msgid "update"
+msgstr "modification"
+
+msgid "update_permission"
+msgstr "permission de modification"
+
+msgctxt "CWAttribute"
+msgid "update_permission"
+msgstr "permission de modifier"
+
+msgctxt "CWEType"
+msgid "update_permission"
+msgstr "permission de modifier"
+
+msgid "update_permission_object"
+msgstr "a la permission de modifier"
+
+msgctxt "CWGroup"
+msgid "update_permission_object"
+msgstr "peut modifier"
+
+msgctxt "RQLExpression"
+msgid "update_permission_object"
+msgstr "peut modifier"
+
+msgid "update_relation"
+msgstr "modifier"
+
+msgid "updated"
+msgstr "mis à jour"
+
+#, python-format
+msgid "updated %(etype)s #%(eid)s (%(title)s)"
+msgstr "modification de l'entité %(etype)s #%(eid)s (%(title)s)"
+
+msgid "uri"
+msgstr "uri"
+
+msgctxt "ExternalUri"
+msgid "uri"
+msgstr "uri"
+
+msgid "url"
+msgstr "url"
+
+msgctxt "CWSource"
+msgid "url"
+msgstr "url"
+
+msgid ""
+"use to define a transition from one or multiple states to a destination "
+"states in workflow's definitions. Transition without destination state will "
+"go back to the state from which we arrived to the current state."
+msgstr ""
+"utilisé dans une définition de processus pour ajouter une transition depuis "
+"un ou plusieurs états vers un état de destination. Une transition sans état "
+"de destination retournera à l'état précédent l'état courant."
+
+msgid "use_email"
+msgstr "adresse électronique"
+
+msgctxt "CWUser"
+msgid "use_email"
+msgstr "utilise l'adresse électronique"
+
+msgid "use_email_object"
+msgstr "adresse utilisée par"
+
+msgctxt "EmailAddress"
+msgid "use_email_object"
+msgstr "utilisée par"
+
+msgid ""
+"used for cubicweb configuration. Once a property has been created you can't "
+"change the key."
+msgstr ""
+"utilisé pour la configuration de l'application. Une fois qu'une propriété a "
+"été créée, vous ne pouvez plus changer la clé associée"
+
+msgid ""
+"used to associate simple states to an entity type and/or to define workflows"
+msgstr "associe les états à un type d'entité pour définir un workflow"
+
+msgid "user"
+msgstr "utilisateur"
+
+#, python-format
+msgid ""
+"user %s has made the following change(s):\n"
+"\n"
+msgstr ""
+"l'utilisateur %s a effectué le(s) changement(s) suivant(s):\n"
+"\n"
+
+msgid "user interface encoding"
+msgstr "encodage utilisé dans l'interface utilisateur"
+
+msgid "user preferences"
+msgstr "préférences utilisateur"
+
+msgid "user's email account"
+msgstr "email de l'utilisateur"
+
+msgid "users"
+msgstr "utilisateurs"
+
+msgid "users and groups"
+msgstr "utilisateurs et groupes"
+
+msgid "users using this bookmark"
+msgstr "utilisateurs utilisant ce signet"
+
+msgid "validate modifications on selected items"
+msgstr "valider les modifications apportées aux éléments sélectionnés"
+
+msgid "validating..."
+msgstr "chargement en cours ..."
+
+msgid "value"
+msgstr "valeur"
+
+msgctxt "CWConstraint"
+msgid "value"
+msgstr "contrainte"
+
+msgctxt "CWProperty"
+msgid "value"
+msgstr "valeur"
+
+#, python-format
+msgid "value %(KEY-value)s must be < %(KEY-boundary)s"
+msgstr ""
+"la valeur %(KEY-value)s doit être strictement inférieure à %(KEY-boundary)s"
+
+#, python-format
+msgid "value %(KEY-value)s must be <= %(KEY-boundary)s"
+msgstr ""
+"la valeur %(KEY-value)s doit être inférieure ou égale à %(KEY-boundary)s"
+
+#, python-format
+msgid "value %(KEY-value)s must be > %(KEY-boundary)s"
+msgstr ""
+"la valeur %(KEY-value)s doit être strictement supérieure à %(KEY-boundary)s"
+
+#, python-format
+msgid "value %(KEY-value)s must be >= %(KEY-boundary)s"
+msgstr ""
+"la valeur %(KEY-value)s doit être supérieure ou égale à %(KEY-boundary)s"
+
+msgid "value associated to this key is not editable manually"
+msgstr "la valeur associée à cette clé n'est pas éditable manuellement"
+
+#, python-format
+msgid "value should have maximum size of %(KEY-max)s but found %(KEY-size)s"
+msgstr ""
+"la taille maximum est %(KEY-max)s mais cette valeur est de taille "
+"%(KEY-size)s"
+
+#, python-format
+msgid "value should have minimum size of %(KEY-min)s but found %(KEY-size)s"
+msgstr ""
+"la taille minimum est %(KEY-min)s mais cette valeur est de taille "
+"%(KEY-size)s"
+
+msgid "vcard"
+msgstr "vcard"
+
+msgid "versions configuration"
+msgstr "configuration de version"
+
+msgid "view"
+msgstr "voir"
+
+msgid "view all"
+msgstr "voir tous"
+
+msgid "view detail for this entity"
+msgstr "voir les détails de cette entité"
+
+msgid "view history"
+msgstr "voir l'historique"
+
+msgid "view identifier"
+msgstr "identifiant"
+
+msgid "view title"
+msgstr "titre"
+
+msgid "view workflow"
+msgstr "voir les états possibles"
+
+msgid "view_index"
+msgstr "accueil"
+
+msgid "visible"
+msgstr "visible"
+
+msgid "warning"
+msgstr "attention"
+
+msgid "we are not yet ready to handle this query"
+msgstr ""
+"nous ne sommes pas capables de gérer ce type de requête sparql pour le moment"
+
+msgid "wednesday"
+msgstr "mercredi"
+
+#, python-format
+msgid "welcome %s!"
+msgstr "bienvenue %s !"
+
+msgid "wf_info_for"
+msgstr "historique de"
+
+msgid "wf_info_for_object"
+msgstr "historique des transitions"
+
+msgid "wf_tab_info"
+msgstr "description"
+
+msgid "wfgraph"
+msgstr "image du workflow"
+
+msgid ""
+"when multiple addresses are equivalent (such as python-projects@logilab.org "
+"and python-projects@lists.logilab.org), set this to indicate which is the "
+"preferred form."
+msgstr ""
+"quand plusieurs adresses sont équivalentes (comme python-projects@logilab.
+"org et python-projects@lists.logilab.org), indique laquelle est la forme "
+"préférentielle."
+
+msgid "workflow"
+msgstr "workflow"
+
+#, python-format
+msgid "workflow changed to \"%s\""
+msgstr "workflow changé à \"%s\""
+
+msgid "workflow has no initial state"
+msgstr "le workflow n'a pas d'état initial"
+
+msgid "workflow history item"
+msgstr "entrée de l'historique de workflow"
+
+msgid "workflow isn't a workflow for this type"
+msgstr "le workflow ne s'applique pas à ce type d'entité"
+
+msgid "workflow to which this state belongs"
+msgstr "workflow auquel cet état appartient"
+
+msgid "workflow to which this transition belongs"
+msgstr "workflow auquel cette transition appartient"
+
+msgid "workflow_of"
+msgstr "workflow de"
+
+msgctxt "Workflow"
+msgid "workflow_of"
+msgstr "workflow de"
+
+msgid "workflow_of_object"
+msgstr "a pour workflow"
+
+msgctxt "CWEType"
+msgid "workflow_of_object"
+msgstr "a pour workflow"
+
+#, python-format
+msgid "wrong query parameter line %s"
+msgstr "mauvais paramètre de requête ligne %s"
+
+msgid "xbel export"
+msgstr "export XBEL"
+
+msgid "xml export"
+msgstr "export XML"
+
+msgid "xml export (entities)"
+msgstr "export XML (entités)"
+
+msgid "yes"
+msgstr "oui"
+
+msgid "you have been logged out"
+msgstr "vous avez été déconnecté"
+
+msgid "you should probably delete that property"
+msgstr "vous devriez probablement supprimer cette propriété"
+
+#~ msgid "edit canceled"
+#~ msgstr "édition annulée"
+
+#~ msgid "the value \"%s\" is already used, use another one"
+#~ msgstr ""
+#~ "la valeur \"%s\" est déjà utilisée, veuillez utiliser une autre valeur"
diff -r 1400aee10df4 -r faf279e33298 cubicweb/mail.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/mail.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,154 @@
+# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+"""Common utilies to format / send emails."""
+
+__docformat__ = "restructuredtext en"
+
+from base64 import b64encode, b64decode
+from time import time
+from email.mime.multipart import MIMEMultipart
+from email.mime.text import MIMEText
+from email.mime.image import MIMEImage
+from email.header import Header
+from email.utils import formatdate
+from socket import gethostname
+
+from six import PY2, PY3, text_type
+
+
def header(ustring):
    """Return an email.header.Header carrying *ustring* as UTF-8.

    On Python 2 the text is encoded to UTF-8 bytes first; on Python 3
    Header accepts the text directly.
    """
    if PY2:
        return Header(ustring.encode('UTF-8'), 'UTF-8')
    return Header(ustring, 'utf-8')
+
def addrheader(uaddr, uname=None):
    """Build an address header value: 'name <addr>' or the bare address.

    Even if an email address should be ascii, it is encoded using UTF-8 on
    Python 2 since automatic tests may generate non ascii email addresses.
    """
    addr = uaddr.encode('UTF-8') if PY2 else uaddr
    if uname:
        value = '%s <%s>' % (header(uname).encode(), addr)
    else:
        value = addr
    # bytes in py2, ascii-encoded unicode in py3
    assert isinstance(value, str)
    return value
+
+
def construct_message_id(appid, eid, withtimestamp=True):
    """Return an RFC 2822 Message-ID embedding entity `eid` of instance `appid`.

    The local part base64-encodes 'eid=<eid>' (plus '&timestamp=<now>' unless
    `withtimestamp` is false); parse_message_id() decodes it back.
    """
    if withtimestamp:
        # bug fix: the '&' separator had been mangled into '\xd7' ('&times'
        # decoded as an HTML entity); parse_message_id splits values on '&'
        addrpart = 'eid=%s&timestamp=%.10f' % (eid, time())
    else:
        addrpart = 'eid=%s' % eid
    # we don't want any equal sign nor trailing newlines; '.' and '-' replace
    # '+' and '/' so the local part stays msgid-safe
    leftpart = b64encode(addrpart.encode('ascii'), b'.-').decode('ascii').rstrip().rstrip('=')
    return '<%s@%s.%s>' % (leftpart, appid, gethostname())
+
+
def parse_message_id(msgid, appid):
    """Reverse construct_message_id().

    Return the dict of encoded values (e.g. {'eid': ..., 'timestamp': ...})
    when `msgid` was generated by this instance, else None.
    """
    if msgid[0] == '<':
        msgid = msgid[1:]
    if msgid[-1] == '>':
        msgid = msgid[:-1]
    try:
        values, qualif = msgid.split('@')
        # bug fix: restore the '=' padding stripped by construct_message_id.
        # The amount needed is (-len) % 4; the previous `len % 4` computed 3
        # when 1 was needed, so such msgids failed to decode and parsed as None
        padding = -len(values) % 4
        values = b64decode(str(values + '=' * padding), '.-').decode('ascii')
        values = dict(v.split('=') for v in values.split('&'))
        fromappid, host = qualif.split('.', 1)
    except Exception:
        # anything malformed is simply not one of our message ids
        return None
    if appid != fromappid or host != gethostname():
        return None
    return values
+
+
+def format_mail(uinfo, to_addrs, content, subject="",
+                cc_addrs=(), msgid=None, references=(), config=None):
+    """Build a MIMEText message carrying `content`, addressed to `to_addrs`.
+
+    `to_addrs` and `cc_addrs` are expected to be lists of email addresses
+    without a display name.  The sender name/address come from `uinfo`
+    ('name' / 'email' keys), falling back to the instance configuration
+    ('sender-name' / 'sender-addr' options) when `config` is given.
+    """
+    assert isinstance(content, text_type), repr(content)
+    msg = MIMEText(content.encode('UTF-8'), 'plain', 'UTF-8')
+    # safety: keep only the first line of the subject (headers must not
+    # contain newlines)
+    try:
+        subject = subject.splitlines()[0]
+        msg['Subject'] = header(subject)
+    except IndexError:
+        pass # no subject
+    # sender address: explicit user info wins over the configuration
+    if uinfo.get('email'):
+        email = uinfo['email']
+    elif config and config['sender-addr']:
+        email = text_type(config['sender-addr'])
+    else:
+        email = u''
+    # same fallback logic for the sender display name
+    if uinfo.get('name'):
+        name = uinfo['name']
+    elif config and config['sender-name']:
+        name = text_type(config['sender-name'])
+    else:
+        name = u''
+    msg['From'] = addrheader(email, name)
+    # when sending on behalf of a user, replies should also reach the
+    # instance's configured address
+    if config and config['sender-addr'] and config['sender-addr'] != email:
+        appaddr = addrheader(config['sender-addr'], config['sender-name'])
+        msg['Reply-to'] = '%s, %s' % (msg['From'], appaddr)
+    elif email:
+        msg['Reply-to'] = msg['From']
+    if config is not None:
+        msg['X-CW'] = config.appid
+    # deduplicate and drop None recipients, sorting for determinism
+    unique_addrs = lambda addrs: sorted(set(addr for addr in addrs if addr is not None))
+    msg['To'] = ', '.join(addrheader(addr) for addr in unique_addrs(to_addrs))
+    if cc_addrs:
+        msg['Cc'] = ', '.join(addrheader(addr) for addr in unique_addrs(cc_addrs))
+    if msgid:
+        msg['Message-id'] = msgid
+    if references:
+        msg['References'] = ', '.join(references)
+    msg['Date'] = formatdate()
+    return msg
+
+
class HtmlEmail(MIMEMultipart):
    """A multipart/related email with text and HTML alternative bodies,
    supporting images embedded by Content-ID (cid: references in the HTML)."""

    def __init__(self, subject, textcontent, htmlcontent,
                 sendermail=None, sendername=None, recipients=None, ccrecipients=None):
        MIMEMultipart.__init__(self, 'related')
        self['Subject'] = header(subject)
        self.preamble = 'This is a multi-part message in MIME format.'
        # the text and html bodies are alternatives of one another
        alternative = MIMEMultipart('alternative')
        self.attach(alternative)
        alternative.attach(MIMEText(textcontent.encode('UTF-8'), 'plain', 'UTF-8'))
        alternative.attach(MIMEText(htmlcontent.encode('UTF-8'), 'html', 'UTF-8'))
        if sendermail or sendername:
            self['From'] = addrheader(sendermail, sendername)
        if recipients:
            self['To'] = ', '.join(addrheader(addr) for addr in recipients
                                   if addr is not None)
        if ccrecipients:
            self['Cc'] = ', '.join(addrheader(addr) for addr in ccrecipients
                                   if addr is not None)

    def attach_image(self, data, htmlId):
        """Attach *data* as an inline image referenced as cid:<htmlId>."""
        image = MIMEImage(data)
        image.add_header('Content-ID', '<%s>' % htmlId)
        self.attach(image)
diff -r 1400aee10df4 -r faf279e33298 cubicweb/md5crypt.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/md5crypt.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,121 @@
+# md5crypt.py
+#
+# 0423.2000 by michal wallace http://www.sabren.com/
+# based on perl's Crypt::PasswdMD5 by Luis Munoz (lem@cantv.net)
+# based on /usr/src/libcrypt/crypt.c from FreeBSD 2.2.5-RELEASE
+#
+# MANY THANKS TO
+#
+# Carey Evans - http://home.clear.net.nz/pages/c.evans/
+# Dennis Marti - http://users.starpower.net/marti1/
+#
+# For the patches that got this thing working!
+#
+# modification by logilab:
+# * remove usage of the string module
+# * don't include the magic string in the output string
+# for true crypt.crypt compatibility
+# * use hashlib module instead of md5
+#########################################################
+"""md5crypt.py - Provides interoperable MD5-based crypt() function
+
+SYNOPSIS
+
+ import md5crypt.py
+
+ cryptedpassword = md5crypt.md5crypt(password, salt);
+
+DESCRIPTION
+
+unix_md5_crypt() provides a crypt()-compatible interface to the
+rather new MD5-based crypt() function found in modern operating systems.
+It's based on the implementation found on FreeBSD 2.2.[56]-RELEASE and
+contains the following license in it:
+
+ "THE BEER-WARE LICENSE" (Revision 42):
+ <phk@FreeBSD.ORG> wrote this file. As long as you retain this notice you
+ can do whatever you want with this stuff. If we meet some day, and you think
+ this stuff is worth it, you can buy me a beer in return. Poul-Henning Kamp
+"""
+
+MAGIC = b'$1$' # Magic string
+ITOA64 = b"./0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
+
+from hashlib import md5 # pylint: disable=E0611
+
+from six import text_type, indexbytes
+from six.moves import range
+
+
def to64(v, n):
    """Encode the `n` low 6-bit groups of integer `v` with the ITOA64
    alphabet, least significant group first; return them as a bytearray."""
    encoded = bytearray()
    for _ in range(n):
        encoded.append(ITOA64[v & 0x3f])
        v >>= 6
    return encoded
+
def crypt(pw, salt):
    """Return the MD5-crypt hash of password `pw` using `salt`.

    Text arguments are encoded (password as UTF-8, salt as ascii) before
    hashing.  Unlike the FreeBSD original, the returned value does NOT
    include the '$1$' magic prefix, for crypt.crypt compatibility.
    """
    if isinstance(pw, text_type):
        pw = pw.encode('utf-8')
    if isinstance(salt, text_type):
        salt = salt.encode('ascii')
    # Take care of the magic string if present
    if salt.startswith(MAGIC):
        salt = salt[len(MAGIC):]
    # salt can have up to 8 characters:
    salt = salt.split(b'$', 1)[0]
    salt = salt[:8]
    ctx = pw + MAGIC + salt
    final = md5(pw + salt + pw).digest()
    for pl in range(len(pw), 0, -16):
        if pl > 16:
            ctx = ctx + final[:16]
        else:
            ctx = ctx + final[:pl]
    # Now the 'weird' xform (??)
    i = len(pw)
    while i:
        if i & 1:
            ctx = ctx + b'\0' #if ($i & 1) { $ctx->add(pack("C", 0)); }
        else:
            # bug fix: pw[0] is an int on Python 3, which cannot be added to
            # bytes; slicing yields a 1-byte bytes object on py2 and py3 alike
            ctx = ctx + pw[0:1]
        i = i >> 1
    final = md5(ctx).digest()
    # The following 1000 rounds are supposed to make things run slower
    # (a deliberate cost factor, as in the original C implementation)
    for i in range(1000):
        ctx1 = b''
        if i & 1:
            ctx1 = ctx1 + pw
        else:
            ctx1 = ctx1 + final[:16]
        if i % 3:
            ctx1 = ctx1 + salt
        if i % 7:
            ctx1 = ctx1 + pw
        if i & 1:
            ctx1 = ctx1 + final[:16]
        else:
            ctx1 = ctx1 + pw
        final = md5(ctx1).digest()
    # Final xform: base64-like encoding of the digest with the ITOA64 alphabet
    passwd = b''
    passwd += to64((indexbytes(final, 0) << 16)
                   |(indexbytes(final, 6) << 8)
                   |(indexbytes(final, 12)), 4)
    passwd += to64((indexbytes(final, 1) << 16)
                   |(indexbytes(final, 7) << 8)
                   |(indexbytes(final, 13)), 4)
    passwd += to64((indexbytes(final, 2) << 16)
                   |(indexbytes(final, 8) << 8)
                   |(indexbytes(final, 14)), 4)
    passwd += to64((indexbytes(final, 3) << 16)
                   |(indexbytes(final, 9) << 8)
                   |(indexbytes(final, 15)), 4)
    passwd += to64((indexbytes(final, 4) << 16)
                   |(indexbytes(final, 10) << 8)
                   |(indexbytes(final, 5)), 4)
    passwd += to64((indexbytes(final, 11)), 2)
    return passwd
diff -r 1400aee10df4 -r faf279e33298 cubicweb/migration.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/migration.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,550 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+"""utilities for instances migration"""
+from __future__ import print_function
+
+__docformat__ = "restructuredtext en"
+
+import sys
+import os
+import logging
+import tempfile
+from os.path import exists, join, basename, splitext
+from itertools import chain
+from warnings import warn
+
+from six import string_types
+
+from logilab.common import IGNORED_EXTENSIONS
+from logilab.common.decorators import cached
+from logilab.common.configuration import REQUIRED, read_old_config
+from logilab.common.shellutils import ASK
+from logilab.common.changelog import Version
+from logilab.common.deprecation import deprecated
+
+from cubicweb import ConfigurationError, ExecutionError
+from cubicweb.cwconfig import CubicWebConfiguration as cwcfg
+from cubicweb.toolsutils import show_diffs
+
def filter_scripts(config, directory, fromversion, toversion, quiet=True):
    """return a sorted list of (version, script path) pairs for the migration
    scripts to run when upgrading from `fromversion` (exclusive) up to
    `toversion` (inclusive)
    """
    from logilab.common.changelog import Version # doesn't work with appengine
    assert fromversion
    assert toversion
    assert isinstance(fromversion, tuple), fromversion.__class__
    assert isinstance(toversion, tuple), toversion.__class__
    assert fromversion <= toversion, (fromversion, toversion)
    if not exists(directory):
        if not quiet:
            print(directory, "doesn't exists, no migration path")
        return []
    if fromversion == toversion:
        return []
    found = []
    for fname in os.listdir(directory):
        if fname.endswith(IGNORED_EXTENSIONS):
            continue
        # expected name pattern: <version>_<mode>.<ext>
        try:
            tver, mode = fname.split('_', 1)
        except ValueError:
            continue
        # skip scripts whose mode this configuration doesn't accept
        if not config.accept_mode(mode.split('.', 1)[0]):
            continue
        try:
            tver = Version(tver)
        except ValueError:
            continue
        if fromversion < tver <= toversion:
            found.append((tver, join(directory, fname)))
    # be sure scripts are executed in order
    return sorted(found)
+
+
def execscript_confirm(scriptpath):
    """ask confirmation before executing the script at `scriptpath`.

    Returns True to execute it, False to skip it; the 'show' answer prints
    the script's content and asks again, 'abort' exits the process.
    """
    while True:
        answer = ASK.ask('Execute %r ?' % scriptpath,
                         ('Y', 'n', 'show', 'abort'), 'Y')
        if answer == 'abort':
            raise SystemExit(1)
        elif answer == 'n':
            return False
        elif answer == 'show':
            with open(scriptpath) as stream:
                scriptcontent = stream.read()
            print()
            print(scriptcontent)
            print()
        else:
            return True
+
def yes(*args, **kwargs):
    """Unconditional confirmation callback: accept any question, return True."""
    return True
+
+
+class MigrationHelper(object):
+ """class holding CubicWeb Migration Actions used by migration scripts"""
+
+    def __init__(self, config, interactive=True, verbosity=1):
+        """Initialize the migration helper.
+
+        :param config: instance configuration; may be false (e.g. None) when
+            shell-connecting to a remote instance
+        :param interactive: when false, confirmations are auto-accepted
+        :param verbosity: 0: no confirmation, 1: only main commands
+            confirmed, 2: ask for everything
+        """
+        self.config = config
+        if config:
+            # no config on shell to a remote instance
+            self.config.init_log(logthreshold=logging.ERROR)
+        # 0: no confirmation, 1: only main commands confirmed, 2 ask for everything
+        self.verbosity = verbosity
+        self.need_wrap = True
+        if not interactive or not verbosity:
+            # auto-accept everything in non-interactive mode
+            self.confirm = yes
+            self.execscript_confirm = yes
+        else:
+            self.execscript_confirm = execscript_confirm
+        self._option_changes = []
+        # base execution context handed to migration scripts
+        self.__context = {'confirm': self.confirm,
+                          'config': self.config,
+                          'interactive_mode': interactive,
+                          }
+        self._context_stack = []
+
+ def __getattribute__(self, name):
+ try:
+ return object.__getattribute__(self, name)
+ except AttributeError:
+ cmd = 'cmd_%s' % name
+ # search self.__class__ to avoid infinite recursion
+ if hasattr(self.__class__, cmd):
+ meth = getattr(self, cmd)
+ return lambda *args, **kwargs: self.interact(args, kwargs,
+ meth=meth)
+ raise
+ raise AttributeError(name)
+
+    def migrate(self, vcconf, toupgrade, options):
+        """upgrade the given set of cubes
+
+        `toupgrade` is an ordered list of 3-uple:
+        (cube, fromversion, toversion)
+        """
+        if options.fs_only:
+            # monkey patch configuration.accept_mode so database mode (e.g. Any)
+            # won't be accepted
+            orig_accept_mode = self.config.accept_mode
+            def accept_mode(mode):
+                if mode == 'Any':
+                    return False
+                return orig_accept_mode(mode)
+            self.config.accept_mode = accept_mode
+        # may be an iterator
+        toupgrade = tuple(toupgrade)
+        vmap = dict( (cube, (fromver, tover)) for cube, fromver, tover in toupgrade)
+        ctx = self.__context
+        ctx['versions_map'] = vmap
+        # run cubicweb's bootstrap migration first when upgrading cubicweb
+        # itself in a database-aware mode
+        if self.config.accept_mode('Any') and 'cubicweb' in vmap:
+            migrdir = self.config.migration_scripts_dir()
+            self.cmd_process_script(join(migrdir, 'bootstrapmigration_repository.py'))
+        for cube, fromversion, toversion in toupgrade:
+            if cube == 'cubicweb':
+                migrdir = self.config.migration_scripts_dir()
+            else:
+                migrdir = self.config.cube_migration_scripts_dir(cube)
+            scripts = filter_scripts(self.config, migrdir, fromversion, toversion)
+            if scripts:
+                prevversion = None
+                for version, script in scripts:
+                    # take care to X.Y.Z_Any.py / X.Y.Z_common.py: we've to call
+                    # cube_upgraded once all script of X.Y.Z have been executed
+                    if prevversion is not None and version != prevversion:
+                        self.cube_upgraded(cube, prevversion)
+                    prevversion = version
+                    self.cmd_process_script(script)
+                self.cube_upgraded(cube, toversion)
+            else:
+                self.cube_upgraded(cube, toversion)
+
+ def cube_upgraded(self, cube, version):
+ pass
+
+ def shutdown(self):
+ pass
+
+ def interact(self, args, kwargs, meth):
+ """execute the given method according to user's confirmation"""
+ msg = 'Execute command: %s(%s) ?' % (
+ meth.__name__[4:],
+ ', '.join([repr(arg) for arg in args] +
+ ['%s=%r' % (n,v) for n,v in kwargs.items()]))
+ if 'ask_confirm' in kwargs:
+ ask_confirm = kwargs.pop('ask_confirm')
+ else:
+ ask_confirm = True
+ if not ask_confirm or self.confirm(msg):
+ return meth(*args, **kwargs)
+
+    def confirm(self, question, # pylint: disable=E0202
+                shell=True, abort=True, retry=False, pdb=False, default='y'):
+        """ask for confirmation and return true on positive answer
+
+        if `retry` is true the r[etry] answer may return 2
+
+        'abort' (or EOF / keyboard interrupt) raises SystemExit(1); 'shell'
+        and 'pdb' respectively open an interactive shell / the debugger,
+        then ask the same question again
+        """
+        possibleanswers = ['y', 'n']
+        if abort:
+            possibleanswers.append('abort')
+        if pdb:
+            possibleanswers.append('pdb')
+        if shell:
+            possibleanswers.append('shell')
+        if retry:
+            possibleanswers.append('retry')
+        try:
+            answer = ASK.ask(question, possibleanswers, default)
+        except (EOFError, KeyboardInterrupt):
+            # treat interruptions as a request to abort
+            answer = 'abort'
+        if answer == 'n':
+            return False
+        if answer == 'retry':
+            return 2
+        if answer == 'abort':
+            raise SystemExit(1)
+        if answer == 'shell':
+            self.interactive_shell()
+            return self.confirm(question, shell, abort, retry, pdb, default)
+        if answer == 'pdb':
+            import pdb
+            pdb.set_trace()
+            return self.confirm(question, shell, abort, retry, pdb, default)
+        return True
+
+    def interactive_shell(self):
+        """Open an interactive Python shell with migration commands available.
+
+        While the shell runs, commands execute without per-command
+        confirmation; the confirm/need_wrap state is restored on exit.
+        """
+        self.confirm = yes
+        self.need_wrap = False
+        # avoid '_' to be added to builtins by sys.display_hook
+        def do_not_add___to_builtins(obj):
+            if obj is not None:
+                print(repr(obj))
+        sys.displayhook = do_not_add___to_builtins
+        local_ctx = self._create_context()
+        try:
+            import readline
+            from cubicweb.toolsutils import CWShellCompleter
+        except ImportError:
+            # readline not available
+            pass
+        else:
+            rql_completer = CWShellCompleter(local_ctx)
+            readline.set_completer(rql_completer.complete)
+            readline.parse_and_bind('tab: complete')
+            home_key = 'HOME'
+            if sys.platform == 'win32':
+                home_key = 'USERPROFILE'
+            histfile = os.path.join(os.environ[home_key], ".cwshell_history")
+            try:
+                readline.read_history_file(histfile)
+            except IOError:
+                pass
+        from code import interact
+        banner = """entering the migration python shell
+just type migration commands or arbitrary python code and type ENTER to execute it
+type "exit" or Ctrl-D to quit the shell and resume operation"""
+        interact(banner, local=local_ctx)
+        # NOTE(review): if readline failed to import above, `readline` and
+        # `histfile` are unbound here and this raises NameError, which the
+        # IOError handler does not catch -- confirm and fix separately
+        try:
+            readline.write_history_file(histfile)
+        except IOError:
+            pass
+        # delete instance's confirm attribute to avoid questions
+        del self.confirm
+        self.need_wrap = True
+
+    @cached
+    def _create_context(self):
+        """return a dictionary to use as migration script execution context"""
+        context = self.__context
+        for attr in dir(self):
+            if attr.startswith('cmd_'):
+                if self.need_wrap:
+                    # getattr with the 'cmd_' prefix stripped goes through
+                    # __getattribute__, which returns the confirmation wrapper
+                    context[attr[4:]] = getattr(self, attr[4:])
+                else:
+                    # expose the raw command method, without confirmation
+                    context[attr[4:]] = getattr(self, attr)
+        return context
+
+ def update_context(self, key, value):
+ for context in self._context_stack:
+ context[key] = value
+ self.__context[key] = value
+
+ def cmd_process_script(self, migrscript, funcname=None, *args, **kwargs):
+ """execute a migration script in interactive mode
+
+ Display the migration script path, ask for confirmation and execute it
+ if confirmed
+
+ Allowed input file formats for migration scripts:
+ - `python` (.py)
+ - `sql` (.sql)
+ - `doctest` (.txt or .rst)
+
+ .. warning:: sql migration scripts are not available in web-only instance
+
+ You can pass script parameters with using double dash (--) in the
+ command line
+
+ Context environment can have these variables defined:
+ - __name__ : will be determine by funcname parameter
+ - __file__ : is the name of the script if it exists
+ - __args__ : script arguments coming from command-line
+
+ :param migrscript: name of the script
+ :param funcname: defines __name__ inside the shell (or use __main__)
+ :params args: optional arguments for funcname
+ :keyword scriptargs: optional arguments of the script
+ """
+ ftypes = {'python': ('.py',),
+ 'doctest': ('.txt', '.rst'),
+ 'sql': ('.sql',)}
+ # sql migration scripts are not available in web-only instance
+ if not hasattr(self, "session"):
+ ftypes.pop('sql')
+ migrscript = os.path.normpath(migrscript)
+ for (script_mode, ftype) in ftypes.items():
+ if migrscript.endswith(ftype):
+ break
+ else:
+ ftypes = ', '.join(chain(*ftypes.values()))
+ msg = 'ignoring %s, not a valid script extension (%s)'
+ raise ExecutionError(msg % (migrscript, ftypes))
+ if not self.execscript_confirm(migrscript):
+ return
+ scriptlocals = self._create_context().copy()
+ scriptlocals.update({'__file__': migrscript,
+ '__args__': kwargs.pop("scriptargs", [])})
+ self._context_stack.append(scriptlocals)
+ if script_mode == 'python':
+ if funcname is None:
+ pyname = '__main__'
+ else:
+ pyname = splitext(basename(migrscript))[0]
+ scriptlocals['__name__'] = pyname
+ with open(migrscript, 'rb') as fobj:
+ fcontent = fobj.read()
+ try:
+ code = compile(fcontent, migrscript, 'exec')
+ except SyntaxError:
+ # try without print_function
+ code = compile(fcontent, migrscript, 'exec', 0, True)
+ warn('[3.22] script %r should be updated to work with print_function'
+ % migrscript, DeprecationWarning)
+ exec(code, scriptlocals)
+ if funcname is not None:
+ try:
+ func = scriptlocals[funcname]
+ self.info('found %s in locals', funcname)
+ assert callable(func), '%s (%s) is not callable' % (func, funcname)
+ except KeyError:
+ self.critical('no %s in script %s', funcname, migrscript)
+ return None
+ return func(*args, **kwargs)
+ elif script_mode == 'sql':
+ from cubicweb.server.sqlutils import sqlexec
+ sqlexec(open(migrscript).read(), self.session.system_sql)
+ self.commit()
+ else: # script_mode == 'doctest'
+ import doctest
+ return doctest.testfile(migrscript, module_relative=False,
+ optionflags=doctest.ELLIPSIS,
+ # verbose mode when user input is expected
+ verbose=self.verbosity==2,
+ report=True,
+ encoding='utf-8',
+ globs=scriptlocals)
+ self._context_stack.pop()
+
+    def cmd_option_renamed(self, oldname, newname):
+        """a configuration option has been renamed; recorded so
+        rewrite_configuration() can migrate the configuration file"""
+        self._option_changes.append(('renamed', oldname, newname))
+
+    def cmd_option_group_changed(self, option, oldgroup, newgroup):
+        """a configuration option has been moved in another group; recorded
+        for later use by rewrite_configuration()"""
+        self._option_changes.append(('moved', option, oldgroup, newgroup))
+
+    def cmd_option_added(self, optname):
+        """a configuration option has been added; rewrite_configuration()
+        will prompt for its value if it is required"""
+        self._option_changes.append(('added', optname))
+
+    def cmd_option_removed(self, optname):
+        """a configuration option has been removed"""
+        # can safely be ignored: unknown options are simply dropped when the
+        # configuration file is regenerated
+        #self._option_changes.append(('removed', optname))
+
+    def cmd_option_type_changed(self, optname, oldtype, newvalue):
+        """a configuration option's type has changed; recorded for later use
+        by rewrite_configuration()"""
+        self._option_changes.append(('typechanged', optname, oldtype, newvalue))
+
+ def cmd_add_cubes(self, cubes):
+ """modify the list of used cubes in the in-memory config
+ returns newly inserted cubes, including dependencies
+ """
+ if isinstance(cubes, string_types):
+ cubes = (cubes,)
+ origcubes = self.config.cubes()
+ newcubes = [p for p in self.config.expand_cubes(cubes)
+ if not p in origcubes]
+ if newcubes:
+ self.config.add_cubes(newcubes)
+ return newcubes
+
+    @deprecated('[3.20] use drop_cube() instead of remove_cube()')
+    def cmd_remove_cube(self, cube, removedeps=False):
+        # backward-compatible alias for cmd_drop_cube
+        return self.cmd_drop_cube(cube, removedeps)
+
+    def cmd_drop_cube(self, cube, removedeps=False):
+        """remove `cube` (and its dependencies when `removedeps` is true)
+        from the in-memory configuration; return the list of cubes actually
+        removed
+
+        :raise ConfigurationError: when `cube` cannot be removed because it
+          is still used as a dependency of another cube
+        """
+        if removedeps:
+            toremove = self.config.expand_cubes([cube])
+        else:
+            toremove = (cube,)
+        origcubes = self.config._cubes
+        basecubes = [c for c in origcubes if not c in toremove]
+        # don't fake-add any new ones, or we won't be able to really-add them later
+        self.config._cubes = tuple(cube for cube in self.config.expand_cubes(basecubes)
+                                   if cube in origcubes)
+        removed = [p for p in origcubes if not p in self.config._cubes]
+        if not cube in removed and cube in origcubes:
+            raise ConfigurationError("can't remove cube %s, "
+                                     "used as a dependency" % cube)
+        return removed
+
+ def rewrite_configuration(self):
+ configfile = self.config.main_config_file()
+ if self._option_changes:
+ read_old_config(self.config, self._option_changes, configfile)
+ fd, newconfig = tempfile.mkstemp()
+ for optdescr in self._option_changes:
+ if optdescr[0] == 'added':
+ optdict = self.config.get_option_def(optdescr[1])
+ if optdict.get('default') is REQUIRED:
+ self.config.input_option(optdescr[1], optdict)
+ self.config.generate_config(open(newconfig, 'w'))
+ show_diffs(configfile, newconfig, askconfirm=self.confirm is not yes)
+ os.close(fd)
+ if exists(newconfig):
+ os.unlink(newconfig)
+
+    # these are overridden by set_log_methods below
+    # only defining here to prevent pylint from complaining
+    # (the real implementations log through the 'cubicweb.migration' logger)
+    info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None
+
+from logging import getLogger
+from cubicweb import set_log_methods
+# install real info/warning/... methods on MigrationHelper, logging through
+# the 'cubicweb.migration' logger
+set_log_methods(MigrationHelper, getLogger('cubicweb.migration'))
+
+
def version_strictly_lower(a, b):
    """Return True when version string `a` is strictly lower than `b`.

    None compares lower than anything (so (None, None) yields True, as
    before); non-empty strings are compared as logilab Version objects.
    """
    if a is None:
        return True
    if b is None:
        return False
    # only wrap non-empty strings, like the original code
    return (Version(a) if a else a) < (Version(b) if b else b)
+
def max_version(a, b):
    """Return, as a string, the greater of the two version strings `a`/`b`."""
    va, vb = Version(a), Version(b)
    # like max(), keep the first argument on ties
    return str(vb if va < vb else va)
+
class ConfigurationProblem(object):
    """Each cube has its own list of dependencies on other cubes/versions.

    The ConfigurationProblem is used to record the loaded cubes, then to detect
    inconsistencies in their dependencies.

    See configuration management on wikipedia for literature.
    """

    def __init__(self, config):
        self.config = config
        # cube name -> available version; cubicweb itself is always present
        self.cubes = {'cubicweb': cwcfg.cubicweb_version()}

    def add_cube(self, name, version):
        """record an available cube and its version"""
        self.cubes[name] = version

    def solve(self):
        """check dependency consistency; fills self.errors ('add'/'update'
        actions) and self.warnings"""
        self.warnings = []
        self.errors = []
        self.dependencies = {}
        self.reverse_dependencies = {}
        self.constraints = {}
        # read dependencies
        for cube in self.cubes:
            if cube == 'cubicweb':
                continue
            self.dependencies[cube] = dict(self.config.cube_dependencies(cube))
            self.dependencies[cube]['cubicweb'] = self.config.cube_depends_cubicweb_version(cube)
        # compute reverse dependencies: name -> {(operator, version, requiring cube)}
        for cube, dependencies in self.dependencies.items():
            for name, constraint in dependencies.items():
                self.reverse_dependencies.setdefault(name, set())
                if constraint:
                    try:
                        oper, version = constraint.split()
                        self.reverse_dependencies[name].add((oper, version, cube))
                    except Exception:
                        self.warnings.append(
                            'cube %s depends on %s but constraint badly '
                            'formatted: %s' % (cube, name, constraint))
                else:
                    self.reverse_dependencies[name].add((None, None, cube))
        # check consistency
        for cube, versions in sorted(self.reverse_dependencies.items()):
            oper, version, source = None, None, None
            # simplify constraints: keep the most demanding one
            if versions:
                for constraint in versions:
                    op, ver, src = constraint
                    if oper is None:
                        oper = op
                        version = ver
                        source = src
                    elif op == '>=' and oper == '>=':
                        # keep the highest minimum version
                        if version_strictly_lower(version, ver):
                            version = ver
                            source = src
                    elif op is None:  # idiom fix: was `op == None`
                        continue
                    else:
                        print('unable to handle %s in %s, set to `%s %s` '
                              'but currently up to `%s %s`' %
                              (cube, source, oper, version, op, ver))
            # "solve" constraint satisfaction problem
            if cube not in self.cubes:
                self.errors.append(('add', cube, version, source))
            elif versions:
                lower_strict = version_strictly_lower(self.cubes[cube], version)
                if oper in ('>=', '=', '=='):
                    if lower_strict:
                        self.errors.append(('update', cube, version, source))
                elif oper is None:
                    pass  # no constraint on version
                else:
                    print('unknown operator', oper)
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/cmp_schema.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/cmp_schema.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,24 @@
+"""This module compare the Schema on the file system to the one in the database"""
+
+from cStringIO import StringIO
+from cubicweb.web.schemaviewer import SchemaViewer
+from logilab.common.ureports import TextWriter
+import difflib
+
+viewer = SchemaViewer()
+layout_db = viewer.visit_schema(schema, display_relations=True)
+layout_fs = viewer.visit_schema(fsschema, display_relations=True)
+writer = TextWriter()
+stream_db = StringIO()
+stream_fs = StringIO()
+writer.format(layout_db, stream=stream_db)
+writer.format(layout_fs, stream=stream_fs)
+
+stream_db.seek(0)
+stream_fs.seek(0)
+db = stream_db.getvalue().splitlines()
+fs = stream_fs.getvalue().splitlines()
+open('db_schema.txt', 'w').write(stream_db.getvalue())
+open('fs_schema.txt', 'w').write(stream_fs.getvalue())
+#for diff in difflib.ndiff(fs, db):
+# print diff
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/cwdesklets/gfx/bg.png
Binary file cubicweb/misc/cwdesklets/gfx/bg.png has changed
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/cwdesklets/gfx/border-left.png
Binary file cubicweb/misc/cwdesklets/gfx/border-left.png has changed
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/cwdesklets/gfx/logo_cw.png
Binary file cubicweb/misc/cwdesklets/gfx/logo_cw.png has changed
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/cwdesklets/gfx/rss.png
Binary file cubicweb/misc/cwdesklets/gfx/rss.png has changed
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/cwdesklets/rql_query.display
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/cwdesklets/rql_query.display Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,22 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/cwdesklets/rqlsensor/__init__.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/cwdesklets/rqlsensor/__init__.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,118 @@
+# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+import webbrowser
+reload(webbrowser)
+
+from sensor.Sensor import Sensor
+from utils import datatypes, i18n
+
+from cubicweb.dbapi import connect
+
+_ = str
+
+class RQLSensor(Sensor):
+
+ def __init__(self, *args):
+ global _; _ = i18n.Translator("rql-desklet")
+ Sensor.__init__(self)
+ # define configuration
+ self._set_config_type("appid", datatypes.TYPE_STRING, "")
+ self._set_config_type("user", datatypes.TYPE_STRING, "")
+ self._set_config_type("passwd", datatypes.TYPE_SECRET_STRING, "")
+ self._set_config_type("rql", datatypes.TYPE_STRING, "")
+ self._set_config_type("url", datatypes.TYPE_STRING, "")
+ self._set_config_type("delay", datatypes.TYPE_STRING, "600")
+ # default timer
+ self._add_timer(20, self.__update)
+
+ def get_configurator(self):
+ configurator = self._new_configurator()
+ configurator.set_name(_("RQL"))
+ configurator.add_title(_("CubicWeb source settings"))
+ configurator.add_entry(_("ID",), "appid", _("The application id of this source"))
+ configurator.add_entry(_("User",), "user", _("The user to connect to this source"))
+ configurator.add_entry(_("Password",), "passwd", _("The user's password to connect to this source"))
+ configurator.add_entry(_("URL",), "url", _("The url of the web interface for this source"))
+ configurator.add_entry(_("RQL",), "rql", _("The rql query"))
+ configurator.add_entry(_("Update interval",), "delay", _("Delay in seconds between updates"))
+ return configurator
+
+
+ def call_action(self, action, path, args=[]):
+ index = path[-1]
+ output = self._new_output()
+ if action=="enter-line":
+ # change background
+ output.set('resultbg[%s]' % index, 'yellow')
+ elif action=="leave-line":
+ # change background
+ output.set('resultbg[%s]' % index, 'black')
+ elif action=="click-line":
+ # open url
+ output.set('resultbg[%s]' % index, 'black')
+ webbrowser.open(self._urls[index])
+ self._send_output(output)
+
+ def __get_connection(self):
+ try:
+ return self._v_cnx
+ except AttributeError:
+ appid, user, passwd = self._get_config("appid"), self._get_config("user"), self._get_config("passwd")
+ cnx = connect(database=appid, login=user, password=passwd)
+ self._v_cnx = cnx
+ return cnx
+
+ def __run_query(self, output):
+ base = self._get_config('url')
+ rql = self._get_config('rql')
+ cnx = self.__get_connection()
+ cursor = cnx.cursor()
+ try:
+ rset = cursor.execute(rql)
+ except Exception:
+ del self._v_cnx
+ raise
+ self._urls = []
+ output.set('layout', 'vertical, 14')
+ output.set('length', rset.rowcount)
+ i = 0
+ for line in rset:
+ output.set('result[%s]' % i, ', '.join([str(v) for v in line[1:]]))
+ output.set('resultbg[%s]' % i, 'black')
+ try:
+ self._urls.append(base % 'Any X WHERE X eid %s' % line[0])
+ except Exception:
+ self._urls.append('')
+ i += 1
+
+ def __update(self):
+ output = self._new_output()
+ try:
+ self.__run_query(output)
+ except Exception as ex:
+ import traceback
+ traceback.print_exc()
+ output.set('layout', 'vertical, 10')
+ output.set('length', 1)
+ output.set('result[0]', str(ex))
+ self._send_output(output)
+ self._add_timer(int(self._get_config('delay'))*1000, self.__update)
+
+
+def new_sensor(args):
+ return RQLSensor(*args)
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/cwdesklets/web_query.display
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/cwdesklets/web_query.display Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,39 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/cwfs/A_FAIRE
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/cwfs/A_FAIRE Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,14 @@
+TACHES
+======
+
+-- écrire objet stocke/manipule les données
+
+-- extraire tests de chaîne de caractère
+
+* utiliser sqlite
+
+* écrire fonction prend chemin en argument et renvoie contenu
+
+* extraire tests (chaîne de caractère) de spec
+
+* utiliser yams pour schéma
\ No newline at end of file
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/cwfs/cwfs-spec.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/cwfs/cwfs-spec.txt Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,180 @@
+=======================
+ Spécification cubicwebfs
+=======================
+
+Remarque: cubicwebfs c'est le siamois de yamsfs
+en fait c'est un yamsfs avec une interrogation
+de base RQL
+
+Modèle
+-------
+
+Description du modèle;
+::
+ societe
+ nom
+ ville
+
+ affaire
+ ref
+
+ document
+ annee
+ mois
+ jour
+ type {RAP,CLI,OFR,FCT}
+ fichier
+
+document concerne affaire
+affaire concerne societe
+
+Contenu de la base exemple
+---------------------------
+
+societe | nom | ville |
+ | CETIAD | Dijon |
+ | EDF R&D | Clamart |
+ | Logilab | Paris |
+
+affaire | ref | concerne |
+ | CTIA01 | CETIAD |
+ | EDFR01 | EDF R&D |
+ | EDFR02 | EDF R&D |
+
+document | annee | mois | jour | type | concerne | fichier |
+ | 2004 | 09 | 06 | PRE | CTIA01 | depodoc/2004/09/CTIA01-040906-PRE-1-01.pdf |
+ | 2005 | 02 | 01 | CLI | EDFR01 | depodoc/2005/02/EDFR01-050201-CLI-1-01.pdf |
+ | 2005 | 03 | 22 | OFR | EDFR01 | depodoc/2005/02/EDFR01-050322-OFR-1-01.pdf |
+
+
+Exemples de chemins/recherches
+-------------------------------
+
+Cherche documents de mars 2005;
+::
+ /document/annee/2005/mois/03/
+
+
+Dont le contenu successif serait;
+
+Test::
+
+ $ ls /document
+ annee/ mois/ jour/ type/
+ affaire/ concerne/ CTIA01-040906-PRE-1-01.pdf
+ EDFR01-050201-CLI-1-01.pdf EDFR01-050322-OFR-1-01.pdf
+
+ $ ls /document/annee/
+ 2004/ 2005/
+
+ $ ls /document/annee/2005/
+ mois/ jour/ type/ affaire/
+ concerne/ EDFR01-050201-CLI-1-01.pdf EDFR01-050322-OFR-1-01.pdf
+
+ $ ls /document/annee/2005/mois/
+ 02/ 03/
+
+ $ ls /document/annee/2005/mois/03/
+ jour/ type/ affaire/ concerne/
+ EDFR01-050322-OFR-1-01.pdf
+
+
+Question: est-ce que fichier/ ne va pas nous manquer ?
+
+
+Cherche documents relatifs à CTIA01;
+::
+ /affaire/ref/CTIA01/document/
+
+Dont le contenu des répertoires successifs serait:
+
+Test::
+
+ $ ls /affaire/
+ ref/ societe/ concerne/ document/
+ concerne_par/ CTIA01 EDFR01 EDFR02
+
+ $ ls /affaire/ref/
+ CTIA01/ EDFR01/ EDFR02/
+
+ $ ls /affaire/ref/CTIA01/
+ societe/ concerne/ document/ concerne_par/
+
+ $ ls /affaire/ref/CTIA01/document/
+ annee/ mois/ jour/ type/
+ CTIA01-040906-PRE-1-01.pdf
+
+
+Cherche documents des affaires qui concernent CETIAD;
+::
+ /societe/nom/CETIAD/affaire/document/
+
+Dont le contenu des répertoires successifs serait;
+
+Test::
+
+ $ ls /societe/
+ nom/ ville/ affaire/ concerne_par/
+ CETIAD EDF R&D Logilab
+
+ $ ls /societe/nom/
+ CETIAD EDF R&D Logilab
+
+ $ ls /societe/nom/CETIAD/
+ ville/ affaire/ concerne_par/ CETIAD Logilab
+
+ $ ls /societe/nom/CETIAD/affaire/
+ ref/ societe/ concerne/ document/
+ concerne_par/ CTIA01
+
+ $ ls /societe/nom/CETIAD/affaire/document/
+ annee/ mois/ jour/ type/
+ affaire/ concerne/ CTIA01-040906-PRE-1-01.pdf
+
+
+En particulier, pour la recherche ci-dessus on ne peut pas écrire;
+::
+ /document/affaire/concerne/societe/CETIAD/
+
+La logique est que si on est dans un rpertoire document, il faut
+qu'il contienne des documents.
+
+Cherche documents de 2002 qui concernent des affaires
+qui concernent CETIAD;
+::
+ /societe/CETIAD/affaire/document/annee/2002/
+
+Question: est-ce que les relations doivent être des composants
+du chemin ?
+Question : si les relations ne font pas partie du chemin, il faudrait
+pouvoir faire des recherches en utilisant des relations anonymes (ce
+qui est impossible en RQL par exemple);
+::
+ /document/affaire/... s'il existe plusieurs relations entre
+ les entités document et affaire, on ne peut pas s'en sortir
+
+Question: que va-t-il se passer pour des chemins du type;
+::
+ /affaire/CTIA*/document/
+
+Nicolas: à mon avis on a rien à faire, car c'est le shell qui
+s'en occupe. De la même façon, le système de fichier n'a pas
+à se préoccuper de ~/ et les programmes reçoivent pas le "qqch*"
+en argument, mais directement la liste.
+
+Attention: si jamais l'arborescence est sans fond, les
+commandes récursives vont prendre du temps...
+
+Attention: dans un premier temps, un système de fichiers en
+lecture seule est satisfaisant. on verra ensuite pour l'édition.
+pour l'édition, on peut s'inspirer du external editor de zope
+et avoir un format d'échange XML entre le serveur et l'éditeur.
+
+Le cas suivant est débile, faut-il l'interdire ?
+::
+ /document/affaire/societe/concerne_par/affaire/concerne_par/document
+
+
+NB: manque détail d'un cas comme /document/annee/2005/concerne/affaire/
+
+
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/cwfs/cwfs.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/cwfs/cwfs.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,175 @@
+# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+"""
+class Schema :
+
+ def __init__(self, schema) :
+ self._schema = schema
+
+ def get_attrs(self, entity) :
+ return self._schema[entity][0]
+
+ def get_relations(self, entity) :
+ return self._schema[entity][1]
+
+ def get_attr_index(self, entity, attr) :
+ return list(self._schema[entity][0]).index(attr)
+
+SCHEMA = Schema({'societe': ( ('nom','ville'),
+ [('concerne_par','affaire'),
+ ] ),
+ 'affaire': ( ('ref',),
+ [('concerne','societe'),
+ ('concerne_par', 'document')
+ ] ),
+ 'document':( ('fichier', 'annee','mois','jour','type'),
+ [('concerne','affaire'),
+ ] ),
+ })
+
+
+
+DATA = { 'societe': [ ('CETIAD', 'Dijon'),
+ ('EDF_R&D', 'Clamart'),
+ ('Logilab', 'Paris'),
+ ],
+ 'affaire': [ ('CTIA01', 'CETIAD'),
+ ('EDFR01', 'EDF_R&D'),
+ ('EDFR02', 'EDF_R&D'),
+ ],
+ 'document':[ ('CTIA01-040906-PRE-1-01.pdf','2004','09','06','PRE','CTIA01'),
+ ('EDFR01-050201-CLI-1-01.pdf','2005','02','01','CLI','EDFR01'),
+ ('EDFR01-050322-OFR-1-01.pdf','2005','03','22','OFR','EDFR01'),
+ ],
+ }
+
+def get_data(entity, where=[]) :
+ for value in DATA[entity] :
+ for index, val in where :
+ if value[index] != val :
+ break
+ else :
+ yield value
+
+class PathParser :
+
+ def __init__(self, schema, path) :
+ self.schema = schema
+ self.path = path
+ self._components = iter([comp for comp in self.path.split('/') if comp])
+ self._entity = None
+ self._attr = None
+ self._rel = None
+ self._restrictions = []
+
+ def parse(self) :
+ self._entity = next(self._components)
+ try:
+ self.process_entity()
+ except StopIteration :
+ pass
+
+ def process_entity(self) :
+ _next = next(self._components)
+ if _next in self.schema.get_attrs(self._entity) :
+ self._attr = _next
+ _next = next(self._components)
+ self._restrictions.append( (self._entity, self._attr, _next) )
+ self._attr = None
+ self._rel = None
+ self.process_entity()
+
+ def get_list(self) :
+ if self._rel :
+ return
+ elif self._attr :
+ where = []
+ for e,a,v in self._restrictions :
+ i = self.schema.get_attr_index(e, a)
+ where.append( (i,v) )
+ i = self.schema.get_attr_index(self._entity, self._attr)
+ for values in get_data(self._entity,where) :
+ yield values[i]+'/'
+ else :
+ attr_restrict = [a for e,a,v in self._restrictions]
+ for attr in self.schema.get_attrs(self._entity) :
+ if attr not in attr_restrict :
+ yield attr+'/'
+ for data in DATA[self._entity]:
+ yield data[0]
+ for nom, entity in self.schema.get_relations(self._entity) :
+ yield nom+'/'
+ yield entity+'/'
+
+def ls(path) :
+ p = PathParser(SCHEMA,path)
+ p.parse()
+ return list(p.get_list())
+
+
+class SytPathParser :
+
+ def __init__(self, schema, path) :
+ self.schema = schema
+ self.path = path
+ self._components = iter([comp for comp in self.path.split('/') if comp])
+ self._e_type = None
+ self._restrictions = []
+ self._alphabet = list('ABCDEFGHIJKLMNOPQRSTUVWXYZ')
+
+ def parse(self):
+ self._var = self._alphabet.pop(0)
+ self._e_type = next(self._components)
+ e_type = self._e_type.capitalize()
+ self._restrictions.append('%s is %s' % (self._var, e_type))
+ try:
+ self.process_entity()
+ except StopIteration :
+ pass
+ return 'Any %s WHERE %s' % (self._var, ', '.join(self._restrictions))
+
+ def process_entity(self) :
+ _next = next(self._components)
+ if _next in self.schema.get_attrs(self._e_type) :
+ attr = _next
+ try:
+ _next = next(self._components)
+ self._restrictions.append('%s %s %s' % (self._var, attr, _next))
+ except StopIteration:
+ a_var = self._alphabet.pop(0)
+ self._restrictions.append('%s %s %s' % (self._var, attr, a_var) )
+ self._var = a_var
+ raise
+ elif _next in [r for r,e in self.schema.get_relations(self._e_type)]:
+ rel = _next
+ r_var = self._alphabet.pop(0)
+ self._restrictions.append('%s %s %s' % (self._var, rel, r_var))
+ self._var = r_var
+ try:
+ _next = next(self._components)
+ self._restrictions.append('%s is %s' % (r_var, _next.capitalize()))
+ except StopIteration:
+ raise
+ self.process_entity()
+
+
+def to_rql(path) :
+ p = SytPathParser(SCHEMA,path)
+ return p.parse()
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/cwfs/cwfs_test.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/cwfs/cwfs_test.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,66 @@
+# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+"""
+from logilab.common.testlib import TestCase, unittest_main
+
+import cubicwebfs
+import sre
+
+def spec_parser(filename) :
+ """
+ extract tests from specification
+ """
+ sections = []
+ buffer = ""
+ in_section = False
+ for line in open(filename) :
+ if line.startswith('Test::'):
+ in_section = True
+ buffer = ""
+ elif in_section :
+ if line.startswith(" ") or not line.strip() :
+ buffer += line.lstrip()
+ else :
+ sections.append(buffer)
+ in_section = False
+ tests = []
+ for section in sections :
+ subsections = [t for t in section.strip().split('$ ls') if t]
+ for subsection in subsections :
+ path, results = subsection.splitlines()[0], subsection.splitlines()[1:]
+ path = path.strip()
+ items = set([i for i in sre.split('[\t\n]', '\n'.join(results)) if i])
+ tests.append((path, items))
+ return tests
+
+tests = spec_parser("cubicwebfs-spec.txt")
+
+class monTC(TestCase) :
+ pass
+
+for index, (path, results) in enumerate(tests) :
+ def f(self, p=path, r=results) :
+ res = set(cubicwebfs.ls(p))
+ self.assertEqual(r, res) #, 'en trop %s\nmanque %s' % (r-results,results-r))
+ f.__doc__ = "%s %s"%(index,path)
+ setattr(monTC,'test_%s'%index,f)
+
+if __name__ == '__main__':
+ unittest_main()
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/cwzope/cwzope.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/cwzope/cwzope.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,50 @@
+# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+"""
+from AccessControl import getSecurityManager
+
+from cubicweb.dbapi import connect, Connection, Cursor
+from cubicweb.common.utils import ResultSet, ResultSetIterator, ResultSetRow, Entity
+
+Connection.__allow_access_to_unprotected_subobjects__ = 1
+Cursor.__allow_access_to_unprotected_subobjects__ = 1
+ResultSet.__allow_access_to_unprotected_subobjects__ = 1
+ResultSetIterator.__allow_access_to_unprotected_subobjects__ = 1
+ResultSetRow.__allow_access_to_unprotected_subobjects__ = 1
+Entity.__allow_access_to_unprotected_subobjects__ = 1
+
+CNX_CACHE = {}
+
+def get_connection(context, user=None, password=None,
+ host=None, database=None, group='cubicweb'):
+ """get a connection on an cubicweb server"""
+ request = context.REQUEST
+ zope_user = getSecurityManager().getUser()
+ if user is None:
+ user = zope_user.getId()
+ key = (user, host, database)
+ try:
+ return CNX_CACHE[key]
+ except KeyError:
+ if password is None:
+ password = zope_user._getPassword()
+ cnx = connect(user, password, host, database, group)
+ CNX_CACHE[key] = cnx
+ return cnx
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.10.0_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.10.0_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,35 @@
+from six import text_type
+
+for uri, cfg in config.read_sources_file().items():
+ if uri in ('system', 'admin'):
+ continue
+ repo.sources_by_uri[uri] = repo.get_source(cfg['adapter'], uri, cfg.copy())
+
+add_entity_type('CWSource')
+add_relation_definition('CWSource', 'cw_source', 'CWSource')
+add_entity_type('CWSourceHostConfig')
+
+with session.allow_all_hooks_but('cw.sources'):
+ create_entity('CWSource', type=u'native', name=u'system')
+commit()
+
+sql('INSERT INTO cw_source_relation(eid_from,eid_to) '
+ 'SELECT e.eid,s.cw_eid FROM entities as e, cw_CWSource as s '
+ 'WHERE s.cw_name=e.type')
+commit()
+
+for uri, cfg in config.read_sources_file().items():
+ if uri in ('system', 'admin'):
+ continue
+ repo.sources_by_uri.pop(uri)
+ config = u'\n'.join('%s=%s' % (key, value) for key, value in cfg.items()
+ if key != 'adapter' and value is not None)
+ create_entity('CWSource', name=text_type(uri), type=text_type(cfg['adapter']),
+ config=config)
+commit()
+
+# rename cwprops for boxes/contentnavigation
+for x in rql('Any X,XK WHERE X pkey XK, '
+ 'X pkey ~= "boxes.%" OR '
+ 'X pkey ~= "contentnavigation.%"').entities():
+ x.cw_set(pkey=u'ctxcomponents.' + x.pkey.split('.', 1)[1])
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.10.0_common.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.10.0_common.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,1 @@
+option_group_changed('cleanup-session-time', 'web', 'main')
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.10.4_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.10.4_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,8 @@
+for eschema in schema.entities():
+ if not (eschema.final or 'cw_source' in eschema.subjrels):
+ add_relation_definition(eschema.type, 'cw_source', 'CWSource', ask_confirm=False)
+
+sql('INSERT INTO cw_source_relation(eid_from, eid_to) '
+ 'SELECT e.eid,s.cw_eid FROM entities as e, cw_CWSource as s '
+ 'WHERE s.cw_name=e.source AND NOT EXISTS(SELECT 1 FROM cw_source_relation WHERE eid_from=e.eid AND eid_to=s.cw_eid)')
+commit()
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.10.5_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.10.5_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,6 @@
+sync_schema_props_perms('CWSourceHostConfig', syncperms=False)
+
+sql('INSERT INTO cw_source_relation(eid_from, eid_to) '
+ 'SELECT e.eid,s.cw_eid FROM entities as e, cw_CWSource as s '
+ 'WHERE s.cw_name=e.source AND NOT EXISTS(SELECT 1 FROM cw_source_relation WHERE eid_from=e.eid AND eid_to=s.cw_eid)')
+commit()
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.10.7_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.10.7_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,2 @@
+add_attribute('TrInfo', 'tr_count')
+sync_schema_props_perms('TrInfo')
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.10.8_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.10.8_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,1 @@
+sync_schema_props_perms('CWSource', syncprops=False)
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.10.9_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.10.9_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,32 @@
+import sys
+
+if confirm('fix some corrupted entities noticed on several instances?'):
+ rql('DELETE CWConstraint X WHERE NOT E constrained_by X')
+ rql('SET X is_instance_of Y WHERE X is Y, NOT X is_instance_of Y')
+ commit()
+
+if confirm('fix existing cwuri?'):
+ from logilab.common.shellutils import progress
+ from cubicweb.server.session import hooks_control
+ rset = rql('Any X, XC WHERE X cwuri XC, X cwuri ~= "%/eid/%"')
+ title = "%i entities to fix" % len(rset)
+ nbops = rset.rowcount
+ enabled = interactive_mode
+ with progress(title=title, nbops=nbops, size=30, enabled=enabled) as pb:
+ for i, row in enumerate(rset):
+ with session.deny_all_hooks_but('integrity'):
+ data = {'eid': row[0], 'cwuri': row[1].replace(u'/eid', u'')}
+ rql('SET X cwuri %(cwuri)s WHERE X eid %(eid)s', data)
+ if not i % 100: # commit every 100 entities to limit memory consumption
+ pb.text = "%i committed" % i
+ commit(ask_confirm=False)
+ pb.update()
+ commit(ask_confirm=False)
+
+try:
+ from cubicweb import devtools
+ option_group_changed('anonymous-user', 'main', 'web')
+ option_group_changed('anonymous-password', 'main', 'web')
+except ImportError:
+ # cubicweb-dev unavailable, nothing needed
+ pass
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.11.0_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.11.0_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,11 @@
+from datetime import datetime
+
+for rtype in ('cw_support', 'cw_dont_cross', 'cw_may_cross'):
+ drop_relation_type(rtype)
+
+add_entity_type('CWSourceSchemaConfig')
+
+if not 'url' in schema['CWSource'].subjrels:
+ add_attribute('CWSource', 'url')
+ add_attribute('CWSource', 'parser')
+ add_attribute('CWSource', 'latest_retrieval')
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.12.9_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.12.9_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,1 @@
+sync_schema_props_perms('cw_source')
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.13.0_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.13.0_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,3 @@
+sync_schema_props_perms('cw_source', syncprops=False)
+if schema['BigInt'].eid is None:
+ add_entity_type('BigInt')
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.13.3_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.13.3_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,2 @@
+drop_relation_definition('CWSourceSchemaConfig', 'cw_schema', 'CWAttribute')
+sync_schema_props_perms('cw_schema')
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.13.6_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.13.6_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,1 @@
+sync_schema_props_perms('CWSourceSchemaConfig')
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.13.8_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.13.8_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,5 @@
+change_attribute_type('CWCache', 'timestamp', 'TZDatetime')
+change_attribute_type('CWUser', 'last_login_time', 'TZDatetime')
+change_attribute_type('CWSource', 'latest_retrieval', 'TZDatetime')
+drop_attribute('CWSource', 'synchronizing')
+add_attribute('CWSource', 'in_synchronization')
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.14.0_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.14.0_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,15 @@
+from __future__ import print_function
+
+config['rql-cache-size'] = config['rql-cache-size'] * 10
+
+add_entity_type('CWDataImport')
+
+from cubicweb.schema import CONSTRAINTS, guess_rrqlexpr_mainvars
+for rqlcstr in rql('Any X,XT,XV WHERE X is CWConstraint, X cstrtype XT, X value XV,'
+ 'X cstrtype XT, XT name IN ("RQLUniqueConstraint","RQLConstraint","RQLVocabularyConstraint"),'
+ 'NOT X value ~= ";%"').entities():
+ expression = rqlcstr.value
+ mainvars = guess_rrqlexpr_mainvars(expression)
+ yamscstr = CONSTRAINTS[rqlcstr.type](expression, mainvars)
+ rqlcstr.cw_set(value=yamscstr.serialize())
+ print('updated', rqlcstr.type, rqlcstr.value.strip())
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.14.7_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.14.7_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,4 @@
+# migrate default format for TriInfo `comment_format` attribute
+sync_schema_props_perms('TrInfo')
+
+commit()
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.15.0_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.15.0_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,10 @@
+sync_schema_props_perms('EmailAddress')
+
+for source in rql('CWSource X WHERE X type "ldapuser"').entities():
+ config = source.dictconfig
+ host = config.pop('host', u'ldap')
+ protocol = config.pop('protocol', u'ldap')
+ source.cw_set(url=u'%s://%s' % (protocol, host))
+ source.update_config(skip_unknown=True, **config)
+
+commit()
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.15.0_common.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.15.0_common.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,7 @@
+import ConfigParser
+try:
+ undo_actions = config.cfgfile_parser.get('MAIN', 'undo-support', False)
+except ConfigParser.NoOptionError:
+ pass # this conf. file was probably already migrated
+else:
+ config.global_set_option('undo-enabled', bool(undo_actions))
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.15.4_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.15.4_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,13 @@
+from __future__ import print_function
+
+from logilab.common.shellutils import generate_password
+from cubicweb.server.utils import crypt_password
+
+for user in rql('CWUser U WHERE U cw_source S, S name "system", U upassword P, U login L').entities():
+ salt = user.upassword.getvalue()
+ if crypt_password('', salt) == salt:
+ passwd = generate_password()
+ print('setting random password for user %s' % user.login)
+ user.set_attributes(upassword=passwd)
+
+commit()
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.15.9_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.15.9_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,2 @@
+sync_schema_props_perms(('State', 'state_of', 'Workflow'), commit=False)
+sync_schema_props_perms(('State', 'name', 'String'))
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.16.1_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.16.1_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,2 @@
+sync_schema_props_perms(('State', 'state_of', 'Workflow'), commit=False)
+sync_schema_props_perms(('State', 'name', 'String'))
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.17.0_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.17.0_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,1 @@
+add_attribute('CWAttribute', 'extra_props')
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.17.11_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.17.11_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,7 @@
+for table, column in [
+ ('transactions', 'tx_time'),
+ ('tx_entity_actions', 'tx_uuid'),
+ ('tx_relation_actions', 'tx_uuid')]:
+ repo.system_source.create_index(session, table, column)
+
+commit()
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.18.2_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.18.2_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,2 @@
+install_custom_sql_scripts()
+commit()
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.18.4_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.18.4_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,2 @@
+sync_schema_props_perms('CWSource')
+sync_schema_props_perms('CWSourceHostConfig')
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.19.0_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.19.0_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,3 @@
+sql('DROP TABLE "deleted_entities"')
+
+commit()
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.20.0_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.20.0_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,6 @@
+sync_schema_props_perms('state_of')
+sync_schema_props_perms('transition_of')
+sync_schema_props_perms('State')
+sync_schema_props_perms('BaseTransition')
+sync_schema_props_perms('Transition')
+sync_schema_props_perms('WorkflowTransition')
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.20.7_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.20.7_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,2 @@
+if repo.system_source.dbdriver == 'postgres':
+ install_custom_sql_scripts()
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.20.8_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.20.8_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,1 @@
+sync_schema_props_perms('cwuri')
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.21.0_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.21.0_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,173 @@
+from __future__ import print_function
+
+from cubicweb.schema import PURE_VIRTUAL_RTYPES
+from cubicweb.server.schema2sql import rschema_has_table
+
+
+def add_foreign_keys():
+ source = repo.system_source
+ if not source.dbhelper.alter_column_support:
+ return
+ for rschema in schema.relations():
+ if rschema.inlined:
+ add_foreign_keys_inlined(rschema)
+ elif rschema_has_table(rschema, skip_relations=PURE_VIRTUAL_RTYPES):
+ add_foreign_keys_relation(rschema)
+ for eschema in schema.entities():
+ if eschema.final:
+ continue
+ add_foreign_key_etype(eschema)
+
+
+def add_foreign_keys_relation(rschema):
+ args = {'r': rschema.type}
+ count = sql('SELECT COUNT(*) FROM ('
+ ' SELECT eid_from FROM %(r)s_relation'
+ ' UNION'
+ ' SELECT eid_to FROM %(r)s_relation'
+ ' EXCEPT'
+ ' SELECT eid FROM entities) AS eids' % args,
+ ask_confirm=False)[0][0]
+ if count:
+ print('%s references %d unknown entities, deleting' % (rschema, count))
+ sql('DELETE FROM %(r)s_relation '
+ 'WHERE eid_from IN (SELECT eid_from FROM %(r)s_relation EXCEPT SELECT eid FROM entities)' % args)
+ sql('DELETE FROM %(r)s_relation '
+ 'WHERE eid_to IN (SELECT eid_to FROM %(r)s_relation EXCEPT SELECT eid FROM entities)' % args)
+
+ args['from_fk'] = '%(r)s_relation_eid_from_fkey' % args
+ args['to_fk'] = '%(r)s_relation_eid_to_fkey' % args
+ args['table'] = '%(r)s_relation' % args
+ if repo.system_source.dbdriver == 'postgres':
+ sql('ALTER TABLE %(table)s DROP CONSTRAINT IF EXISTS %(from_fk)s' % args,
+ ask_confirm=False)
+ sql('ALTER TABLE %(table)s DROP CONSTRAINT IF EXISTS %(to_fk)s' % args,
+ ask_confirm=False)
+ elif repo.system_source.dbdriver.startswith('sqlserver'):
+ sql("IF OBJECT_ID('%(from_fk)s', 'F') IS NOT NULL "
+ "ALTER TABLE %(table)s DROP CONSTRAINT %(from_fk)s" % args,
+ ask_confirm=False)
+ sql("IF OBJECT_ID('%(to_fk)s', 'F') IS NOT NULL "
+ "ALTER TABLE %(table)s DROP CONSTRAINT %(to_fk)s" % args,
+ ask_confirm=False)
+ sql('ALTER TABLE %(table)s ADD CONSTRAINT %(from_fk)s '
+ 'FOREIGN KEY (eid_from) REFERENCES entities (eid)' % args,
+ ask_confirm=False)
+ sql('ALTER TABLE %(table)s ADD CONSTRAINT %(to_fk)s '
+ 'FOREIGN KEY (eid_to) REFERENCES entities (eid)' % args,
+ ask_confirm=False)
+
+
+def add_foreign_keys_inlined(rschema):
+ for eschema in rschema.subjects():
+ args = {'e': eschema.type, 'r': rschema.type}
+ args['c'] = 'cw_%(e)s_cw_%(r)s_fkey' % args
+
+ if eschema.rdef(rschema).cardinality[0] == '1':
+ broken_eids = sql('SELECT cw_eid FROM cw_%(e)s WHERE cw_%(r)s IS NULL' % args,
+ ask_confirm=False)
+ if broken_eids:
+ print('Required relation %(e)s.%(r)s missing' % args)
+ args['eids'] = ', '.join(str(eid) for eid, in broken_eids)
+ rql('DELETE %(e)s X WHERE X eid IN (%(eids)s)' % args)
+ broken_eids = sql('SELECT cw_eid FROM cw_%(e)s WHERE cw_%(r)s IN (SELECT cw_%(r)s FROM cw_%(e)s '
+ 'EXCEPT SELECT eid FROM entities)' % args,
+ ask_confirm=False)
+ if broken_eids:
+ print('Required relation %(e)s.%(r)s references unknown objects, deleting subject entities' % args)
+ args['eids'] = ', '.join(str(eid) for eid, in broken_eids)
+ rql('DELETE %(e)s X WHERE X eid IN (%(eids)s)' % args)
+ else:
+ if sql('SELECT COUNT(*) FROM ('
+ ' SELECT cw_%(r)s FROM cw_%(e)s WHERE cw_%(r)s IS NOT NULL'
+ ' EXCEPT'
+ ' SELECT eid FROM entities) AS eids' % args,
+ ask_confirm=False)[0][0]:
+ print('%(e)s.%(r)s references unknown entities, deleting relation' % args)
+ sql('UPDATE cw_%(e)s SET cw_%(r)s = NULL WHERE cw_%(r)s IS NOT NULL AND cw_%(r)s IN '
+ '(SELECT cw_%(r)s FROM cw_%(e)s EXCEPT SELECT eid FROM entities)' % args)
+
+ if repo.system_source.dbdriver == 'postgres':
+ sql('ALTER TABLE cw_%(e)s DROP CONSTRAINT IF EXISTS %(c)s' % args,
+ ask_confirm=False)
+ elif repo.system_source.dbdriver.startswith('sqlserver'):
+ sql("IF OBJECT_ID('%(c)s', 'F') IS NOT NULL "
+ "ALTER TABLE cw_%(e)s DROP CONSTRAINT %(c)s" % args,
+ ask_confirm=False)
+ sql('ALTER TABLE cw_%(e)s ADD CONSTRAINT %(c)s '
+ 'FOREIGN KEY (cw_%(r)s) references entities(eid)' % args,
+ ask_confirm=False)
+
+
+def add_foreign_key_etype(eschema):
+ args = {'e': eschema.type}
+ if sql('SELECT COUNT(*) FROM ('
+ ' SELECT cw_eid FROM cw_%(e)s'
+ ' EXCEPT'
+ ' SELECT eid FROM entities) AS eids' % args,
+ ask_confirm=False)[0][0]:
+ print('%(e)s has nonexistent entities, deleting' % args)
+ sql('DELETE FROM cw_%(e)s WHERE cw_eid IN '
+ '(SELECT cw_eid FROM cw_%(e)s EXCEPT SELECT eid FROM entities)' % args)
+ args['c'] = 'cw_%(e)s_cw_eid_fkey' % args
+ if repo.system_source.dbdriver == 'postgres':
+ sql('ALTER TABLE cw_%(e)s DROP CONSTRAINT IF EXISTS %(c)s' % args,
+ ask_confirm=False)
+ elif repo.system_source.dbdriver.startswith('sqlserver'):
+ sql("IF OBJECT_ID('%(c)s', 'F') IS NOT NULL "
+ "ALTER TABLE cw_%(e)s DROP CONSTRAINT %(c)s" % args,
+ ask_confirm=False)
+ sql('ALTER TABLE cw_%(e)s ADD CONSTRAINT %(c)s '
+ 'FOREIGN KEY (cw_eid) REFERENCES entities (eid)' % args,
+ ask_confirm=False)
+
+
+add_foreign_keys()
+
+cu = session.cnxset.cu
+helper = repo.system_source.dbhelper
+
+helper.drop_index(cu, 'entities', 'extid', False)
+# don't use create_index because it doesn't work for columns that may be NULL
+# on sqlserver
+for query in helper.sqls_create_multicol_unique_index('entities', ['extid']):
+ cu.execute(query)
+
+if 'moved_entities' not in helper.list_tables(cu):
+ sql('''
+ CREATE TABLE moved_entities (
+ eid INTEGER PRIMARY KEY NOT NULL,
+ extid VARCHAR(256) UNIQUE
+ )
+ ''')
+
+moved_entities = sql('SELECT -eid, extid FROM entities WHERE eid < 0',
+ ask_confirm=False)
+if moved_entities:
+ cu.executemany('INSERT INTO moved_entities (eid, extid) VALUES (%s, %s)',
+ moved_entities)
+ sql('DELETE FROM entities WHERE eid < 0')
+
+commit()
+
+sync_schema_props_perms('CWEType')
+
+sync_schema_props_perms('cwuri')
+
+from cubicweb.server.schema2sql import check_constraint
+
+for cwconstraint in rql('Any C WHERE R constrained_by C').entities():
+ cwrdef = cwconstraint.reverse_constrained_by[0]
+ rdef = cwrdef.yams_schema()
+ cstr = rdef.constraint_by_eid(cwconstraint.eid)
+ if cstr.type() not in ('BoundaryConstraint', 'IntervalBoundConstraint', 'StaticVocabularyConstraint'):
+ continue
+ cstrname, check = check_constraint(rdef, cstr, helper, prefix='cw_')
+ args = {'e': rdef.subject.type, 'c': cstrname, 'v': check}
+ if repo.system_source.dbdriver == 'postgres':
+ sql('ALTER TABLE cw_%(e)s DROP CONSTRAINT IF EXISTS %(c)s' % args, ask_confirm=False)
+ elif repo.system_source.dbdriver.startswith('sqlserver'):
+ sql("IF OBJECT_ID('%(c)s', 'C') IS NOT NULL "
+ "ALTER TABLE cw_%(e)s DROP CONSTRAINT %(c)s" % args, ask_confirm=False)
+ sql('ALTER TABLE cw_%(e)s ADD CONSTRAINT %(c)s CHECK(%(v)s)' % args, ask_confirm=False)
+commit()
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.21.1_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.21.1_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,4 @@
+# re-read ComputedRelation permissions from schema.py now that we're
+# able to serialize them
+for computedrtype in schema.iter_computed_relations():
+ sync_schema_props_perms(computedrtype.type)
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.21.2_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.21.2_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,7 @@
+sync_schema_props_perms('cwuri')
+
+helper = repo.system_source.dbhelper
+cu = session.cnxset.cu
+helper.set_null_allowed(cu, 'moved_entities', 'extid', 'VARCHAR(256)', False)
+
+commit()
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.22.0_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.22.0_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,21 @@
+if confirm('use Europe/Paris as timezone?'):
+ timezone = 'Europe/Paris'
+else:
+ import pytz
+ while True:
+ timezone = raw_input('enter your timezone')
+ if timezone in pytz.common_timezones:
+ break
+
+dbdriver = repo.system_source.dbdriver
+if dbdriver == 'postgres':
+ sql("SET TIME ZONE '%s'" % timezone)
+
+for entity in schema.entities():
+ if entity.final or entity.type not in fsschema:
+ continue
+ change_attribute_type(entity.type, 'creation_date', 'TZDatetime', ask_confirm=False)
+ change_attribute_type(entity.type, 'modification_date', 'TZDatetime', ask_confirm=False)
+
+if dbdriver == 'postgres':
+ sql("SET TIME ZONE UTC")
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.22.1_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.22.1_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,12 @@
+from os import unlink
+from os.path import isfile, join
+from cubicweb.cwconfig import CubicWebConfiguration as cwcfg
+
+regdir = cwcfg.instances_dir()
+
+if isfile(join(regdir, 'startorder')):
+ if confirm('The startorder file is not used anymore in Cubicweb 3.22. '
+ 'Should I delete it?',
+ shell=False, pdb=False):
+ unlink(join(regdir, 'startorder'))
+
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.22.3_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.22.3_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,11 @@
+from yams.constraints import UniqueConstraint
+
+for rschema in schema.relations():
+ if rschema.rule or not rschema.final:
+ continue
+ for rdef in rschema.rdefs.values():
+ if (rdef.object != 'String'
+ and any(isinstance(cstr, UniqueConstraint) for cstr in rdef.constraints)):
+ table = 'cw_{0}'.format(rdef.subject)
+ column = 'cw_{0}'.format(rdef.rtype)
+ repo.system_source.create_index(cnx, table, column, unique=True)
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.23.0_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.23.0_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,64 @@
+
+from functools import partial
+
+from yams.constraints import UniqueConstraint
+
+from cubicweb.schema import PURE_VIRTUAL_RTYPES
+from cubicweb.server.schema2sql import build_index_name, check_constraint
+
+sql = partial(sql, ask_confirm=False)
+
+source = repo.system_source
+helper = source.dbhelper
+
+for rschema in schema.relations():
+ if rschema.rule or rschema in PURE_VIRTUAL_RTYPES:
+ continue
+ if rschema.final or rschema.inlined:
+ for rdef in rschema.rdefs.values():
+ table = 'cw_{0}'.format(rdef.subject)
+ column = 'cw_{0}'.format(rdef.rtype)
+ if any(isinstance(cstr, UniqueConstraint) for cstr in rdef.constraints):
+ old_name = '%s_%s_key' % (table.lower(), column.lower())
+ sql('ALTER TABLE %s DROP CONSTRAINT IF EXISTS %s' % (table, old_name))
+ source.create_index(cnx, table, column, unique=True)
+ if rschema.inlined or rdef.indexed:
+ old_name = '%s_%s_idx' % (table.lower(), column.lower())
+ sql('DROP INDEX IF EXISTS %s' % old_name)
+ source.create_index(cnx, table, column)
+ else:
+ table = '{0}_relation'.format(rschema)
+ sql('ALTER TABLE %s DROP CONSTRAINT IF EXISTS %s_p_key' % (table, table))
+ sql('ALTER TABLE %s ADD CONSTRAINT %s PRIMARY KEY(eid_from, eid_to)'
+ % (table, build_index_name(table, ['eid_from', 'eid_to'], 'key_')))
+ for column in ('from', 'to'):
+ sql('DROP INDEX IF EXISTS %s_%s_idx' % (table, column))
+ sql('CREATE INDEX %s ON %s(eid_%s);'
+ % (build_index_name(table, ['eid_' + column], 'idx_'), table, column))
+
+
+# we changed constraint serialization, which also changes their name
+
+for table, cstr in sql("""
+ SELECT table_name, constraint_name FROM information_schema.constraint_column_usage
+ WHERE constraint_name LIKE 'cstr%'"""):
+ sql("ALTER TABLE %(table)s DROP CONSTRAINT IF EXISTS %(cstr)s" % locals())
+
+for cwconstraint in rql('Any C WHERE R constrained_by C').entities():
+ cwrdef = cwconstraint.reverse_constrained_by[0]
+ rdef = cwrdef.yams_schema()
+ cstr = rdef.constraint_by_eid(cwconstraint.eid)
+ with cnx.deny_all_hooks_but():
+ cwconstraint.cw_set(value=unicode(cstr.serialize()))
+ if cstr.type() not in ('BoundaryConstraint', 'IntervalBoundConstraint',
+ 'StaticVocabularyConstraint'):
+ # These cannot be translated into backend CHECK.
+ continue
+ cstrname, check = check_constraint(rdef, cstr, helper, prefix='cw_')
+ args = {'e': rdef.subject.type, 'c': cstrname, 'v': check}
+ sql('ALTER TABLE cw_%(e)s ADD CONSTRAINT %(c)s CHECK(%(v)s)' % args)
+
+commit()
+
+if 'identity_relation' in helper.list_tables(cnx.cnxset.cu):
+ sql('DROP TABLE identity_relation')
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.3.5_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.3.5_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,1 @@
+raise NotImplementedError("Cannot migrate such an old version. Use intermediate Cubiweb version (try 3.16.x)")
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.4.0_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.4.0_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,1 @@
+raise NotImplementedError("Cannot migrate such an old version. Use intermediate Cubiweb version (try 3.16.x)")
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.4.0_common.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.4.0_common.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,1 @@
+raise NotImplementedError("Cannot migrate such an old version. Use intermediate Cubiweb version (try 3.16.x)")
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.4.3_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.4.3_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,1 @@
+raise NotImplementedError("Cannot migrate such an old version. Use intermediate Cubiweb version (try 3.16.x)")
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.5.0_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.5.0_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,1 @@
+raise NotImplementedError("Cannot migrate such an old version. Use intermediate Cubiweb version (try 3.16.x)")
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.5.10_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.5.10_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,1 @@
+raise NotImplementedError("Cannot migrate such an old version. Use intermediate Cubiweb version (try 3.16.x)")
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.5.3_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.5.3_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,1 @@
+raise NotImplementedError("Cannot migrate such an old version. Use intermediate Cubiweb version (try 3.16.x)")
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.6.1_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.6.1_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,1 @@
+raise NotImplementedError("Cannot migrate such an old version. Use intermediate Cubiweb version (try 3.16.x)")
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.7.0_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.7.0_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,40 @@
+typemap = repo.system_source.dbhelper.TYPE_MAPPING
+sqls = """
+CREATE TABLE transactions (
+ tx_uuid CHAR(32) PRIMARY KEY NOT NULL,
+ tx_user INTEGER NOT NULL,
+ tx_time %s NOT NULL
+);;
+CREATE INDEX transactions_tx_user_idx ON transactions(tx_user);;
+
+CREATE TABLE tx_entity_actions (
+ tx_uuid CHAR(32) REFERENCES transactions(tx_uuid) ON DELETE CASCADE,
+ txa_action CHAR(1) NOT NULL,
+ txa_public %s NOT NULL,
+ txa_order INTEGER,
+ eid INTEGER NOT NULL,
+ etype VARCHAR(64) NOT NULL,
+ changes %s
+);;
+CREATE INDEX tx_entity_actions_txa_action_idx ON tx_entity_actions(txa_action);;
+CREATE INDEX tx_entity_actions_txa_public_idx ON tx_entity_actions(txa_public);;
+CREATE INDEX tx_entity_actions_eid_idx ON tx_entity_actions(eid);;
+CREATE INDEX tx_entity_actions_etype_idx ON tx_entity_actions(etype);;
+
+CREATE TABLE tx_relation_actions (
+ tx_uuid CHAR(32) REFERENCES transactions(tx_uuid) ON DELETE CASCADE,
+ txa_action CHAR(1) NOT NULL,
+ txa_public %s NOT NULL,
+ txa_order INTEGER,
+ eid_from INTEGER NOT NULL,
+ eid_to INTEGER NOT NULL,
+ rtype VARCHAR(256) NOT NULL
+);;
+CREATE INDEX tx_relation_actions_txa_action_idx ON tx_relation_actions(txa_action);;
+CREATE INDEX tx_relation_actions_txa_public_idx ON tx_relation_actions(txa_public);;
+CREATE INDEX tx_relation_actions_eid_from_idx ON tx_relation_actions(eid_from);;
+CREATE INDEX tx_relation_actions_eid_to_idx ON tx_relation_actions(eid_to)
+""" % (typemap['Datetime'],
+ typemap['Boolean'], typemap['Bytes'], typemap['Boolean'])
+for statement in sqls.split(';;'):
+ sql(statement)
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.7.2_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.7.2_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,2 @@
+sql('DROP FUNCTION IF EXISTS _fsopen(bytea)')
+sql('DROP FUNCTION IF EXISTS fspath(bigint, text, text)')
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.7.4_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.7.4_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,1 @@
+sync_schema_props_perms('TrInfo', syncprops=False)
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.7.5_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.7.5_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,4 @@
+if versions_map['cubicweb'][0] == (3, 7, 4):
+ config['http-session-time'] *= 60
+ config['cleanup-session-time'] *= 60
+ config['cleanup-anonymous-session-time'] *= 60
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.8.1_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.8.1_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,2 @@
+rql('SET X name "BoundaryConstraint" '
+ 'WHERE X is CWConstraintType, X name "BoundConstraint"')
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.8.3_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.8.3_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,3 @@
+if 'same_as' in schema:
+ sync_schema_props_perms('same_as', syncperms=False)
+sync_schema_props_perms('Bookmark', syncperms=False)
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.8.3_common.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.8.3_common.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,4 @@
+option_group_changed('port', 'main', 'web')
+option_group_changed('query-log-file', 'main', 'web')
+option_group_changed('profile', 'main', 'web')
+option_group_changed('max-post-length', 'main', 'web')
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.8.5_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.8.5_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,61 @@
+from __future__ import print_function
+
+def migrate_varchar_to_nvarchar():
+ dbdriver = config.system_source_config['db-driver']
+ if dbdriver != "sqlserver2005":
+ return
+
+ introspection_sql = """\
+SELECT table_schema, table_name, column_name, is_nullable, character_maximum_length
+FROM information_schema.columns
+WHERE data_type = 'VARCHAR' and table_name <> 'SYSDIAGRAMS'
+"""
+ has_index_sql = """\
+SELECT i.name AS index_name,
+ i.type_desc,
+ i.is_unique,
+ i.is_unique_constraint
+FROM sys.indexes AS i, sys.index_columns as j, sys.columns as k
+WHERE is_hypothetical = 0 AND i.index_id <> 0
+AND i.object_id = j.object_id
+AND i.index_id = j.index_id
+AND i.object_id = OBJECT_ID('%(table)s')
+AND k.name = '%(col)s'
+AND k.object_id=i.object_id
+AND j.column_id = k.column_id;"""
+
+ generated_statements = []
+ for schema, table, column, is_nullable, length in sql(introspection_sql, ask_confirm=False):
+ qualified_table = '[%s].[%s]' % (schema, table)
+ rset = sql(has_index_sql % {'table': qualified_table, 'col':column},
+ ask_confirm = False)
+ drops = []
+ creates = []
+ for idx_name, idx_type, idx_unique, is_unique_constraint in rset:
+ if is_unique_constraint:
+ drops.append('ALTER TABLE %s DROP CONSTRAINT %s' % (qualified_table, idx_name))
+ creates.append('ALTER TABLE %s ADD CONSTRAINT %s UNIQUE (%s)' % (qualified_table, idx_name, column))
+ else:
+ drops.append('DROP INDEX %s ON %s' % (idx_name, qualified_table))
+ if idx_unique:
+ unique = 'UNIQUE'
+ else:
+ unique = ''
+ creates.append('CREATE %s %s INDEX %s ON %s(%s)' % (unique, idx_type, idx_name, qualified_table, column))
+
+ if length == -1:
+ length = 'max'
+ if is_nullable == 'YES':
+ not_null = 'NULL'
+ else:
+ not_null = 'NOT NULL'
+ alter_sql = 'ALTER TABLE %s ALTER COLUMN %s NVARCHAR(%s) %s' % (qualified_table, column, length, not_null)
+ generated_statements+= drops + [alter_sql] + creates
+
+
+ for statement in generated_statements:
+ print(statement)
+ sql(statement, ask_confirm=False)
+ commit()
+
+migrate_varchar_to_nvarchar()
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.9.0_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.9.0_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,3 @@
+if repo.system_source.dbdriver == 'postgres':
+ sql('ALTER TABLE appears ADD COLUMN weight float')
+ sql('UPDATE appears SET weight=1.0 ')
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/3.9.5_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.9.5_Any.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,4 @@
+if not rql('CWConstraintType X WHERE X name "RQLUniqueConstraint"',
+ ask_confirm=False):
+ rql('INSERT CWConstraintType X: X name "RQLUniqueConstraint"',
+ ask_confirm=False)
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/bootstrapmigration_repository.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/bootstrapmigration_repository.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,461 @@
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""allways executed before all others in server migration
+
+it should only include low level schema changes
+"""
+from __future__ import print_function
+
+from six import text_type
+
+from cubicweb import ConfigurationError
+from cubicweb.server.session import hooks_control
+from cubicweb.server import schemaserial as ss
+
+applcubicwebversion, cubicwebversion = versions_map['cubicweb']
+
+def _add_relation_definition_no_perms(subjtype, rtype, objtype):
+ rschema = fsschema.rschema(rtype)
+ rdef = rschema.rdefs[(subjtype, objtype)]
+ rdef.rtype = schema.rschema(rtype)
+ rdef.subject = schema.eschema(subjtype)
+ rdef.object = schema.eschema(objtype)
+ ss.execschemarql(rql, rdef, ss.rdef2rql(rdef, CSTRMAP, groupmap=None))
+ commit(ask_confirm=False)
+
+def replace_eid_sequence_with_eid_numrange(session):
+ dbh = session.repo.system_source.dbhelper
+ cursor = session.cnxset.cu
+ try:
+ cursor.execute(dbh.sql_sequence_current_state('entities_id_seq'))
+ lasteid = cursor.fetchone()[0]
+ except: # programming error, already migrated
+ session.rollback()
+ return
+
+ cursor.execute(dbh.sql_drop_sequence('entities_id_seq'))
+ cursor.execute(dbh.sql_create_numrange('entities_id_seq'))
+ cursor.execute(dbh.sql_restart_numrange('entities_id_seq', initial_value=lasteid))
+ session.commit()
+
+if applcubicwebversion <= (3, 13, 0) and cubicwebversion >= (3, 13, 1):
+ sql('ALTER TABLE entities ADD asource VARCHAR(64)')
+ sql('UPDATE entities SET asource=cw_name '
+ 'FROM cw_CWSource, cw_source_relation '
+ 'WHERE entities.eid=cw_source_relation.eid_from AND cw_source_relation.eid_to=cw_CWSource.cw_eid')
+ commit()
+
+if applcubicwebversion <= (3, 14, 4) and cubicwebversion >= (3, 14, 4):
+ from cubicweb.server import schema2sql as y2sql
+ dbhelper = repo.system_source.dbhelper
+ rdefdef = schema['CWSource'].rdef('name')
+ attrtype = y2sql.type_from_constraints(dbhelper, rdefdef.object, rdefdef.constraints).split()[0]
+ cursor = session.cnxset.cu
+ sql('UPDATE entities SET asource = source WHERE asource is NULL')
+ dbhelper.change_col_type(cursor, 'entities', 'asource', attrtype, False)
+ dbhelper.change_col_type(cursor, 'entities', 'source', attrtype, False)
+
+ # we now have a functional asource column, start using the normal eid_type_source method
+ if repo.system_source.eid_type_source == repo.system_source.eid_type_source_pre_131:
+ del repo.system_source.eid_type_source
+
+if applcubicwebversion < (3, 19, 0) and cubicwebversion >= (3, 19, 0):
+ try:
+ # need explicit drop of the indexes on some database systems (sqlserver)
+ sql(repo.system_source.dbhelper.sql_drop_index('entities', 'mtime'))
+ sql('ALTER TABLE "entities" DROP COLUMN "mtime"')
+ sql('ALTER TABLE "entities" DROP COLUMN "source"')
+ except: # programming error, already migrated
+ print("Failed to drop mtime or source database columns")
+ print("'entities' table of the database has probably been already updated")
+
+ commit()
+
+ replace_eid_sequence_with_eid_numrange(session)
+
+
+if applcubicwebversion < (3, 18, 0) and cubicwebversion >= (3, 18, 0):
+ driver = config.system_source_config['db-driver']
+ if not (driver == 'postgres' or driver.startswith('sqlserver')):
+ import sys
+ print('This migration is not supported for backends other than sqlserver or postgres (yet).', file=sys.stderr)
+ sys.exit(1)
+
+ add_relation_definition('CWAttribute', 'add_permission', 'CWGroup')
+ add_relation_definition('CWAttribute', 'add_permission', 'RQLExpression')
+
+ # a bad defaultval in 3.13.8 schema was fixed in 3.13.9, but the migration was missed
+ rql('SET ATTR defaultval NULL WHERE ATTR from_entity E, E name "CWSource", ATTR relation_type T, T name "in_synchronization"')
+
+ # the migration gets confused when we change rdefs out from under it. So
+ # explicitly remove this size constraint so it doesn't stick around and break
+ # things later.
+ rdefeid = schema['defaultval'].rdefs.values()[0].eid
+ rql('DELETE CWConstraint C WHERE C cstrtype T, T name "SizeConstraint", R constrained_by C, R eid %(eid)s', {'eid': rdefeid})
+
+ sync_schema_props_perms('defaultval')
+
+ def convert_defaultval(cwattr, default):
+ from decimal import Decimal
+ import yams
+ from cubicweb import Binary
+ if default is None:
+ return
+ if isinstance(default, Binary):
+ # partially migrated instance, try to be idempotent
+ return default
+ atype = cwattr.to_entity[0].name
+ if atype == 'Boolean':
+ # boolean attributes with default=False were stored as ''
+ assert default in ('True', 'False', ''), repr(default)
+ default = default == 'True'
+ elif atype in ('Int', 'BigInt'):
+ default = int(default)
+ elif atype == 'Float':
+ default = float(default)
+ elif atype == 'Decimal':
+ default = Decimal(default)
+ elif atype in ('Date', 'Datetime', 'TZDatetime', 'Time'):
+ try:
+ # handle NOW and TODAY, keep them stored as strings
+ yams.KEYWORD_MAP[atype][default.upper()]
+ default = default.upper()
+ except KeyError:
+ # otherwise get an actual date or datetime
+ default = yams.DATE_FACTORY_MAP[atype](default)
+ else:
+ assert atype == 'String', atype
+ default = text_type(default)
+ return Binary.zpickle(default)
+
+ dbh = repo.system_source.dbhelper
+
+
+ sql('ALTER TABLE cw_cwattribute ADD new_defaultval %s' % dbh.TYPE_MAPPING['Bytes'])
+
+ for cwattr in rql('CWAttribute X').entities():
+ olddefault = cwattr.defaultval
+ if olddefault is not None:
+ req = "UPDATE cw_cwattribute SET new_defaultval = %(val)s WHERE cw_eid = %(eid)s"
+ args = {'val': dbh.binary_value(convert_defaultval(cwattr, olddefault).getvalue()), 'eid': cwattr.eid}
+ sql(req, args, ask_confirm=False)
+
+ sql('ALTER TABLE cw_cwattribute DROP COLUMN cw_defaultval')
+ if driver == 'postgres':
+ sql('ALTER TABLE cw_cwattribute RENAME COLUMN new_defaultval TO cw_defaultval')
+ else: # sqlserver
+ sql("sp_rename 'cw_cwattribute.new_defaultval', 'cw_defaultval', 'COLUMN'")
+
+
+ # Set object type to "Bytes" for CWAttribute's "defaultval" attribute
+ rql('SET X to_entity B WHERE X is CWAttribute, X from_entity Y, Y name "CWAttribute", '
+ 'X relation_type Z, Z name "defaultval", B name "Bytes", NOT X to_entity B')
+
+ oldrdef = schema['CWAttribute'].rdef('defaultval')
+ import yams.buildobjs as ybo
+ newrdef = ybo.RelationDefinition('CWAttribute', 'defaultval', 'Bytes')
+ newrdef.eid = oldrdef.eid
+ schema.add_relation_def(newrdef)
+ schema.del_relation_def('CWAttribute', 'defaultval', 'String')
+
+ commit()
+
+ sync_schema_props_perms('defaultval')
+
+ for rschema in schema.relations():
+ if rschema.symmetric:
+ subjects = set(repr(e.type) for e in rschema.subjects())
+ objects = set(repr(e.type) for e in rschema.objects())
+ assert subjects == objects
+ martians = set(str(eid) for eid, in sql('SELECT eid_to FROM %s_relation, entities WHERE eid_to = eid AND type NOT IN (%s)' %
+ (rschema.type, ','.join(subjects))))
+ martians |= set(str(eid) for eid, in sql('SELECT eid_from FROM %s_relation, entities WHERE eid_from = eid AND type NOT IN (%s)' %
+ (rschema.type, ','.join(subjects))))
+ if martians:
+ martians = ','.join(martians)
+ print('deleting broken relations %s for eids %s' % (rschema.type, martians))
+ sql('DELETE FROM %s_relation WHERE eid_from IN (%s) OR eid_to IN (%s)' % (rschema.type, martians, martians))
+ with session.deny_all_hooks_but():
+ rql('SET X %(r)s Y WHERE Y %(r)s X, NOT X %(r)s Y' % {'r': rschema.type})
+ commit()
+
+
+ # multi columns unique constraints regeneration
+ from cubicweb.server import schemaserial
+
+ # syncschema hooks would try to remove indices but
+ # 1) we already do that below
+ # 2) the hook expects the CWUniqueTogetherConstraint.name attribute that hasn't
+ # yet been added
+ with session.allow_all_hooks_but('syncschema'):
+ rql('DELETE CWUniqueTogetherConstraint C')
+ commit()
+ add_attribute('CWUniqueTogetherConstraint', 'name')
+
+ # low-level wipe code for postgres & sqlserver, plain sql ...
+ if driver == 'postgres':
+ for indexname, in sql('select indexname from pg_indexes'):
+ if indexname.startswith('unique_'):
+ print('dropping index', indexname)
+ sql('DROP INDEX %s' % indexname)
+ commit()
+ elif driver.startswith('sqlserver'):
+ for viewname, in sql('select name from sys.views'):
+ if viewname.startswith('utv_'):
+ print('dropping view (index should be cascade-deleted)', viewname)
+ sql('DROP VIEW %s' % viewname)
+ commit()
+
+ # recreate the constraints, hook will lead to low-level recreation
+ for eschema in sorted(schema.entities()):
+ if eschema._unique_together:
+ print('recreate unique indexes for', eschema)
+ rql_args = schemaserial.uniquetogether2rqls(eschema)
+ for rql, args in rql_args:
+ args['x'] = eschema.eid
+ session.execute(rql, args)
+ commit()
+
+ # all attributes perms have to be refreshed ...
+ for rschema in sorted(schema.relations()):
+ if rschema.final:
+ if rschema.type in fsschema:
+ print('sync perms for', rschema.type)
+ sync_schema_props_perms(rschema.type, syncprops=False, ask_confirm=False, commit=False)
+ else:
+ print('WARNING: attribute %s missing from fs schema' % rschema.type)
+ commit()
+
+if applcubicwebversion < (3, 17, 0) and cubicwebversion >= (3, 17, 0):
+ try:
+ add_cube('sioc', update_database=False)
+ except ConfigurationError:
+ if not confirm('In cubicweb 3.17 sioc views have been moved to the sioc '
+ 'cube, which is not installed. Continue anyway?'):
+ raise
+ try:
+ add_cube('embed', update_database=False)
+ except ConfigurationError:
+ if not confirm('In cubicweb 3.17 embedding views have been moved to the embed '
+ 'cube, which is not installed. Continue anyway?'):
+ raise
+ try:
+ add_cube('geocoding', update_database=False)
+ except ConfigurationError:
+ if not confirm('In cubicweb 3.17 geocoding views have been moved to the geocoding '
+ 'cube, which is not installed. Continue anyway?'):
+ raise
+
+
+if applcubicwebversion <= (3, 14, 0) and cubicwebversion >= (3, 14, 0):
+ if 'require_permission' in schema and not 'localperms' in repo.config.cubes():
+ from cubicweb import ExecutionError
+ try:
+ add_cube('localperms', update_database=False)
+ except ConfigurationError:
+ raise ExecutionError('In cubicweb 3.14, CWPermission and related stuff '
+ 'has been moved to cube localperms. Install it first.')
+
+
+if applcubicwebversion == (3, 6, 0) and cubicwebversion >= (3, 6, 0):
+ CSTRMAP = dict(rql('Any T, X WHERE X is CWConstraintType, X name T',
+ ask_confirm=False))
+ _add_relation_definition_no_perms('CWAttribute', 'update_permission', 'CWGroup')
+ _add_relation_definition_no_perms('CWAttribute', 'update_permission', 'RQLExpression')
+ rql('SET X update_permission Y WHERE X is CWAttribute, X add_permission Y')
+ drop_relation_definition('CWAttribute', 'delete_permission', 'CWGroup')
+ drop_relation_definition('CWAttribute', 'delete_permission', 'RQLExpression')
+
+elif applcubicwebversion < (3, 6, 0) and cubicwebversion >= (3, 6, 0):
+ CSTRMAP = dict(rql('Any T, X WHERE X is CWConstraintType, X name T',
+ ask_confirm=False))
+ session.set_cnxset()
+ permsdict = ss.deserialize_ertype_permissions(session)
+
+ with hooks_control(session, session.HOOKS_ALLOW_ALL, 'integrity'):
+ for rschema in repo.schema.relations():
+ rpermsdict = permsdict.get(rschema.eid, {})
+ for rdef in rschema.rdefs.values():
+ for action in rdef.ACTIONS:
+ actperms = []
+ for something in rpermsdict.get(action == 'update' and 'add' or action, ()):
+ if isinstance(something, tuple):
+ actperms.append(rdef.rql_expression(*something))
+ else: # group name
+ actperms.append(something)
+ rdef.set_action_permissions(action, actperms)
+ for action in ('read', 'add', 'delete'):
+ _add_relation_definition_no_perms('CWRelation', '%s_permission' % action, 'CWGroup')
+ _add_relation_definition_no_perms('CWRelation', '%s_permission' % action, 'RQLExpression')
+ for action in ('read', 'update'):
+ _add_relation_definition_no_perms('CWAttribute', '%s_permission' % action, 'CWGroup')
+ _add_relation_definition_no_perms('CWAttribute', '%s_permission' % action, 'RQLExpression')
+ for action in ('read', 'add', 'delete'):
+ rql('SET X %s_permission Y WHERE X is CWRelation, '
+ 'RT %s_permission Y, X relation_type RT, Y is CWGroup' % (action, action))
+ rql('INSERT RQLExpression Y: Y exprtype YET, Y mainvars YMV, Y expression YEX, '
+ 'X %s_permission Y WHERE X is CWRelation, '
+ 'X relation_type RT, RT %s_permission Y2, Y2 exprtype YET, '
+ 'Y2 mainvars YMV, Y2 expression YEX' % (action, action))
+ rql('SET X read_permission Y WHERE X is CWAttribute, '
+ 'RT read_permission Y, X relation_type RT, Y is CWGroup')
+ rql('INSERT RQLExpression Y: Y exprtype YET, Y mainvars YMV, Y expression YEX, '
+ 'X read_permission Y WHERE X is CWAttribute, '
+ 'X relation_type RT, RT read_permission Y2, Y2 exprtype YET, '
+ 'Y2 mainvars YMV, Y2 expression YEX')
+ rql('SET X update_permission Y WHERE X is CWAttribute, '
+ 'RT add_permission Y, X relation_type RT, Y is CWGroup')
+ rql('INSERT RQLExpression Y: Y exprtype YET, Y mainvars YMV, Y expression YEX, '
+ 'X update_permission Y WHERE X is CWAttribute, '
+ 'X relation_type RT, RT add_permission Y2, Y2 exprtype YET, '
+ 'Y2 mainvars YMV, Y2 expression YEX')
+ for action in ('read', 'add', 'delete'):
+ drop_relation_definition('CWRType', '%s_permission' % action, 'CWGroup', commit=False)
+ drop_relation_definition('CWRType', '%s_permission' % action, 'RQLExpression')
+ sync_schema_props_perms('read_permission', syncperms=False) # fix read_permission cardinality
+
+if applcubicwebversion < (3, 9, 6) and cubicwebversion >= (3, 9, 6) and not 'CWUniqueTogetherConstraint' in schema:
+ add_entity_type('CWUniqueTogetherConstraint')
+
+if not ('CWUniqueTogetherConstraint', 'CWRType') in schema['relations'].rdefs:
+ add_relation_definition('CWUniqueTogetherConstraint', 'relations', 'CWRType')
+ rql('SET C relations RT WHERE C relations RDEF, RDEF relation_type RT')
+ commit()
+ drop_relation_definition('CWUniqueTogetherConstraint', 'relations', 'CWAttribute')
+ drop_relation_definition('CWUniqueTogetherConstraint', 'relations', 'CWRelation')
+
+
+if applcubicwebversion < (3, 4, 0) and cubicwebversion >= (3, 4, 0):
+
+ with hooks_control(session, session.HOOKS_ALLOW_ALL, 'integrity'):
+ session.set_shared_data('do-not-insert-cwuri', True)
+ add_relation_type('cwuri')
+ base_url = session.base_url()
+ for eid, in rql('Any X', ask_confirm=False):
+ type, source, extid = session.describe(eid)
+ if source == 'system':
+ rql('SET X cwuri %(u)s WHERE X eid %(x)s',
+ {'x': eid, 'u': u'%s%s' % (base_url, eid)})
+ session.commit()
+ session.set_shared_data('do-not-insert-cwuri', False)
+
+if applcubicwebversion < (3, 5, 0) and cubicwebversion >= (3, 5, 0):
+ # check that migration is not doomed
+ rset = rql('Any X,Y WHERE X transition_of E, Y transition_of E, '
+ 'X name N, Y name N, NOT X identity Y',
+ ask_confirm=False)
+ if rset:
+ from logilab.common.shellutils import ASK
+ if not ASK.confirm('Migration will fail because of transitions with the same name. '
+ 'Continue anyway ?'):
+ import sys
+ sys.exit(1)
+ # proceed with migration
+ add_entity_type('Workflow')
+ add_entity_type('BaseTransition')
+ add_entity_type('WorkflowTransition')
+ add_entity_type('SubWorkflowExitPoint')
+ # drop explicit 'State allowed_transition Transition' since it should be
+ # infered due to yams inheritance. However we've to disable the schema
+ # sync hook first to avoid to destroy existing data...
+ try:
+ from cubicweb.hooks import syncschema
+ repo.vreg.unregister(syncschema.AfterDelRelationTypeHook)
+ try:
+ drop_relation_definition('State', 'allowed_transition', 'Transition')
+ finally:
+ repo.vreg.register(syncschema.AfterDelRelationTypeHook)
+ except ImportError: # syncschema is in CW >= 3.6 only
+ from cubicweb.server.schemahooks import after_del_relation_type
+ repo.hm.unregister_hook(after_del_relation_type,
+ 'after_delete_relation', 'relation_type')
+ try:
+ drop_relation_definition('State', 'allowed_transition', 'Transition')
+ finally:
+ repo.hm.register_hook(after_del_relation_type,
+ 'after_delete_relation', 'relation_type')
+ schema.rebuild_infered_relations() # need to be explicitly called once everything is in place
+
+ for et in rql('DISTINCT Any ET,ETN WHERE S state_of ET, ET name ETN',
+ ask_confirm=False).entities():
+ wf = add_workflow(u'default %s workflow' % et.name, et.name,
+ ask_confirm=False)
+ rql('SET S state_of WF WHERE S state_of ET, ET eid %(et)s, WF eid %(wf)s',
+ {'et': et.eid, 'wf': wf.eid}, 'et', ask_confirm=False)
+ rql('SET T transition_of WF WHERE T transition_of ET, ET eid %(et)s, WF eid %(wf)s',
+ {'et': et.eid, 'wf': wf.eid}, 'et', ask_confirm=False)
+ rql('SET WF initial_state S WHERE ET initial_state S, ET eid %(et)s, WF eid %(wf)s',
+ {'et': et.eid, 'wf': wf.eid}, 'et', ask_confirm=False)
+
+
+ rql('DELETE TrInfo TI WHERE NOT TI from_state S')
+ rql('SET TI by_transition T WHERE TI from_state FS, TI to_state TS, '
+ 'FS allowed_transition T, T destination_state TS')
+ commit()
+
+ drop_relation_definition('State', 'state_of', 'CWEType')
+ drop_relation_definition('Transition', 'transition_of', 'CWEType')
+ drop_relation_definition('CWEType', 'initial_state', 'State')
+
+ sync_schema_props_perms()
+
+if applcubicwebversion < (3, 2, 2) and cubicwebversion >= (3, 2, 1):
+ from base64 import b64encode
+ for eid, extid in sql('SELECT eid, extid FROM entities '
+ 'WHERE extid is NOT NULL',
+ ask_confirm=False):
+ sql('UPDATE entities SET extid=%(extid)s WHERE eid=%(eid)s',
+ {'extid': b64encode(extid), 'eid': eid}, ask_confirm=False)
+ commit()
+
+if applcubicwebversion < (3, 2, 0) and cubicwebversion >= (3, 2, 0):
+ add_cube('card', update_database=False)
+
+if applcubicwebversion < (3, 20, 0) and cubicwebversion >= (3, 20, 0):
+ ss._IGNORED_PROPS.append('formula')
+ add_attribute('CWAttribute', 'formula', commit=False)
+ ss._IGNORED_PROPS.remove('formula')
+ commit()
+ add_entity_type('CWComputedRType')
+ commit()
+
+if schema['TZDatetime'].eid is None:
+ add_entity_type('TZDatetime', auto=False)
+if schema['TZTime'].eid is None:
+ add_entity_type('TZTime', auto=False)
+
+
+if applcubicwebversion < (3, 21, 1) and cubicwebversion >= (3, 21, 1):
+ add_relation_definition('CWComputedRType', 'read_permission', 'CWGroup')
+ add_relation_definition('CWComputedRType', 'read_permission', 'RQLExpression')
+
+
+def sync_constraint_types():
+ """Make sure the repository knows about all constraint types defined in the code"""
+ from cubicweb.schema import CONSTRAINTS
+ repo_constraints = set(row[0] for row in rql('Any N WHERE X is CWConstraintType, X name N'))
+
+ for cstrtype in set(CONSTRAINTS) - repo_constraints:
+ if cstrtype == 'BoundConstraint':
+ # was renamed to BoundaryConstraint, we don't need the old name
+ continue
+ rql('INSERT CWConstraintType X: X name %(name)s', {'name': cstrtype})
+
+ commit()
+
+sync_constraint_types()
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/migration/postcreate.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/postcreate.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,77 @@
+# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""cubicweb post creation script, set user's workflow"""
+from __future__ import print_function
+
+from six import text_type
+
+from cubicweb import _
+
+
+# insert versions
+create_entity('CWProperty', pkey=u'system.version.cubicweb',
+ value=text_type(config.cubicweb_version()))
+for cube in config.cubes():
+ create_entity('CWProperty', pkey=u'system.version.%s' % cube.lower(),
+ value=text_type(config.cube_version(cube)))
+
+# some entities have been added before schema entities, add their missing 'is' and
+# 'is_instance_of' relations
+for rtype in ('is', 'is_instance_of'):
+ sql('INSERT INTO %s_relation '
+ 'SELECT X.eid, ET.cw_eid FROM entities as X, cw_CWEType as ET '
+ 'WHERE X.type=ET.cw_name AND NOT EXISTS('
+ ' SELECT 1 from %s_relation '
+ ' WHERE eid_from=X.eid AND eid_to=ET.cw_eid)' % (rtype, rtype))
+
+# user workflow
+userwf = add_workflow(_('default user workflow'), 'CWUser')
+activated = userwf.add_state(_('activated'), initial=True)
+deactivated = userwf.add_state(_('deactivated'))
+userwf.add_transition(_('deactivate'), (activated,), deactivated,
+ requiredgroups=(u'managers',))
+userwf.add_transition(_('activate'), (deactivated,), activated,
+ requiredgroups=(u'managers',))
+
+# create anonymous user if all-in-one config and anonymous user has been specified
+if hasattr(config, 'anonymous_user'):
+ anonlogin, anonpwd = config.anonymous_user()
+ if anonlogin == session.user.login:
+ print('you are using a manager account as anonymous user.')
+ print('Hopefully this is not a production instance...')
+ elif anonlogin:
+ from cubicweb.server import create_user
+ create_user(session, text_type(anonlogin), anonpwd, u'guests')
+
+# need this since we already have at least one user in the database (the default admin)
+for user in rql('Any X WHERE X is CWUser').entities():
+ rql('SET X in_state S WHERE X eid %(x)s, S eid %(s)s',
+ {'x': user.eid, 's': activated.eid})
+
+# on interactive mode, ask for level 0 persistent options
+if interactive_mode:
+ cfg = config.persistent_options_configuration()
+ cfg.input_config(inputlevel=0)
+ for section, options in cfg.options_by_section():
+ for optname, optdict, value in options:
+ key = u'%s.%s' % (section, optname)
+ default = cfg.option_default(optname, optdict)
+ # only record values differing from default
+ if value != default:
+ rql('INSERT CWProperty X: X pkey %(k)s, X value %(v)s',
+ {'k': key, 'v': value})
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/scripts/chpasswd.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/scripts/chpasswd.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,48 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+
+import sys
+import getpass
+
+from cubicweb import Binary
+from cubicweb.server.utils import crypt_password
+
+
+if __args__:
+ login = __args__.pop()
+else:
+ login = raw_input("login? ")
+
+rset = rql('Any U WHERE U is CWUser, U login %(login)s', {'login': login})
+
+if len(rset) != 1:
+ sys.exit("user '%s' does not exist!" % login)
+
+pass1 = getpass.getpass(prompt='Enter new password? ')
+pass2 = getpass.getpass(prompt='Confirm? ')
+
+if pass1 != pass2:
+ sys.exit("passwords don't match!")
+
+crypted = crypt_password(pass1)
+
+cwuser = rset.get_entity(0,0)
+cwuser.cw_set(upassword=Binary(crypted))
+commit()
+
+print("password updated.")
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/scripts/cwuser_ldap2system.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/scripts/cwuser_ldap2system.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,42 @@
+from __future__ import print_function
+
+import base64
+from cubicweb.server.utils import crypt_password
+
+dbdriver = config.system_source_config['db-driver']
+from logilab.database import get_db_helper
+dbhelper = get_db_helper(dbdriver)
+
+insert = ('INSERT INTO cw_cwuser (cw_creation_date,'
+ ' cw_eid,'
+ ' cw_modification_date,'
+ ' cw_login,'
+ ' cw_firstname,'
+ ' cw_surname,'
+ ' cw_last_login_time,'
+ ' cw_upassword,'
+ ' cw_cwuri) '
+ "VALUES (%(mtime)s, %(eid)s, %(mtime)s, %(login)s, "
+ " %(firstname)s, %(surname)s, %(mtime)s, %(pwd)s, 'foo');")
+update = "UPDATE entities SET source='system' WHERE eid=%(eid)s;"
+rset = sql("SELECT eid,type,source,extid,mtime FROM entities WHERE source!='system'", ask_confirm=False)
+for eid, type, source, extid, mtime in rset:
+ if type != 'CWUser':
+ print("don't know what to do with entity type", type)
+ continue
+ if not source.lower().startswith('ldap'):
+ print("don't know what to do with source type", source)
+ continue
+ extid = base64.decodestring(extid)
+ ldapinfos = [x.strip().split('=') for x in extid.split(',')]
+ login = ldapinfos[0][1]
+ firstname = login.capitalize()
+ surname = login.capitalize()
+ args = dict(eid=eid, type=type, source=source, login=login,
+ firstname=firstname, surname=surname, mtime=mtime,
+ pwd=dbhelper.binary_value(crypt_password('toto')))
+ print(args)
+ sql(insert, args)
+ sql(update, args)
+
+commit()
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/scripts/detect_cycle.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/scripts/detect_cycle.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,16 @@
+from __future__ import print_function
+
+try:
+ rtype, = __args__
+except ValueError:
+ print('USAGE: cubicweb-ctl shell detect_cycle.py -- <relation type>')
+ print()
+
+graph = {}
+for fromeid, toeid in rql('Any X,Y WHERE X %s Y' % rtype):
+ graph.setdefault(fromeid, []).append(toeid)
+
+from logilab.common.graph import get_cycles
+
+for cycle in get_cycles(graph):
+ print('cycle', '->'.join(str(n) for n in cycle))
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/scripts/ldap_change_base_dn.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/scripts/ldap_change_base_dn.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,26 @@
+from __future__ import print_function
+
+from base64 import b64decode, b64encode
+try:
+ uri, newdn = __args__
+except ValueError:
+ print('USAGE: cubicweb-ctl shell ldap_change_base_dn.py -- <ldap source uri> <new dn>')
+ print()
+ print('you should not have updated your sources file yet')
+
+olddn = repo.sources_by_uri[uri].config['user-base-dn']
+
+assert olddn != newdn
+
+raw_input("Ensure you've stopped the instance, type enter when done.")
+
+for eid, extid in sql("SELECT eid, extid FROM entities WHERE source='%s'" % uri):
+ olduserdn = b64decode(extid)
+ newuserdn = olduserdn.replace(olddn, newdn)
+ if newuserdn != olduserdn:
+ print(olduserdn, '->', newuserdn)
+ sql("UPDATE entities SET extid='%s' WHERE eid=%s" % (b64encode(newuserdn), eid))
+
+commit()
+
+print('you can now update the sources file to the new dn and restart the instance')
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/scripts/ldapuser2ldapfeed.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/scripts/ldapuser2ldapfeed.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,98 @@
+"""turn a pyro source into a datafeed source
+
+Once this script is run, execute c-c db-check to cleanup relation tables.
+"""
+from __future__ import print_function
+
+import sys
+from collections import defaultdict
+from logilab.common.shellutils import generate_password
+
+try:
+ source_name, = __args__
+ source = repo.sources_by_uri[source_name]
+except ValueError:
+ print('you should specify the source name as script argument (i.e. after --'
+ ' on the command line)')
+ sys.exit(1)
+except KeyError:
+ print('%s is not an active source' % source_name)
+ sys.exit(1)
+
+# check source is reachable before doing anything
+if not source.get_connection().cnx:
+ print('%s is not reachable. Fix this before running this script' % source_name)
+ sys.exit(1)
+
+raw_input('Ensure you have shutdown all instances of this application before continuing.'
+ ' Type enter when ready.')
+
+system_source = repo.system_source
+
+from datetime import datetime
+from cubicweb.server.edition import EditedEntity
+
+
+print('******************** backport entity content ***************************')
+
+todelete = defaultdict(list)
+extids = set()
+duplicates = []
+for entity in rql('Any X WHERE X cw_source S, S eid %(s)s', {'s': source.eid}).entities():
+ etype = entity.cw_etype
+ if not source.support_entity(etype):
+ print("source doesn't support %s, delete %s" % (etype, entity.eid))
+ todelete[etype].append(entity)
+ continue
+ try:
+ entity.complete()
+ except Exception:
+ print('%s %s much probably deleted, delete it (extid %s)' % (
+ etype, entity.eid, entity.cw_metainformation()['extid']))
+ todelete[etype].append(entity)
+ continue
+ print('get back', etype, entity.eid)
+ entity.cw_edited = EditedEntity(entity, **entity.cw_attr_cache)
+ if not entity.creation_date:
+ entity.cw_edited['creation_date'] = datetime.utcnow()
+ if not entity.modification_date:
+ entity.cw_edited['modification_date'] = datetime.utcnow()
+ if not entity.upassword:
+ entity.cw_edited['upassword'] = generate_password()
+ extid = entity.cw_metainformation()['extid']
+ if not entity.cwuri:
+ entity.cw_edited['cwuri'] = '%s/?dn=%s' % (
+ source.urls[0], extid.decode('utf-8', 'ignore'))
+ print(entity.cw_edited)
+ if extid in extids:
+ duplicates.append(extid)
+ continue
+ extids.add(extid)
+ system_source.add_entity(session, entity)
+ sql("UPDATE entities SET source='system' "
+ "WHERE eid=%(eid)s", {'eid': entity.eid})
+
+# only cleanup entities table, remaining stuff should be cleaned by a c-c
+# db-check to be run after this script
+if duplicates:
+ print('found %s duplicate entries' % len(duplicates))
+ from pprint import pprint
+ pprint(duplicates)
+
+print(len(todelete), 'entities will be deleted')
+for etype, entities in todelete.items():
+ print('deleting', etype, [e.login for e in entities])
+ system_source.delete_info_multi(session, entities, source_name)
+
+
+
+source_ent = rql('CWSource S WHERE S eid %(s)s', {'s': source.eid}).get_entity(0, 0)
+source_ent.cw_set(type=u"ldapfeed", parser=u"ldapfeed")
+
+
+if raw_input('Commit?') in 'yY':
+ print('committing')
+ commit()
+else:
+ rollback()
+ print('rolled back')
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/scripts/migration_helper.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/scripts/migration_helper.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,72 @@
+# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+
+"""Helper functions for migrations that aren't reliable enough or too dangerous
+to be available in the standard migration environment
+"""
+from __future__ import print_function
+
+__docformat__ = "restructuredtext en"
+
+def drop_entity_types_fast(*etypes, **kwargs):
+ """drop an entity type bypassing all hooks
+
+ here be dragons.
+ """
+ # XXX cascade deletion through composite relations?
+
+ for etype in etypes:
+
+ if etype not in schema:
+ print('%s does not exist' % etype)
+ continue
+ etype = schema[etype]
+
+ # ignore attributes and inlined rels since they'll be dropped anyway
+ srels = [x.type for x in etype.subject_relations() if x.eid and not (x.final or x.inlined)]
+
+ orels = [x.type for x in etype.object_relations() if x.eid and not x.inlined]
+ inlined_rels = [x for x in etype.object_relations() if x.eid and x.inlined]
+
+ # eids to be deleted could be listed in some other entity tables through inlined relations
+ for rtype in inlined_rels:
+ for subjtype in rtype.subjects(etype):
+ if subjtype in etypes:
+ continue
+ sql('UPDATE cw_%(stype)s SET cw_%(rtype)s = NULL '
+ 'WHERE cw_%(rtype)s IN (SELECT eid FROM entities WHERE type = %%s)' %
+ {'stype': subjtype.type, 'rtype': rtype.type},
+ (etype.type,))
+
+ for rel in srels:
+ if all(subj in etypes for subj in rel.subjects()) or all(obj in etypes for obj in rel.objects()):
+ sql('DELETE FROM %s_relation' % rel.type)
+ else:
+ sql('DELETE FROM %s_relation WHERE eid_from IN (SELECT eid FROM entities WHERE type = %%s)' % rel.type, (etype.type,))
+ for rel in orels:
+ if all(subj in etypes for subj in rel.subjects()) or all(obj in etypes for obj in rel.objects()):
+ sql('DELETE FROM %s_relation' % rel.type)
+ else:
+ sql('DELETE FROM %s_relation WHERE eid_to IN (SELECT eid FROM entities WHERE type = %%s)' % rel, (etype.type,))
+
+ sql('DELETE FROM appears WHERE uid IN (SELECT eid FROM entities WHERE type = %s)', (etype.type,))
+ sql('DELETE FROM cw_%s' % etype.type)
+ sql('DELETE FROM entities WHERE type = %s', (etype.type,))
+
+ for etype in etypes:
+ drop_entity_type(etype, **kwargs)
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/scripts/pyroforge2datafeed.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/scripts/pyroforge2datafeed.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,134 @@
+"""turn a pyro source into a datafeed source
+
+Once this script is run, execute c-c db-check to cleanup relation tables.
+"""
+from __future__ import print_function
+
+import sys
+
+try:
+ source_name, = __args__
+ source = repo.sources_by_uri[source_name]
+except ValueError:
+ print('you should specify the source name as script argument (i.e. after --'
+ ' on the command line)')
+ sys.exit(1)
+except KeyError:
+ print('%s is not an active source' % source_name)
+ sys.exit(1)
+
+# check source is reachable before doing anything
+try:
+ source.get_connection()._repo
+except AttributeError:
+ print('%s is not reachable. Fix this before running this script' % source_name)
+ sys.exit(1)
+
+raw_input('Ensure you have shutdown all instances of this application before continuing.'
+ ' Type enter when ready.')
+
+system_source = repo.system_source
+
+from base64 import b64encode
+from cubicweb.server.edition import EditedEntity
+
+DONT_GET_BACK_ETYPES = set(( # XXX edit as desired
+ 'State',
+ 'RecipeStep', 'RecipeStepInput', 'RecipeStepOutput',
+ 'RecipeTransition', 'RecipeTransitionCondition',
+ 'NarvalConditionExpression', 'Recipe',
+ # XXX TestConfig
+ ))
+
+
+print('******************** backport entity content ***************************')
+
+from cubicweb.server import debugged
+todelete = {}
+host = source.config['base-url'].split('://')[1]
+for entity in rql('Any X WHERE X cw_source S, S eid %(s)s', {'s': source.eid}).entities():
+ etype = entity.cw_etype
+ if not source.support_entity(etype):
+ print("source doesn't support %s, delete %s" % (etype, entity.eid))
+ elif etype in DONT_GET_BACK_ETYPES:
+ print('ignore %s, delete %s' % (etype, entity.eid))
+ else:
+ try:
+ entity.complete()
+ if not host in entity.cwuri:
+ print('SKIP foreign entity', entity.cwuri, source.config['base-url'])
+ continue
+ except Exception:
+ print('%s %s much probably deleted, delete it (extid %s)' % (
+ etype, entity.eid, entity.cw_metainformation()['extid']))
+ else:
+ print('get back', etype, entity.eid)
+ entity.cw_edited = EditedEntity(entity, **entity.cw_attr_cache)
+ system_source.add_entity(session, entity)
+ sql("UPDATE entities SET asource=%(asource)s, source='system', extid=%(extid)s "
+ "WHERE eid=%(eid)s", {'asource': source_name,
+ 'extid': b64encode(entity.cwuri),
+ 'eid': entity.eid})
+ continue
+ todelete.setdefault(etype, []).append(entity)
+
+# only cleanup entities table, remaining stuff should be cleaned by a c-c
+# db-check to be run after this script
+for entities in todelete.values():
+ system_source.delete_info_multi(session, entities, source_name)
+
+
+print('******************** backport mapping **********************************')
+session.disable_hook_categories('cw.sources')
+mapping = []
+for mappart in rql('Any X,SCH WHERE X cw_schema SCH, X cw_for_source S, S eid %(s)s',
+ {'s': source.eid}).entities():
+ schemaent = mappart.cw_schema[0]
+ if schemaent.cw_etype != 'CWEType':
+ assert schemaent.cw_etype == 'CWRType'
+ sch = schema._eid_index[schemaent.eid]
+ for rdef in sch.rdefs.values():
+ if not source.support_entity(rdef.subject) \
+ or not source.support_entity(rdef.object):
+ continue
+ if rdef.subject in DONT_GET_BACK_ETYPES \
+ and rdef.object in DONT_GET_BACK_ETYPES:
+ print('dont map', rdef)
+ continue
+ if rdef.subject in DONT_GET_BACK_ETYPES:
+ options = u'action=link\nlinkattr=name'
+ roles = 'object',
+ elif rdef.object in DONT_GET_BACK_ETYPES:
+ options = u'action=link\nlinkattr=name'
+ roles = 'subject',
+ else:
+ options = u'action=copy'
+ if rdef.rtype in ('use_environment',):
+ roles = 'object',
+ else:
+ roles = 'subject',
+ print('map', rdef, options, roles)
+ for role in roles:
+ mapping.append( (
+ (str(rdef.subject), str(rdef.rtype), str(rdef.object)),
+ options + '\nrole=%s' % role) )
+ mappart.cw_delete()
+
+source_ent = rql('CWSource S WHERE S eid %(s)s', {'s': source.eid}).get_entity(0, 0)
+source_ent.init_mapping(mapping)
+
+# change source properties
+config = u'''synchronize=yes
+synchronization-interval=10min
+delete-entities=no
+'''
+rql('SET X type "datafeed", X parser "cw.entityxml", X url %(url)s, X config %(config)s '
+ 'WHERE X eid %(x)s',
+ {'x': source.eid, 'config': config,
+ 'url': source.config['base-url']+'/project'})
+
+
+commit()
+
+from cubes.apycot import recipes
+recipes.create_quick_recipe(session)
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/scripts/repair_file_1-9_migration.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/scripts/repair_file_1-9_migration.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,52 @@
+"""execute this script if you've migration to file >= 1.9.0 with cubicweb <= 3.9.2
+
+FYI, this migration occurred :
+* on our intranet on July 07 2010
+* on our extranet on July 16 2010
+"""
+from __future__ import print_function
+
+try:
+ backupinstance, = __args__
+except ValueError:
+    print('USAGE: cubicweb-ctl shell repair_file_1-9_migration.py -- <backup instance id>')
+ print()
+ print('you should restored the backup on a new instance, accessible through pyro')
+
+from cubicweb import cwconfig, dbapi
+from cubicweb.server.session import hooks_control
+
+defaultadmin = repo.config.default_admin_config
+backupcfg = cwconfig.instance_configuration(backupinstance)
+backupcfg.repairing = True
+backuprepo, backupcnx = dbapi.in_memory_repo_cnx(backupcfg, defaultadmin['login'],
+ password=defaultadmin['password'],
+ host='localhost')
+backupcu = backupcnx.cursor()
+
+with hooks_control(session, session.HOOKS_DENY_ALL):
+ rql('SET X is Y WHERE X is File, Y name "File", NOT X is Y')
+ rql('SET X is_instance_of Y WHERE X is File, Y name "File", NOT X is_instance_of Y')
+ for rtype, in backupcu.execute('DISTINCT Any RTN WHERE X relation_type RT, RT name RTN,'
+ 'X from_entity Y, Y name "Image", X is CWRelation, '
+ 'EXISTS(XX is CWRelation, XX relation_type RT, '
+ 'XX from_entity YY, YY name "File")'):
+ if rtype in ('is', 'is_instance_of'):
+ continue
+ print(rtype)
+ for feid, xeid in backupcu.execute('Any F,X WHERE F %s X, F is IN (File,Image)' % rtype):
+ print('restoring relation %s between file %s and %s' % (rtype, feid, xeid), end=' ')
+ print(rql('SET F %s X WHERE F eid %%(f)s, X eid %%(x)s, NOT F %s X' % (rtype, rtype),
+ {'f': feid, 'x': xeid}))
+
+ for rtype, in backupcu.execute('DISTINCT Any RTN WHERE X relation_type RT, RT name RTN,'
+ 'X to_entity Y, Y name "Image", X is CWRelation, '
+ 'EXISTS(XX is CWRelation, XX relation_type RT, '
+ 'XX to_entity YY, YY name "File")'):
+ print(rtype)
+ for feid, xeid in backupcu.execute('Any F,X WHERE X %s F, F is IN (File,Image)' % rtype):
+ print('restoring relation %s between %s and file %s' % (rtype, xeid, feid), end=' ')
+ print(rql('SET X %s F WHERE F eid %%(f)s, X eid %%(x)s, NOT X %s F' % (rtype, rtype),
+ {'f': feid, 'x': xeid}))
+
+commit()
diff -r 1400aee10df4 -r faf279e33298 cubicweb/misc/scripts/repair_splitbrain_ldapuser_source.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/scripts/repair_splitbrain_ldapuser_source.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,109 @@
+"""
+CAUTION: READ THIS CAREFULLY
+
+Sometimes it happens that ldap (specifically ldapuser type) source
+yield "ghost" users. The reasons may vary (server upgrade while some
+instances are still running & syncing with the ldap source, unmanaged
+updates to the upstream ldap, etc.).
+
+This script was written and refined enough times that we are confident
+in that it does something reasonnable (at least it did for the
+target application).
+
+However you should really REALLY understand what it does before
+deciding to apply it for you. And then ADAPT it to your needs.
+
+"""
+from __future__ import print_function
+
+import base64
+from collections import defaultdict
+
+from cubicweb.server.session import hooks_control
+
+try:
+ source_name, = __args__
+ source = repo.sources_by_uri[source_name]
+except ValueError:
+ print('you should specify the source name as script argument (i.e. after --'
+ ' on the command line)')
+ sys.exit(1)
+except KeyError:
+ print('%s is not an active source' % source_name)
+ sys.exit(1)
+
+# check source is reachable before doing anything
+if not source.get_connection().cnx:
+ print('%s is not reachable. Fix this before running this script' % source_name)
+ sys.exit(1)
+
+def find_dupes():
+ # XXX this retrieves entities from a source name "ldap"
+ # you will want to adjust
+ rset = sql("SELECT eid, extid FROM entities WHERE source='%s'" % source_name)
+ extid2eids = defaultdict(list)
+ for eid, extid in rset:
+ extid2eids[extid].append(eid)
+ return dict((base64.b64decode(extid).lower(), eids)
+ for extid, eids in extid2eids.items()
+ if len(eids) > 1)
+
+def merge_dupes(dupes, docommit=False):
+ gone_eids = []
+ CWUser = schema['CWUser']
+ for extid, eids in dupes.items():
+ newest = eids.pop() # we merge everything on the newest
+ print('merging ghosts of', extid, 'into', newest)
+ # now we merge pairwise into the newest
+ for old in eids:
+ subst = {'old': old, 'new': newest}
+ print(' merging', old)
+ gone_eids.append(old)
+ for rschema in CWUser.subject_relations():
+ if rschema.final or rschema == 'identity':
+ continue
+ if CWUser.rdef(rschema, 'subject').composite == 'subject':
+ # old 'composite' property is wiped ...
+ # think about email addresses, excel preferences
+ for eschema in rschema.objects():
+ rql('DELETE %s X WHERE U %s X, U eid %%(old)s' % (eschema, rschema), subst)
+ else:
+ # relink the new user to its old relations
+ rql('SET NU %s X WHERE NU eid %%(new)s, NOT NU %s X, OU %s X, OU eid %%(old)s' %
+ (rschema, rschema, rschema), subst)
+ # delete the old relations
+ rql('DELETE U %s X WHERE U eid %%(old)s' % rschema, subst)
+ # same thing ...
+ for rschema in CWUser.object_relations():
+ if rschema.final or rschema == 'identity':
+ continue
+ rql('SET X %s NU WHERE NU eid %%(new)s, NOT X %s NU, X %s OU, OU eid %%(old)s' %
+ (rschema, rschema, rschema), subst)
+ rql('DELETE X %s U WHERE U eid %%(old)s' % rschema, subst)
+ if not docommit:
+ rollback()
+ return
+ commit() # XXX flushing operations is wanted rather than really committing
+ print('clean up entities table')
+ sql('DELETE FROM entities WHERE eid IN (%s)' % (', '.join(str(x) for x in gone_eids)))
+ commit()
+
+def main():
+ dupes = find_dupes()
+ if not dupes:
+ print('No duplicate user')
+ return
+
+ print('Found %s duplicate user instances' % len(dupes))
+
+ while True:
+ print('Fix or dry-run? (f/d) ... or Ctrl-C to break out')
+ answer = raw_input('> ')
+ if answer.lower() not in 'fd':
+ continue
+ print('Please STOP THE APPLICATION INSTANCES (service or interactive), and press Return when done.')
+ raw_input('')
+ with hooks_control(session, session.HOOKS_DENY_ALL):
+ merge_dupes(dupes, docommit=answer=='f')
+
+main()
diff -r 1400aee10df4 -r faf279e33298 cubicweb/mttransforms.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/mttransforms.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,121 @@
+# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""mime type transformation engine for cubicweb, based on mtconverter"""
+
+__docformat__ = "restructuredtext en"
+
+from logilab import mtconverter
+
+from logilab.mtconverter.engine import TransformEngine
+from logilab.mtconverter.transform import Transform
+from logilab.mtconverter import (register_base_transforms,
+ register_pil_transforms,
+ register_pygments_transforms)
+
+from cubicweb.utils import UStringIO
+from cubicweb.uilib import rest_publish, markdown_publish, html_publish
+
+HTML_MIMETYPES = ('text/html', 'text/xhtml', 'application/xhtml+xml')
+
+# CubicWeb specific transformations
+
+class rest_to_html(Transform):
+ inputs = ('text/rest', 'text/x-rst')
+ output = 'text/html'
+ def _convert(self, trdata):
+ return rest_publish(trdata.appobject, trdata.decode())
+
+class markdown_to_html(Transform):
+ inputs = ('text/markdown', 'text/x-markdown')
+ output = 'text/html'
+ def _convert(self, trdata):
+ return markdown_publish(trdata.appobject, trdata.decode())
+
+class html_to_html(Transform):
+ inputs = HTML_MIMETYPES
+ output = 'text/html'
+ def _convert(self, trdata):
+ return html_publish(trdata.appobject, trdata.data)
+
+
+# Instantiate and configure the transformation engine
+
+mtconverter.UNICODE_POLICY = 'replace'
+
+ENGINE = TransformEngine()
+ENGINE.add_transform(rest_to_html())
+ENGINE.add_transform(markdown_to_html())
+ENGINE.add_transform(html_to_html())
+
+try:
+ from cubicweb.ext.tal import CubicWebContext, compile_template
+except ImportError:
+ HAS_TAL = False
+ from cubicweb import schema
+ schema.NEED_PERM_FORMATS.remove('text/cubicweb-page-template')
+
+else:
+ HAS_TAL = True
+
+ class ept_to_html(Transform):
+ inputs = ('text/cubicweb-page-template',)
+ output = 'text/html'
+ output_encoding = 'utf-8'
+ def _convert(self, trdata):
+ context = CubicWebContext()
+ appobject = trdata.appobject
+ context.update({'self': appobject, 'rset': appobject.cw_rset,
+ 'req': appobject._cw,
+ '_' : appobject._cw._,
+ 'user': appobject._cw.user})
+ output = UStringIO()
+ template = compile_template(trdata.encode(self.output_encoding))
+ template.expand(context, output)
+ return output.getvalue()
+
+ ENGINE.add_transform(ept_to_html())
+
+if register_pil_transforms(ENGINE, verb=False):
+ HAS_PIL_TRANSFORMS = True
+else:
+ HAS_PIL_TRANSFORMS = False
+
+try:
+ from logilab.mtconverter.transforms import pygmentstransforms
+ for mt in ('text/plain',) + HTML_MIMETYPES:
+ try:
+ pygmentstransforms.mimetypes.remove(mt)
+ except ValueError:
+ continue
+ register_pygments_transforms(ENGINE, verb=False)
+
+ def patch_convert(cls):
+ def _convert(self, trdata, origconvert=cls._convert):
+ add_css = getattr(trdata.appobject._cw, 'add_css', None)
+ if add_css is not None:
+ # session has no add_css, only http request
+ add_css('pygments.css')
+ return origconvert(self, trdata)
+ cls._convert = _convert
+ patch_convert(pygmentstransforms.PygmentsHTMLTransform)
+
+ HAS_PYGMENTS_TRANSFORMS = True
+except ImportError:
+ HAS_PYGMENTS_TRANSFORMS = False
+
+register_base_transforms(ENGINE, verb=False)
diff -r 1400aee10df4 -r faf279e33298 cubicweb/multipart.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/multipart.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,416 @@
+# -*- coding: utf-8 -*-
+'''
+Parser for multipart/form-data
+==============================
+
+This module provides a parser for the multipart/form-data format. It can read
+from a file, a socket or a WSGI environment. The parser can be used to replace
+cgi.FieldStorage (without the bugs) and works with Python 2.5+ and 3.x (2to3).
+
+Licence (MIT)
+-------------
+
+ Copyright (c) 2010, Marcel Hellkamp.
+ Inspired by the Werkzeug library: http://werkzeug.pocoo.org/
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in
+ all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ THE SOFTWARE.
+
+'''
+
+__author__ = 'Marcel Hellkamp'
+__version__ = '0.1'
+__license__ = 'MIT'
+
+from tempfile import TemporaryFile
+from wsgiref.headers import Headers
+import re, sys
+try:
+ from io import BytesIO
+except ImportError: # pragma: no cover (fallback for Python 2.5)
+ from StringIO import StringIO as BytesIO
+
+from six import PY3, text_type
+from six.moves.urllib.parse import parse_qs
+
+##############################################################################
+################################ Helper & Misc ################################
+##############################################################################
+# Some of these were copied from bottle: http://bottle.paws.de/
+
+try:
+ from collections import MutableMapping as DictMixin
+except ImportError: # pragma: no cover (fallback for Python 2.5)
+ from UserDict import DictMixin
+
+class MultiDict(DictMixin):
+ """ A dict that remembers old values for each key """
+ def __init__(self, *a, **k):
+ self.dict = dict()
+ for k, v in dict(*a, **k).items():
+ self[k] = v
+
+ def __len__(self): return len(self.dict)
+ def __iter__(self): return iter(self.dict)
+ def __contains__(self, key): return key in self.dict
+ def __delitem__(self, key): del self.dict[key]
+ def keys(self): return self.dict.keys()
+ def __getitem__(self, key): return self.get(key, KeyError, -1)
+ def __setitem__(self, key, value): self.append(key, value)
+
+ def append(self, key, value): self.dict.setdefault(key, []).append(value)
+ def replace(self, key, value): self.dict[key] = [value]
+ def getall(self, key): return self.dict.get(key) or []
+
+ def get(self, key, default=None, index=-1):
+ if key not in self.dict and default != KeyError:
+ return [default][index]
+ return self.dict[key][index]
+
+ def iterallitems(self):
+ for key, values in self.dict.items():
+ for value in values:
+ yield key, value
+
+def tob(data, enc='utf8'): # Convert strings to bytes (py2 and py3)
+ return data.encode(enc) if isinstance(data, text_type) else data
+
+def copy_file(stream, target, maxread=-1, buffer_size=2*16):
+ ''' Read from :stream and write to :target until :maxread or EOF. '''
+ size, read = 0, stream.read
+ while 1:
+ to_read = buffer_size if maxread < 0 else min(buffer_size, maxread-size)
+ part = read(to_read)
+ if not part: return size
+ target.write(part)
+ size += len(part)
+
+##############################################################################
+################################ Header Parser ################################
+##############################################################################
+
+_special = re.escape('()<>@,;:\\"/[]?={} \t')
+_re_special = re.compile('[%s]' % _special)
+_qstr = '"(?:\\\\.|[^"])*"' # Quoted string
+_value = '(?:[^%s]+|%s)' % (_special, _qstr) # Safe or quoted string
+_option = '(?:;|^)\s*([^%s]+)\s*=\s*(%s)' % (_special, _value)
+_re_option = re.compile(_option) # key=value part of an Content-Type like header
+
+def header_quote(val):
+ if not _re_special.search(val):
+ return val
+ return '"' + val.replace('\\','\\\\').replace('"','\\"') + '"'
+
+def header_unquote(val, filename=False):
+ if val[0] == val[-1] == '"':
+ val = val[1:-1]
+ if val[1:3] == ':\\' or val[:2] == '\\\\':
+ val = val.split('\\')[-1] # fix ie6 bug: full path --> filename
+ return val.replace('\\\\','\\').replace('\\"','"')
+ return val
+
+def parse_options_header(header, options=None):
+ if ';' not in header:
+ return header.lower().strip(), {}
+ ctype, tail = header.split(';', 1)
+ options = options or {}
+ for match in _re_option.finditer(tail):
+ key = match.group(1).lower()
+ value = header_unquote(match.group(2), key=='filename')
+ options[key] = value
+ return ctype, options
+
+##############################################################################
+################################## Multipart ##################################
+##############################################################################
+
+
+class MultipartError(ValueError): pass
+
+
+class MultipartParser(object):
+
+ def __init__(self, stream, boundary, content_length=-1,
+ disk_limit=2**30, mem_limit=2**20, memfile_limit=2**18,
+ buffer_size=2**16, charset='latin1'):
+ ''' Parse a multipart/form-data byte stream. This object is an iterator
+ over the parts of the message.
+
+ :param stream: A file-like stream. Must implement ``.read(size)``.
+ :param boundary: The multipart boundary as a byte string.
+ :param content_length: The maximum number of bytes to read.
+ '''
+ self.stream, self.boundary = stream, boundary
+ self.content_length = content_length
+ self.disk_limit = disk_limit
+ self.memfile_limit = memfile_limit
+ self.mem_limit = min(mem_limit, self.disk_limit)
+ self.buffer_size = min(buffer_size, self.mem_limit)
+ self.charset = charset
+ if self.buffer_size - 6 < len(boundary): # "--boundary--\r\n"
+ raise MultipartError('Boundary does not fit into buffer_size.')
+ self._done = []
+ self._part_iter = None
+
+ def __iter__(self):
+ ''' Iterate over the parts of the multipart message. '''
+ if not self._part_iter:
+ self._part_iter = self._iterparse()
+ for part in self._done:
+ yield part
+ for part in self._part_iter:
+ self._done.append(part)
+ yield part
+
+ def parts(self):
+ ''' Returns a list with all parts of the multipart message. '''
+ return list(iter(self))
+
+ def get(self, name, default=None):
+ ''' Return the first part with that name or a default value (None). '''
+ for part in self:
+ if name == part.name:
+ return part
+ return default
+
+ def get_all(self, name):
+ ''' Return a list of parts with that name. '''
+ return [p for p in self if p.name == name]
+
+ def _lineiter(self):
+ ''' Iterate over a binary file-like object line by line. Each line is
+ returned as a (line, line_ending) tuple. If the line does not fit
+ into self.buffer_size, line_ending is empty and the rest of the line
+ is returned with the next iteration.
+ '''
+ read = self.stream.read
+ maxread, maxbuf = self.content_length, self.buffer_size
+ _bcrnl = tob('\r\n')
+ _bcr = _bcrnl[:1]
+ _bnl = _bcrnl[1:]
+        _bempty = _bcrnl[:0] # b'\r\n'[:0] -> b''
+ buffer = _bempty # buffer for the last (partial) line
+ while 1:
+ data = read(maxbuf if maxread < 0 else min(maxbuf, maxread))
+ maxread -= len(data)
+ lines = (buffer+data).splitlines(True)
+ len_first_line = len(lines[0])
+ # be sure that the first line does not become too big
+ if len_first_line > self.buffer_size:
+ # at the same time don't split a '\r\n' accidentally
+ if (len_first_line == self.buffer_size+1 and
+ lines[0].endswith(_bcrnl)):
+ splitpos = self.buffer_size - 1
+ else:
+ splitpos = self.buffer_size
+ lines[:1] = [lines[0][:splitpos],
+ lines[0][splitpos:]]
+ if data:
+ buffer = lines[-1]
+ lines = lines[:-1]
+ for line in lines:
+ if line.endswith(_bcrnl): yield line[:-2], _bcrnl
+ elif line.endswith(_bnl): yield line[:-1], _bnl
+ elif line.endswith(_bcr): yield line[:-1], _bcr
+ else: yield line, _bempty
+ if not data:
+ break
+
+ def _iterparse(self):
+ lines, line = self._lineiter(), ''
+ separator = tob('--') + tob(self.boundary)
+ terminator = tob('--') + tob(self.boundary) + tob('--')
+ # Consume first boundary. Ignore leading blank lines
+ for line, nl in lines:
+ if line: break
+ if line != separator:
+ raise MultipartError("Stream does not start with boundary")
+ # For each part in stream...
+ mem_used, disk_used = 0, 0 # Track used resources to prevent DoS
+        is_tail = False # True if the last line was incomplete (cut)
+ opts = {'buffer_size': self.buffer_size,
+ 'memfile_limit': self.memfile_limit,
+ 'charset': self.charset}
+ part = MultipartPart(**opts)
+ for line, nl in lines:
+ if line == terminator and not is_tail:
+ part.file.seek(0)
+ yield part
+ break
+ elif line == separator and not is_tail:
+ if part.is_buffered(): mem_used += part.size
+ else: disk_used += part.size
+ part.file.seek(0)
+ yield part
+ part = MultipartPart(**opts)
+ else:
+ is_tail = not nl # The next line continues this one
+ part.feed(line, nl)
+ if part.is_buffered():
+ if part.size + mem_used > self.mem_limit:
+ raise MultipartError("Memory limit reached.")
+ elif part.size + disk_used > self.disk_limit:
+ raise MultipartError("Disk limit reached.")
+ if line != terminator:
+ raise MultipartError("Unexpected end of multipart stream.")
+
+
+class MultipartPart(object):
+
+ def __init__(self, buffer_size=2**16, memfile_limit=2**18, charset='latin1'):
+ self.headerlist = []
+ self.headers = None
+ self.file = False
+ self.size = 0
+ self._buf = tob('')
+ self.disposition, self.name, self.filename = None, None, None
+ self.content_type, self.charset = None, charset
+ self.memfile_limit = memfile_limit
+ self.buffer_size = buffer_size
+
+ def feed(self, line, nl=''):
+ if self.file:
+ return self.write_body(line, nl)
+ return self.write_header(line, nl)
+
+ def write_header(self, line, nl):
+ line = line.decode(self.charset or 'latin1')
+ if not nl: raise MultipartError('Unexpected end of line in header.')
+ if not line.strip(): # blank line -> end of header segment
+ self.finish_header()
+ elif line[0] in ' \t' and self.headerlist:
+ name, value = self.headerlist.pop()
+ self.headerlist.append((name, value+line.strip()))
+ else:
+ if ':' not in line:
+ raise MultipartError("Syntax error in header: No colon.")
+ name, value = line.split(':', 1)
+ self.headerlist.append((name.strip(), value.strip()))
+
+ def write_body(self, line, nl):
+ if not line and not nl: return # This does not even flush the buffer
+ self.size += len(line) + len(self._buf)
+ self.file.write(self._buf + line)
+ self._buf = nl
+ if self.content_length > 0 and self.size > self.content_length:
+ raise MultipartError('Size of body exceeds Content-Length header.')
+ if self.size > self.memfile_limit and isinstance(self.file, BytesIO):
+ # TODO: What about non-file uploads that exceed the memfile_limit?
+ self.file, old = TemporaryFile(mode='w+b'), self.file
+ old.seek(0)
+ copy_file(old, self.file, self.size, self.buffer_size)
+
+ def finish_header(self):
+ self.file = BytesIO()
+ self.headers = Headers(self.headerlist)
+ cdis = self.headers.get('Content-Disposition','')
+ ctype = self.headers.get('Content-Type','')
+ clen = self.headers.get('Content-Length','-1')
+ if not cdis:
+ raise MultipartError('Content-Disposition header is missing.')
+ self.disposition, self.options = parse_options_header(cdis)
+ self.name = self.options.get('name')
+ self.filename = self.options.get('filename')
+ self.content_type, options = parse_options_header(ctype)
+ self.charset = options.get('charset') or self.charset
+ self.content_length = int(self.headers.get('Content-Length','-1'))
+
+ def is_buffered(self):
+ ''' Return true if the data is fully buffered in memory.'''
+ return isinstance(self.file, BytesIO)
+
+ @property
+ def value(self):
+ ''' Data decoded with the specified charset '''
+ pos = self.file.tell()
+ self.file.seek(0)
+ val = self.file.read()
+ self.file.seek(pos)
+ return val.decode(self.charset)
+
+ def save_as(self, path):
+ fp = open(path, 'wb')
+ pos = self.file.tell()
+ try:
+ self.file.seek(0)
+ size = copy_file(self.file, fp)
+ finally:
+ self.file.seek(pos)
+ return size
+
+##############################################################################
+#################################### WSGI ####################################
+##############################################################################
+
+def parse_form_data(environ, charset='utf8', strict=False, **kw):
+ ''' Parse form data from an environ dict and return a (forms, files) tuple.
+ Both tuple values are dictionaries with the form-field name as a key
+ (unicode) and lists as values (multiple values per key are possible).
+ The forms-dictionary contains form-field values as unicode strings.
+ The files-dictionary contains :class:`MultipartPart` instances, either
+        because the form-field was a file-upload or the value is too big to fit
+ into memory limits.
+
+        :param environ: A WSGI environment dict.
+ :param charset: The charset to use if unsure. (default: utf8)
+ :param strict: If True, raise :exc:`MultipartError` on any parsing
+ errors. These are silently ignored by default.
+ '''
+
+ forms, files = MultiDict(), MultiDict()
+ try:
+ if environ.get('REQUEST_METHOD','GET').upper() not in ('POST', 'PUT'):
+ raise MultipartError("Request method other than POST or PUT.")
+ content_length = int(environ.get('CONTENT_LENGTH', '-1'))
+ content_type = environ.get('CONTENT_TYPE', '')
+ if not content_type:
+ raise MultipartError("Missing Content-Type header.")
+ content_type, options = parse_options_header(content_type)
+ stream = environ.get('wsgi.input') or BytesIO()
+ kw['charset'] = charset = options.get('charset', charset)
+ if content_type == 'multipart/form-data':
+ boundary = options.get('boundary','')
+ if not boundary:
+ raise MultipartError("No boundary for multipart/form-data.")
+ for part in MultipartParser(stream, boundary, content_length, **kw):
+ if part.filename or not part.is_buffered():
+ files[part.name] = part
+ else: # TODO: Big form-fields are in the files dict. really?
+ forms[part.name] = part.value
+ elif content_type in ('application/x-www-form-urlencoded',
+ 'application/x-url-encoded'):
+ mem_limit = kw.get('mem_limit', 2**20)
+ if content_length > mem_limit:
+ raise MultipartError("Request too big. Increase MAXMEM.")
+ data = stream.read(mem_limit)
+            if stream.read(1): # There is more data that does not fit mem_limit
+ raise MultipartError("Request too big. Increase MAXMEM.")
+ if PY3:
+ data = data.decode('ascii')
+ data = parse_qs(data, keep_blank_values=True)
+ for key, values in data.items():
+ for value in values:
+ if PY3:
+ forms[key] = value
+ else:
+ forms[key.decode(charset)] = value.decode(charset)
+ else:
+ raise MultipartError("Unsupported content type.")
+ except MultipartError:
+ if strict: raise
+ return forms, files
diff -r 1400aee10df4 -r faf279e33298 cubicweb/predicates.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/predicates.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,1422 @@
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""Predicate classes
+"""
+
+__docformat__ = "restructuredtext en"
+
+import logging
+from warnings import warn
+from operator import eq
+
+from six import string_types, integer_types
+from six.moves import range
+
+from logilab.common.deprecation import deprecated
+from logilab.common.registry import Predicate, objectify_predicate, yes
+
+from yams.schema import BASE_TYPES, role_name
+from rql.nodes import Function
+
+from cubicweb import (Unauthorized, NoSelectableObject, NotAnEntity,
+ CW_EVENT_MANAGER, role)
+from cubicweb.uilib import eid_param
+from cubicweb.schema import split_expression
+
+yes = deprecated('[3.15] import yes() from use logilab.common.registry')(yes)
+
+
+# abstract predicates / mixin helpers ###########################################
+
+class PartialPredicateMixIn(object):
+ """convenience mix-in for predicates that will look into the containing
+ class to find missing information.
+
+ cf. `cubicweb.web.action.LinkToEntityAction` for instance
+ """
+ def __call__(self, cls, *args, **kwargs):
+ self.complete(cls)
+ return super(PartialPredicateMixIn, self).__call__(cls, *args, **kwargs)
+
+
+class EClassPredicate(Predicate):
+ """abstract class for predicates working on *entity class(es)* specified
+ explicitly or found of the result set.
+
+ Here are entity lookup / scoring rules:
+
+ * if `entity` is specified, return score for this entity's class
+
+ * elif `rset`, `select` and `filtered_variable` are specified, return score
+ for the possible classes for variable in the given rql :class:`Select`
+ node
+
+ * elif `rset` and `row` are specified, return score for the class of the
+ entity found in the specified cell, using column specified by `col` or 0
+
+ * elif `rset` is specified return score for each entity class found in the
+ column specified by the `col` argument or in column 0 if not
+ specified
+
+ When there are several classes to be evaluated, return the sum of scores for
+ each entity class unless:
+
+ - `mode` == 'all' (the default) and some entity class is scored
+ to 0, in which case 0 is returned
+
+ - `mode` == 'any', in which case the first non-zero score is
+ returned
+
+ - `accept_none` is False and some cell in the column has a None value
+ (this may occur with an outer join)
+ """
+ def __init__(self, once_is_enough=None, accept_none=True, mode='all'):
+ if once_is_enough is not None:
+ warn("[3.14] once_is_enough is deprecated, use mode='any'",
+ DeprecationWarning, stacklevel=2)
+ if once_is_enough:
+ mode = 'any'
+ assert mode in ('any', 'all'), 'bad mode %s' % mode
+ self.once_is_enough = mode == 'any'
+ self.accept_none = accept_none
+
+ def __call__(self, cls, req, rset=None, row=None, col=0, entity=None,
+ select=None, filtered_variable=None,
+ accept_none=None,
+ **kwargs):
+ if entity is not None:
+ return self.score_class(entity.__class__, req)
+ if not rset:
+ return 0
+ if select is not None and filtered_variable is not None:
+ etypes = set(sol[filtered_variable.name] for sol in select.solutions)
+ elif row is None:
+ if accept_none is None:
+ accept_none = self.accept_none
+ if not accept_none and \
+ any(row[col] is None for row in rset):
+ return 0
+ etypes = rset.column_types(col)
+ else:
+ etype = rset.description[row][col]
+ # may have None in rset.description on outer join
+ if etype is None or rset.rows[row][col] is None:
+ return 0
+ etypes = (etype,)
+ score = 0
+ for etype in etypes:
+ escore = self.score(cls, req, etype)
+ if not escore and not self.once_is_enough:
+ return 0
+ elif self.once_is_enough:
+ return escore
+ score += escore
+ return score
+
+ def score(self, cls, req, etype):
+ if etype in BASE_TYPES:
+ return 0
+ return self.score_class(req.vreg['etypes'].etype_class(etype), req)
+
+ def score_class(self, eclass, req):
+ raise NotImplementedError()
+
+
+class EntityPredicate(EClassPredicate):
+ """abstract class for predicates working on *entity instance(s)* specified
+ explicitly or found of the result set.
+
+ Here are entity lookup / scoring rules:
+
+ * if `entity` is specified, return score for this entity
+
+ * elif `row` is specified, return score for the entity found in the
+ specified cell, using column specified by `col` or 0
+
+ * else return the sum of scores for each entity found in the column
+ specified by the `col` argument or in column 0 if not specified,
+ unless:
+
+ - `mode` == 'all' (the default) and some entity class is scored
+ to 0, in which case 0 is returned
+
+ - `mode` == 'any', in which case the first non-zero score is
+ returned
+
+ - `accept_none` is False and some cell in the column has a None value
+ (this may occur with an outer join)
+
+ .. Note::
+ using :class:`EntityPredicate` or :class:`EClassPredicate` as base predicate
+ class impacts performance, since when no entity or row is specified the
+ latter works on every different *entity class* found in the result set,
+ while the former works on each *entity* (eg each row of the result set),
+ which may be much more costly.
+ """
+
+ def __call__(self, cls, req, rset=None, row=None, col=0, accept_none=None,
+ entity=None, **kwargs):
+ if not rset and entity is None:
+ return 0
+ score = 0
+ if entity is not None:
+ score = self.score_entity(entity)
+ elif row is None:
+ col = col or 0
+ if accept_none is None:
+ accept_none = self.accept_none
+ for row, rowvalue in enumerate(rset.rows):
+ if rowvalue[col] is None: # outer join
+ if not accept_none:
+ return 0
+ continue
+ escore = self.score(req, rset, row, col)
+ if not escore and not self.once_is_enough:
+ return 0
+ elif self.once_is_enough:
+ return escore
+ score += escore
+ else:
+ col = col or 0
+ etype = rset.description[row][col]
+ if etype is not None: # outer join
+ score = self.score(req, rset, row, col)
+ return score
+
+ def score(self, req, rset, row, col):
+ try:
+ return self.score_entity(rset.get_entity(row, col))
+ except NotAnEntity:
+ return 0
+
+ def score_entity(self, entity):
+ raise NotImplementedError()
+
+
+class ExpectedValuePredicate(Predicate):
+ """Take a list of expected values as initializer argument and store them
+ into the :attr:`expected` set attribute. You may also give a set as single
+ argument, which will then be referenced as set of expected values,
+ allowing modifications to the given set to be considered.
+
+ You should implement one of :meth:`_values_set(cls, req, **kwargs)` or
+ :meth:`_get_value(cls, req, **kwargs)` method which should respectively
+ return the set of values or the unique possible value for the given context.
+
+ You may also specify a `mode` behaviour as argument, as explained below.
+
+ Returned score is:
+
+ - 0 if `mode` == 'all' (the default) and at least one expected
+ values isn't found
+
+ - 0 if `mode` == 'any' and no expected value is found at all
+
+ - else the number of matching values
+
+ Notice `mode` = 'any' with a single expected value has no effect at all.
+ """
+ def __init__(self, *expected, **kwargs):
+ assert expected, self
+ if len(expected) == 1 and isinstance(expected[0], (set, dict)):
+ self.expected = expected[0]
+ else:
+ self.expected = frozenset(expected)
+ mode = kwargs.pop('mode', 'all')
+ assert mode in ('any', 'all'), 'bad mode %s' % mode
+ self.once_is_enough = mode == 'any'
+ assert not kwargs, 'unexpected arguments %s' % kwargs
+
+ def __str__(self):
+ return '%s(%s)' % (self.__class__.__name__,
+ ','.join(sorted(str(s) for s in self.expected)))
+
+ def __call__(self, cls, req, **kwargs):
+ values = self._values_set(cls, req, **kwargs)
+ if isinstance(values, dict):
+ if isinstance(self.expected, dict):
+ matching = 0
+ for key, expected_value in self.expected.items():
+ if key in values:
+ if (isinstance(expected_value, (list, tuple, frozenset, set))
+ and values[key] in expected_value):
+ matching += 1
+ elif values[key] == expected_value:
+ matching += 1
+ if isinstance(self.expected, (set, frozenset)):
+ values = frozenset(values)
+ matching = len(values & self.expected)
+ else:
+ matching = len(values & self.expected)
+ if self.once_is_enough:
+ return matching
+ if matching == len(self.expected):
+ return matching
+ return 0
+
+ def _values_set(self, cls, req, **kwargs):
+ return frozenset( (self._get_value(cls, req, **kwargs),) )
+
+ def _get_value(self, cls, req, **kwargs):
+ raise NotImplementedError()
+
+
+# bare predicates ##############################################################
+
+class match_kwargs(ExpectedValuePredicate):
+ """Return non-zero score if parameter names specified as initializer
+ arguments are specified in the input context.
+
+
+ Return a score corresponding to the number of expected parameters.
+
+ When multiple parameters are expected, all of them should be found in
+ the input context unless `mode` keyword argument is given to 'any',
+ in which case a single matching parameter is enough.
+ """
+
+ def _values_set(self, cls, req, **kwargs):
+ return kwargs
+
+
+class appobject_selectable(Predicate):
+ """Return 1 if another appobject is selectable using the same input context.
+
+ Initializer arguments:
+
+ * `registry`, a registry name
+
+ * `regids`, object identifiers in this registry, one of them should be
+ selectable.
+ """
+ selectable_score = 1
+ def __init__(self, registry, *regids):
+ self.registry = registry
+ self.regids = regids
+
+ def __call__(self, cls, req, **kwargs):
+ for regid in self.regids:
+ if req.vreg[self.registry].select_or_none(regid, req, **kwargs) is not None:
+ return self.selectable_score
+ return 0
+
+
+class adaptable(appobject_selectable):
+ """Return 1 if another appobject is selectable using the same input context.
+
+ Initializer arguments:
+
+ * `regids`, adapter identifiers (e.g. interface names) to which the context
+ (usually entities) should be adaptable. One of them should be selectable
+ when multiple identifiers are given.
+ """
+ def __init__(self, *regids):
+ super(adaptable, self).__init__('adapters', *regids)
+
+ def __call__(self, cls, req, **kwargs):
+ kwargs.setdefault('accept_none', False)
+ score = super(adaptable, self).__call__(cls, req, **kwargs)
+ if score == 0 and kwargs.get('rset') and len(kwargs['rset']) > 1 and not 'row' in kwargs:
+ # on rset containing several entity types, each row may be
+ # individually adaptable, while the whole rset won't be if the
+ # same adapter can't be used for each type
+ for row in range(len(kwargs['rset'])):
+ kwargs.setdefault('col', 0)
+ _score = super(adaptable, self).__call__(cls, req, row=row, **kwargs)
+ if not _score:
+ return 0
+ # adjust score per row as expected by default adjust_score
+ # implementation
+ score += self.adjust_score(_score)
+ else:
+ score = self.adjust_score(score)
+ return score
+
+ @staticmethod
+ def adjust_score(score):
+ # being adaptable to an interface should take precedence over
+ # is_instance('Any'), but not over explicit
+ # is_instance('SomeEntityType'), and, for **a single entity**:
+ # * is_instance('Any') score is 1
+ # * is_instance('SomeEntityType') score is at least 2
+ if score >= 2:
+ return score - 0.5
+ if score == 1:
+ return score + 0.5
+ return score
+
+
+class configuration_values(Predicate):
+ """Return 1 if the instance has an option set to a given value(s) in its
+ configuration file.
+ """
+ # XXX this predicate could be evaluated on startup
+ def __init__(self, key, values):
+ self._key = key
+ if not isinstance(values, (tuple, list)):
+ values = (values,)
+ self._values = frozenset(values)
+
+ def __call__(self, cls, req, **kwargs):
+ try:
+ return self._score
+ except AttributeError:
+ if req is None:
+ config = kwargs['repo'].config
+ else:
+ config = req.vreg.config
+ self._score = config[self._key] in self._values
+ return self._score
+
+
+# rset predicates ##############################################################
+
+@objectify_predicate
+def none_rset(cls, req, rset=None, **kwargs):
+ """Return 1 if the result set is None (eg usually not specified)."""
+ if rset is None:
+ return 1
+ return 0
+
+
+# XXX == ~ none_rset
+@objectify_predicate
+def any_rset(cls, req, rset=None, **kwargs):
+ """Return 1 for any result set, whatever the number of rows in it, even 0."""
+ if rset is not None:
+ return 1
+ return 0
+
+
+@objectify_predicate
+def nonempty_rset(cls, req, rset=None, **kwargs):
+ """Return 1 for result set containing one ore more rows."""
+ if rset:
+ return 1
+ return 0
+
+
+# XXX == ~ nonempty_rset
+@objectify_predicate
+def empty_rset(cls, req, rset=None, **kwargs):
+ """Return 1 for result set which doesn't contain any row."""
+ if rset is not None and len(rset) == 0:
+ return 1
+ return 0
+
+
+# XXX == multi_lines_rset(1)
+@objectify_predicate
+def one_line_rset(cls, req, rset=None, row=None, **kwargs):
+ """Return 1 if the result set is of size 1, or greater but a specific row in
+ the result set is specified ('row' argument).
+ """
+ if rset is None and 'entity' in kwargs:
+ return 1
+ if rset is not None and (row is not None or len(rset) == 1):
+ return 1
+ return 0
+
+
+class multi_lines_rset(Predicate):
+ """Return 1 if the operator expression matches between `num` elements
+ in the result set and the `expected` value if defined.
+
+ By default, multi_lines_rset(expected) matches equality expression:
+ `nb` row(s) in result set equals to expected value
+ But, you can perform richer comparisons by overriding default operator:
+ multi_lines_rset(expected, operator.gt)
+
+ If `expected` is None, return 1 if the result set contains *at least*
+ two rows.
+ If rset is None, return 0.
+ """
+ def __init__(self, expected=None, operator=eq):
+ self.expected = expected
+ self.operator = operator
+
+ def match_expected(self, num):
+ if self.expected is None:
+ return num > 1
+ return self.operator(num, self.expected)
+
+ def __call__(self, cls, req, rset=None, **kwargs):
+ return int(rset is not None and self.match_expected(len(rset)))
+
+
+class multi_columns_rset(multi_lines_rset):
+ """If `nb` is specified, return 1 if the result set has exactly `nb` column
+ per row. Else (`nb` is None), return 1 if the result set contains *at least*
+ two columns per row. Return 0 for empty result set.
+ """
+
+ def __call__(self, cls, req, rset=None, **kwargs):
+ # 'or 0' since we *must not* return None. Also don't use rset.rows so
+ # this selector will work if rset is a simple list of list.
+ return rset and self.match_expected(len(rset[0])) or 0
+
+
+class paginated_rset(Predicate):
+ """Return 1 or more for result set with more rows than one or more page
+ size. You can specify expected number of pages to the initializer (default
+ to one), and you'll get that number of pages as score if the result set is
+ big enough.
+
+ Page size is searched in (respecting order):
+ * a `page_size` argument
+ * a `page_size` form parameters
+ * the `navigation.page-size` property (see :ref:`PersistentProperties`)
+ """
+ def __init__(self, nbpages=1):
+ assert nbpages > 0
+ self.nbpages = nbpages
+
+ def __call__(self, cls, req, rset=None, **kwargs):
+ if rset is None:
+ return 0
+ page_size = kwargs.get('page_size')
+ if page_size is None:
+ page_size = req.form.get('page_size')
+ if page_size is not None:
+ try:
+ page_size = int(page_size)
+ except ValueError:
+ page_size = None
+ if page_size is None:
+ page_size_prop = getattr(cls, 'page_size_property', 'navigation.page-size')
+ page_size = req.property_value(page_size_prop)
+ if len(rset) <= (page_size*self.nbpages):
+ return 0
+ return self.nbpages
+
+
+@objectify_predicate
+def sorted_rset(cls, req, rset=None, **kwargs):
+ """Return 1 for sorted result set (e.g. from an RQL query containing an
+ ORDERBY clause), with exception that it will return 0 if the rset is
+ 'ORDERBY FTIRANK(VAR)' (eg sorted by rank value of the has_text index).
+ """
+ if rset is None:
+ return 0
+ selects = rset.syntax_tree().children
+ if (len(selects) > 1 or
+ not selects[0].orderby or
+ (isinstance(selects[0].orderby[0].term, Function) and
+ selects[0].orderby[0].term.name == 'FTIRANK')
+ ):
+ return 0
+ return 2
+
+
+# XXX == multi_etypes_rset(1)
+@objectify_predicate
+def one_etype_rset(cls, req, rset=None, col=0, **kwargs):
+ """Return 1 if the result set contains entities which are all of the same
+ type in the column specified by the `col` argument of the input context, or
+ in column 0.
+ """
+ if rset is None:
+ return 0
+ if len(rset.column_types(col)) != 1:
+ return 0
+ return 1
+
+
+class multi_etypes_rset(multi_lines_rset):
+ """If `nb` is specified, return 1 if the result set contains `nb` different
+ types of entities in the column specified by the `col` argument of the input
+ context, or in column 0. If `nb` is None, return 1 if the result set contains
+ *at least* two different types of entities.
+ """
+
+ def __call__(self, cls, req, rset=None, col=0, **kwargs):
+ # 'or 0' since we *must not* return None
+ return rset and self.match_expected(len(rset.column_types(col))) or 0
+
+
+@objectify_predicate
+def logged_user_in_rset(cls, req, rset=None, row=None, col=0, **kwargs):
+ """Return positive score if the result set at the specified row / col
+ contains the eid of the logged user.
+ """
+ if rset is None:
+ return 0
+ return req.user.eid == rset[row or 0][col]
+
+
+# entity predicates #############################################################
+
+class composite_etype(Predicate):
+ """Return 1 for composite entities.
+
+ A composite entity has an etype for which at least one relation
+ definition points in its direction with the
+ composite='subject'/'object' notation.
+ """
+
+ def __call__(self, cls, req, **kwargs):
+ entity = kwargs.pop('entity', None)
+ if entity is None:
+ return 0
+ return entity.e_schema.is_composite
+
+
+
+class non_final_entity(EClassPredicate):
+ """Return 1 for entity of a non final entity type(s). Remember, "final"
+ entity types are String, Int, etc... This is equivalent to
+ `is_instance('Any')` but more optimized.
+
+ See :class:`~cubicweb.predicates.EClassPredicate` documentation for entity
+ class lookup / score rules according to the input context.
+ """
+ def score(self, cls, req, etype):
+ if etype in BASE_TYPES:
+ return 0
+ return 1
+
+ def score_class(self, eclass, req):
+ return 1 # necessarily true if we're there
+
+
+
+def _reset_is_instance_cache(vreg):
+ vreg._is_instance_predicate_cache = {}
+
+CW_EVENT_MANAGER.bind('before-registry-reset', _reset_is_instance_cache)
+
+class is_instance(EClassPredicate):
+ """Return non-zero score for entity that is an instance of the one of given
+ type(s). If multiple arguments are given, matching one of them is enough.
+
+ Entity types should be given as string, the corresponding class will be
+ fetched from the registry at selection time.
+
+ See :class:`~cubicweb.predicates.EClassPredicate` documentation for entity
+ class lookup / score rules according to the input context.
+
+ .. note:: the score will reflect class proximity so the most specific object
+ will be selected.
+ """
+
+ def __init__(self, *expected_etypes, **kwargs):
+ super(is_instance, self).__init__(**kwargs)
+ self.expected_etypes = expected_etypes
+ for etype in self.expected_etypes:
+ assert isinstance(etype, string_types), etype
+
+ def __str__(self):
+ return '%s(%s)' % (self.__class__.__name__,
+ ','.join(str(s) for s in self.expected_etypes))
+
+ def score_class(self, eclass, req):
+ # cache on vreg to avoid reloading issues
+ try:
+ cache = req.vreg._is_instance_predicate_cache
+ except AttributeError:
+ # XXX 'before-registry-reset' not called for db-api connections
+ cache = req.vreg._is_instance_predicate_cache = {}
+ try:
+ expected_eclasses = cache[self]
+ except KeyError:
+ # turn list of entity types as string into a list of
+ # (entity class, parent classes)
+ etypesreg = req.vreg['etypes']
+ expected_eclasses = cache[self] = []
+ for etype in self.expected_etypes:
+ try:
+ expected_eclasses.append(etypesreg.etype_class(etype))
+ except KeyError:
+ continue # entity type not in the schema
+ parents, any = req.vreg['etypes'].parent_classes(eclass.__regid__)
+ score = 0
+ for expectedcls in expected_eclasses:
+ # adjust score according to class proximity
+ if expectedcls is eclass:
+ score += len(parents) + 4
+ elif expectedcls is any: # Any
+ score += 1
+ else:
+ for index, basecls in enumerate(reversed(parents)):
+ if expectedcls is basecls:
+ score += index + 3
+ break
+ return score
+
+
+class score_entity(EntityPredicate):
+ """Return score according to an arbitrary function given as argument which
+ will be called with input content entity as argument.
+
+ This is a very useful predicate that will usually interest you since it
+ allows a lot of things without having to write a specific predicate.
+
+ The function can return arbitrary value which will be casted to an integer
+ value at the end.
+
+ See :class:`~cubicweb.predicates.EntityPredicate` documentation for entity
+ lookup / score rules according to the input context.
+ """
+ def __init__(self, scorefunc, once_is_enough=None, mode='all'):
+ super(score_entity, self).__init__(mode=mode, once_is_enough=once_is_enough)
+ def intscore(*args, **kwargs):
+ score = scorefunc(*args, **kwargs)
+ if not score:
+ return 0
+ if isinstance(score, integer_types):
+ return score
+ return 1
+ self.score_entity = intscore
+
+
+class has_mimetype(EntityPredicate):
+ """Return 1 if the entity adapt to IDownloadable and has the given MIME type.
+
+ You can give 'image/' to match any image for instance, or 'image/png' to match
+ only PNG images.
+ """
+ def __init__(self, mimetype, once_is_enough=None, mode='all'):
+ super(has_mimetype, self).__init__(mode=mode, once_is_enough=once_is_enough)
+ self.mimetype = mimetype
+
+ def score_entity(self, entity):
+ idownloadable = entity.cw_adapt_to('IDownloadable')
+ if idownloadable is None:
+ return 0
+ mt = idownloadable.download_content_type()
+ if not (mt and mt.startswith(self.mimetype)):
+ return 0
+ return 1
+
+
+class relation_possible(EntityPredicate):
+ """Return 1 for entity that supports the relation, provided that the
+ request's user may do some `action` on it (see below).
+
+ The relation is specified by the following initializer arguments:
+
+ * `rtype`, the name of the relation
+
+ * `role`, the role of the entity in the relation, either 'subject' or
+ 'object', default to 'subject'
+
+ * `target_etype`, optional name of an entity type that should be supported
+ at the other end of the relation
+
+ * `action`, a relation schema action (e.g. one of 'read', 'add', 'delete',
+ default to 'read') which must be granted to the user, else a 0 score will
+ be returned. Give None if you don't want any permission checking.
+
+ * `strict`, boolean (default to False) telling what to do when the user has
+ not globally the permission for the action (eg the action is not granted
+ to one of the user's groups)
+
+ - when strict is False, if there are some local role defined for this
+ action (e.g. using rql expressions), then the permission will be
+ considered as granted
+
+ - when strict is True, then the permission will be actually checked for
+ each entity
+
+ Setting `strict` to True impacts performance for large result set since
+ you'll then get the :class:`~cubicweb.predicates.EntityPredicate` behaviour
+ while otherwise you get the :class:`~cubicweb.predicates.EClassPredicate`'s
+ one. See those classes documentation for entity lookup / score rules
+ according to the input context.
+ """
+
+ def __init__(self, rtype, role='subject', target_etype=None,
+ action='read', strict=False, **kwargs):
+ super(relation_possible, self).__init__(**kwargs)
+ self.rtype = rtype
+ self.role = role
+ self.target_etype = target_etype
+ self.action = action
+ self.strict = strict
+
+ # hack hack hack
+ def __call__(self, cls, req, **kwargs):
+ # hack hack hack
+ if self.strict:
+ return EntityPredicate.__call__(self, cls, req, **kwargs)
+ return EClassPredicate.__call__(self, cls, req, **kwargs)
+
+ def score(self, *args):
+ if self.strict:
+ return EntityPredicate.score(self, *args)
+ return EClassPredicate.score(self, *args)
+
+ def _get_rschema(self, eclass):
+ eschema = eclass.e_schema
+ try:
+ if self.role == 'object':
+ return eschema.objrels[self.rtype]
+ else:
+ return eschema.subjrels[self.rtype]
+ except KeyError:
+ return None
+
+ def score_class(self, eclass, req):
+ rschema = self._get_rschema(eclass)
+ if rschema is None:
+ return 0 # relation not supported
+ eschema = eclass.e_schema
+ if self.target_etype is not None:
+ try:
+ rdef = rschema.role_rdef(eschema, self.target_etype, self.role)
+ except KeyError:
+ return 0
+ if self.action and not rdef.may_have_permission(self.action, req):
+ return 0
+ teschema = req.vreg.schema.eschema(self.target_etype)
+ if not teschema.may_have_permission('read', req):
+ return 0
+ elif self.action:
+ return rschema.may_have_permission(self.action, req, eschema, self.role)
+ return 1
+
+ def score_entity(self, entity):
+ rschema = self._get_rschema(entity)
+ if rschema is None:
+ return 0 # relation not supported
+ if self.action:
+ if self.target_etype is not None:
+ try:
+ rschema = rschema.role_rdef(entity.e_schema,
+ self.target_etype, self.role)
+ except KeyError:
+ return 0
+ if self.role == 'subject':
+ if not rschema.has_perm(entity._cw, self.action, fromeid=entity.eid):
+ return 0
+ elif not rschema.has_perm(entity._cw, self.action, toeid=entity.eid):
+ return 0
+ if self.target_etype is not None:
+ req = entity._cw
+ teschema = req.vreg.schema.eschema(self.target_etype)
+ if not teschema.may_have_permission('read', req):
+ return 0
+ return 1
+
+
+class partial_relation_possible(PartialPredicateMixIn, relation_possible):
+ """Same as :class:~`cubicweb.predicates.relation_possible`, but will look for
+ attributes of the selected class to get information which is otherwise
+ expected by the initializer, except for `action` and `strict` which are kept
+ as initializer arguments.
+
+ This is useful to predefine predicate of an abstract class designed to be
+ customized.
+ """
+ def __init__(self, action='read', **kwargs):
+ super(partial_relation_possible, self).__init__(None, None, None,
+ action, **kwargs)
+
+ def complete(self, cls):
+ self.rtype = cls.rtype
+ self.role = role(cls)
+ self.target_etype = getattr(cls, 'target_etype', None)
+
+
+class has_related_entities(EntityPredicate):
+ """Return 1 if entity support the specified relation and has some linked
+ entities by this relation, optionally filtered according to the specified
+ target type.
+
+ The relation is specified by the following initializer arguments:
+
+ * `rtype`, the name of the relation
+
+ * `role`, the role of the entity in the relation, either 'subject' or
+ 'object', default to 'subject'.
+
+ * `target_etype`, optional name of an entity type that should be found
+ at the other end of the relation
+
+ See :class:`~cubicweb.predicates.EntityPredicate` documentation for entity
+ lookup / score rules according to the input context.
+ """
+ def __init__(self, rtype, role='subject', target_etype=None, **kwargs):
+ super(has_related_entities, self).__init__(**kwargs)
+ self.rtype = rtype
+ self.role = role
+ self.target_etype = target_etype
+
+ def score_entity(self, entity):
+ relpossel = relation_possible(self.rtype, self.role, self.target_etype)
+ if not relpossel.score_class(entity.__class__, entity._cw):
+ return 0
+ rset = entity.related(self.rtype, self.role)
+ if self.target_etype:
+ return any(r for r in rset.description if r[0] == self.target_etype)
+ return rset and 1 or 0
+
+
+class partial_has_related_entities(PartialPredicateMixIn, has_related_entities):
+ """Same as :class:~`cubicweb.predicates.has_related_entity`, but will look
+ for attributes of the selected class to get information which is otherwise
+ expected by the initializer.
+
+ This is useful to predefine predicate of an abstract class designed to be
+ customized.
+ """
+ def __init__(self, **kwargs):
+ super(partial_has_related_entities, self).__init__(None, None, None,
+ **kwargs)
+
+ def complete(self, cls):
+ self.rtype = cls.rtype
+ self.role = role(cls)
+ self.target_etype = getattr(cls, 'target_etype', None)
+
+
+class has_permission(EntityPredicate):
+ """Return non-zero score if request's user has the permission to do the
+ requested action on the entity. `action` is an entity schema action (eg one
+ of 'read', 'add', 'delete', 'update').
+
+ Here are entity lookup / scoring rules:
+
+ * if `entity` is specified, check permission is granted for this entity
+
+ * elif `row` is specified, check permission is granted for the entity found
+ in the specified cell
+
+ * else check permission is granted for each entity found in the column
+ specified by the `col` argument or in column 0
+ """
+ def __init__(self, action):
+ self.action = action
+
+ # don't use EntityPredicate.__call__ but this optimized implementation to
+ # avoid considering each entity when it's not necessary
+ def __call__(self, cls, req, rset=None, row=None, col=0, entity=None, **kwargs):
+ if entity is not None:
+ return self.score_entity(entity)
+ if rset is None:
+ return 0
+ if row is None:
+ score = 0
+ need_local_check = []
+ geteschema = req.vreg.schema.eschema
+ user = req.user
+ action = self.action
+ for etype in rset.column_types(0):
+ if etype in BASE_TYPES:
+ return 0
+ eschema = geteschema(etype)
+ if not user.matching_groups(eschema.get_groups(action)):
+ if eschema.has_local_role(action):
+ # have to check local roles
+ need_local_check.append(eschema)
+ continue
+ else:
+ # even a local role won't be enough
+ return 0
+ score += 1
+ if need_local_check:
+ # check local role for entities of necessary types
+ for i, row in enumerate(rset):
+ if not rset.description[i][col] in need_local_check:
+ continue
+ # micro-optimisation instead of calling self.score(req,
+ # rset, i, col): rset may be large
+ if not rset.get_entity(i, col).cw_has_perm(action):
+ return 0
+ score += 1
+ return score
+ return self.score(req, rset, row, col)
+
+ def score_entity(self, entity):
+ if entity.cw_has_perm(self.action):
+ return 1
+ return 0
+
+
+class has_add_permission(EClassPredicate):
+ """Return 1 if request's user has the add permission on entity type
+ specified in the `etype` initializer argument, or according to entity found
+ in the input content if not specified.
+
+ It also checks that the entity type is not a strict subobject (e.g. may only
+ be used as a composed of another entity).
+
+ See :class:`~cubicweb.predicates.EClassPredicate` documentation for entity
+ class lookup / score rules according to the input context when `etype` is
+ not specified.
+ """
+ def __init__(self, etype=None, **kwargs):
+ super(has_add_permission, self).__init__(**kwargs)
+ self.etype = etype
+
+ def __call__(self, cls, req, **kwargs):
+ if self.etype is None:
+ return super(has_add_permission, self).__call__(cls, req, **kwargs)
+ return self.score(cls, req, self.etype)
+
+ def score_class(self, eclass, req):
+ eschema = eclass.e_schema
+ if eschema.final or eschema.is_subobject(strict=True) \
+ or not eschema.has_perm(req, 'add'):
+ return 0
+ return 1
+
+
+class rql_condition(EntityPredicate):
+ """Return non-zero score if arbitrary rql specified in `expression`
+ initializer argument return some results for entity found in the input
+ context. Returned score is the number of items returned by the rql
+ condition.
+
+ `expression` is expected to be a string containing an rql expression, which
+ must use 'X' variable to represent the context entity and may use 'U' to
+ represent the request's user.
+
+ .. warning::
+ If simply testing value of some attribute/relation of context entity (X),
+ you should rather use the :class:`score_entity` predicate which will
+ benefit from the ORM's request entities cache.
+
+ See :class:`~cubicweb.predicates.EntityPredicate` documentation for entity
+ lookup / score rules according to the input context.
+ """
+ def __init__(self, expression, once_is_enough=None, mode='all', user_condition=False):
+ super(rql_condition, self).__init__(mode=mode, once_is_enough=once_is_enough)
+ self.user_condition = user_condition
+ if user_condition:
+ rql = 'Any COUNT(U) WHERE U eid %%(u)s, %s' % expression
+ elif 'U' in frozenset(split_expression(expression)):
+ rql = 'Any COUNT(X) WHERE X eid %%(x)s, U eid %%(u)s, %s' % expression
+ else:
+ rql = 'Any COUNT(X) WHERE X eid %%(x)s, %s' % expression
+ self.rql = rql
+
+ def __str__(self):
+ return '%s(%r)' % (self.__class__.__name__, self.rql)
+
+ def __call__(self, cls, req, **kwargs):
+ if self.user_condition:
+ try:
+ return req.execute(self.rql, {'u': req.user.eid})[0][0]
+ except Unauthorized:
+ return 0
+ else:
+ return super(rql_condition, self).__call__(cls, req, **kwargs)
+
+ def _score(self, req, eid):
+ try:
+ return req.execute(self.rql, {'x': eid, 'u': req.user.eid})[0][0]
+ except Unauthorized:
+ return 0
+
+ def score(self, req, rset, row, col):
+ return self._score(req, rset[row][col])
+
+ def score_entity(self, entity):
+ return self._score(entity._cw, entity.eid)
+
+
+# workflow predicates ###########################################################
+
+class is_in_state(score_entity):
+ """Return 1 if entity is in one of the states given as argument list
+
+ You should use this instead of your own :class:`score_entity` predicate to
+ avoid some gotchas:
+
+ * possible views gives a fake entity with no state
+ * you must use the latest tr info thru the workflow adapter for repository
+ side checking of the current state
+
+ In debug mode, this predicate can raise :exc:`ValueError` for unknown state names
+ (only checked on entities without a custom workflow)
+
+ :rtype: int
+ """
+ def __init__(self, *expected):
+ assert expected, self
+ self.expected = frozenset(expected)
+ def score(entity, expected=self.expected):
+ adapted = entity.cw_adapt_to('IWorkflowable')
+ # in debug mode only (time consuming)
+ if entity._cw.vreg.config.debugmode:
+ # validation can only be done for generic etype workflow because
+ # expected transition list could have been changed for a custom
+ # workflow (for the current entity)
+ if not entity.custom_workflow:
+ self._validate(adapted)
+ return self._score(adapted)
+ super(is_in_state, self).__init__(score)
+
+ def _score(self, adapted):
+ trinfo = adapted.latest_trinfo()
+ if trinfo is None: # entity is probably in its initial state
+ statename = adapted.state
+ else:
+ statename = trinfo.new_state.name
+ return statename in self.expected
+
+ def _validate(self, adapted):
+ wf = adapted.current_workflow
+ valid = [n.name for n in wf.reverse_state_of]
+ unknown = sorted(self.expected.difference(valid))
+ if unknown:
+ raise ValueError("%s: unknown state(s): %s"
+ % (wf.name, ",".join(unknown)))
+
+ def __str__(self):
+ return '%s(%s)' % (self.__class__.__name__,
+ ','.join(str(s) for s in self.expected))
+
+
+def on_fire_transition(etype, tr_names, from_state_name=None):
+ """Return 1 when entity of the type `etype` is going through transition of
+ a name included in `tr_names`.
+
+ You should use this predicate on 'after_add_entity' hook, since it's actually
+ looking for addition of `TrInfo` entities. Hence in the hook, `self.entity`
+ will reference the matching `TrInfo` entity, allowing to get all the
+ transition details (including the entity to which is applied the transition
+ but also its original state, transition, destination state, user...).
+
+ See :class:`cubicweb.entities.wfobjs.TrInfo` for more information.
+ """
+ if from_state_name is not None:
+ warn("on_fire_transition's from_state_name argument is unused", DeprecationWarning)
+ if isinstance(tr_names, string_types):
+ tr_names = set((tr_names,))
+ def match_etype_and_transition(trinfo):
+ # take care trinfo.transition is None when calling change_state
+ return (trinfo.transition and trinfo.transition.name in tr_names
+ # is_instance() first two arguments are 'cls' (unused, so giving
+ # None is fine) and the request/session
+ and is_instance(etype)(None, trinfo._cw, entity=trinfo.for_entity))
+
+ return is_instance('TrInfo') & score_entity(match_etype_and_transition)
+
+
+class match_transition(ExpectedValuePredicate):
+ """Return 1 if `transition` argument is found in the input context which has
+ a `.name` attribute matching one of the expected names given to the
+ initializer.
+
+ This predicate is expected to be used to customise the status change form in
+ the web ui.
+ """
+ def __call__(self, cls, req, transition=None, **kwargs):
+ # XXX check this is a transition that apply to the object?
+ if transition is None:
+ treid = req.form.get('treid', None)
+ if treid:
+ transition = req.entity_from_eid(treid)
+ if transition is not None and getattr(transition, 'name', None) in self.expected:
+ return 1
+ return 0
+
+
+# logged user predicates ########################################################
+
+@objectify_predicate
+def no_cnx(cls, req, **kwargs):
+ """Return 1 if the web session has no connection set. This occurs when
+ anonymous access is not allowed and user isn't authenticated.
+ """
+ if not req.cnx:
+ return 1
+ return 0
+
+
+@objectify_predicate
+def authenticated_user(cls, req, **kwargs):
+ """Return 1 if the user is authenticated (i.e. not the anonymous user).
+ """
+ if req.session.anonymous_session:
+ return 0
+ return 1
+
+
+@objectify_predicate
+def anonymous_user(cls, req, **kwargs):
+ """Return 1 if the user is not authenticated (i.e. is the anonymous user).
+ """
+ if req.session.anonymous_session:
+ return 1
+ return 0
+
+
+class match_user_groups(ExpectedValuePredicate):
+ """Return a non-zero score if request's user is in at least one of the
+ groups given as initializer argument. Returned score is the number of groups
+ in which the user is.
+
+ If the special 'owners' group is given and `rset` is specified in the input
+ context:
+
+ * if `row` is specified check the entity at the given `row`/`col` (default
+ to 0) is owned by the user
+
+ * else check all entities in `col` (default to 0) are owned by the user
+ """
+
+ def __call__(self, cls, req, rset=None, row=None, col=0, **kwargs):
+ if not getattr(req, 'cnx', True): # default to True for repo session instances
+ return 0
+ user = req.user
+ if user is None:
+ return int('guests' in self.expected)
+ score = user.matching_groups(self.expected)
+ if not score and 'owners' in self.expected and rset:
+ if row is not None:
+ if not user.owns(rset[row][col]):
+ return 0
+ score = 1
+ else:
+ score = all(user.owns(r[col]) for r in rset)
+ return score
+
+# Web request predicates ########################################################
+
+# XXX deprecate
+@objectify_predicate
+def primary_view(cls, req, view=None, **kwargs):
+ """Return 1 if:
+
+ * *no view is specified* in the input context
+
+ * a view is specified and its `.is_primary()` method return True
+
+ This predicate is usually used by contextual components that only want to
+ appear for the primary view of an entity.
+ """
+ if view is not None and not view.is_primary():
+ return 0
+ return 1
+
+
+@objectify_predicate
+def contextual(cls, req, view=None, **kwargs):
+ """Return 1 if view's contextual property is true"""
+ if view is not None and view.contextual:
+ return 1
+ return 0
+
+
+class match_view(ExpectedValuePredicate):
+ """Return 1 if a view is specified an as its registry id is in one of the
+ expected view id given to the initializer.
+ """
+ def __call__(self, cls, req, view=None, **kwargs):
+ if view is None or not view.__regid__ in self.expected:
+ return 0
+ return 1
+
+
+class match_context(ExpectedValuePredicate):
+
+ def __call__(self, cls, req, context=None, **kwargs):
+ if not context in self.expected:
+ return 0
+ return 1
+
+
+# XXX deprecate
+@objectify_predicate
+def match_context_prop(cls, req, context=None, **kwargs):
+ """Return 1 if:
+
+ * no `context` is specified in input context (take care to confusion, here
+ `context` refers to a string given as an argument to the input context...)
+
+ * specified `context` is matching the context property value for the
+ appobject using this predicate
+
+ * the appobject's context property value is None
+
+ This predicate is usually used by contextual components that want to appear
+ in a configurable place.
+ """
+ if context is None:
+ return 1
+ propval = req.property_value('%s.%s.context' % (cls.__registry__,
+ cls.__regid__))
+ if propval and context != propval:
+ return 0
+ return 1
+
+
+class match_search_state(ExpectedValuePredicate):
+ """Return 1 if the current request search state is in one of the expected
+ states given to the initializer.
+
+ Known search states are either 'normal' or 'linksearch' (eg searching for an
+ object to create a relation with another).
+
+ This predicate is usually used by actions that want to appear or not according
+ to the ui search state.
+ """
+
+ def __call__(self, cls, req, **kwargs):
+ try:
+ if not req.search_state[0] in self.expected:
+ return 0
+ except AttributeError:
+ return 1 # class doesn't care about search state, accept it
+ return 1
+
+
+class match_form_params(ExpectedValuePredicate):
+ """Return non-zero score if parameter names specified as initializer
+ arguments are specified in request's form parameters.
+
+ Return a score corresponding to the number of expected parameters.
+
+ When multiple parameters are expected, all of them should be found in
+ the input context unless `mode` keyword argument is given to 'any',
+ in which case a single matching parameter is enough.
+ """
+
+ def __init__(self, *expected, **kwargs):
+ """override default __init__ to allow either named or positional
+ parameters.
+ """
+ if kwargs and expected:
+ raise ValueError("match_form_params() can't be called with both "
+ "positional and named arguments")
+ if expected:
+ if len(expected) == 1 and not isinstance(expected[0], string_types):
+ raise ValueError("match_form_params() positional arguments "
+ "must be strings")
+ super(match_form_params, self).__init__(*expected)
+ else:
+ super(match_form_params, self).__init__(kwargs)
+
+ def _values_set(self, cls, req, **kwargs):
+ return req.form
+
+
+class match_http_method(ExpectedValuePredicate):
+ """Return non-zero score if one of the HTTP methods specified as
+ initializer arguments is the HTTP method of the request (GET, POST, ...).
+ """
+
+ def __call__(self, cls, req, **kwargs):
+ return int(req.http_method() in self.expected)
+
+
+class match_edited_type(ExpectedValuePredicate):
+ """return non-zero if main edited entity type is the one specified as
+ initializer argument, or is among initializer arguments if `mode` == 'any'.
+ """
+
+ def _values_set(self, cls, req, **kwargs):
+ try:
+ return frozenset((req.form['__type:%s' % req.form['__maineid']],))
+ except KeyError:
+ return frozenset()
+
+
+class match_form_id(ExpectedValuePredicate):
+ """return non-zero if request form identifier is the one specified as
+ initializer argument, or is among initializer arguments if `mode` == 'any'.
+ """
+
+ def _values_set(self, cls, req, **kwargs):
+ try:
+ return frozenset((req.form['__form_id'],))
+ except KeyError:
+ return frozenset()
+
+
+class specified_etype_implements(is_instance):
+ """Return non-zero score if the entity type specified by an 'etype' key
+ searched in (by priority) input context kwargs and request form parameters
+ match a known entity type (case insensitively), and its associated entity
+ class is of one of the type(s) given to the initializer. If multiple
+ arguments are given, matching one of them is enough.
+
+ .. note:: as with :class:`~cubicweb.predicates.is_instance`, entity types
+ should be given as string and the score will reflect class
+ proximity so the most specific object will be selected.
+
+ This predicate is usually used by views holding entity creation forms (since
+ we've no result set to work on).
+ """
+
+ def __call__(self, cls, req, **kwargs):
+ try:
+ etype = kwargs['etype']
+ except KeyError:
+ try:
+ etype = req.form['etype']
+ except KeyError:
+ return 0
+ else:
+ # only check this is a known type if etype comes from req.form,
+ # else we want the error to propagate
+ try:
+ etype = req.vreg.case_insensitive_etypes[etype.lower()]
+ req.form['etype'] = etype
+ except KeyError:
+ return 0
+ score = self.score_class(req.vreg['etypes'].etype_class(etype), req)
+ if score:
+ eschema = req.vreg.schema.eschema(etype)
+ if eschema.may_have_permission('add', req):
+ return score
+ return 0
+
+
+class attribute_edited(EntityPredicate):
+ """Scores if the specified attribute has been edited This is useful for
+ selection of forms by the edit controller.
+
+ The initial use case is on a form, in conjunction with match_transition,
+ which will not score at edit time::
+
+ is_instance('Version') & (match_transition('ready') |
+ attribute_edited('publication_date'))
+ """
+ def __init__(self, attribute, once_is_enough=None, mode='all'):
+ super(attribute_edited, self).__init__(mode=mode, once_is_enough=once_is_enough)
+ self._attribute = attribute
+
+ def score_entity(self, entity):
+ return eid_param(role_name(self._attribute, 'subject'), entity.eid) in entity._cw.form
+
+
+# Other predicates ##############################################################
+
+class match_exception(ExpectedValuePredicate):
+ """Return 1 if exception given as `exc` in the input context is an instance
+ of one of the classes given on instantiation of this predicate.
+ """
+ def __init__(self, *expected):
+ assert expected, self
+ # we want a tuple, not a set as done in the parent class
+ self.expected = expected
+
+ def __call__(self, cls, req, exc=None, **kwargs):
+ if exc is not None and isinstance(exc, self.expected):
+ return 1
+ return 0
+
+
+@objectify_predicate
+def debug_mode(cls, req, rset=None, **kwargs):
+ """Return 1 if running in debug mode."""
+ return req.vreg.config.debugmode and 1 or 0
diff -r 1400aee10df4 -r faf279e33298 cubicweb/pylintext.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/pylintext.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,114 @@
+"""Pylint plugin to analyse cubicweb cubes
+
+Done:
+* turn functions decorated by @objectify_predicate into classes
+* add yams base types to yams.buildobjs module
+* add data() function to uiprops module's namespace
+* avoid 'abstract method not implemented' for `cell_call`, `entity_call`, `render_body`
+* avoid invalid-name on schema relation class names
+
+TODO:
+* avoid invalid class name for predicates and predicates
+* W:188, 0: Method '__lt__' is abstract in class 'Entity' but is not overridden (abstract-method)
+* generate entity attributes from the schema?
+"""
+
+from astroid import MANAGER, InferenceError, nodes, ClassDef, FunctionDef
+from astroid.builder import AstroidBuilder
+
+from pylint.checkers.utils import unimplemented_abstract_methods, class_is_abstract
+
+
+def turn_function_to_class(node):
+ """turn a Function node into a Class node (in-place)"""
+ node.__class__ = ClassDef
+ node.bases = ()
+ # mark class as a new style class
+ node._newstyle = True
+ # remove return nodes so that we don't get warned about 'return outside
+ # function' by pylint
+ for rnode in node.nodes_of_class(nodes.Return):
+ rnode.parent.body.remove(rnode)
+ # add __init__ method to avoid no-init
+
+ # that seems to be enough :)
+
+
+def cubicweb_transform(module):
+ # handle objectify_predicate decorator (and its former name until bw compat
+ # is kept). Only look at module level functions, should be enough.
+ for assnodes in module.locals.values():
+ for node in assnodes:
+ if isinstance(node, FunctionDef) and node.decorators:
+ for decorator in node.decorators.nodes:
+ try:
+ for infered in decorator.infer():
+ if infered.name in ('objectify_predicate', 'objectify_selector'):
+ turn_function_to_class(node)
+ break
+ else:
+ continue
+ break
+ except InferenceError:
+ continue
+ # add yams base types into 'yams.buildobjs', astng doesn't grasp globals()
+ # magic in there
+ if module.name == 'yams.buildobjs':
+ from yams import BASE_TYPES
+ for etype in BASE_TYPES:
+ module.locals[etype] = [ClassDef(etype, None)]
+ # add data() to uiprops module
+ elif module.name.split('.')[-1] == 'uiprops':
+ fake = AstroidBuilder(MANAGER).string_build('''
+def data(string):
+ return u''
+''')
+ module.locals['data'] = fake.locals['data']
+ # handle lower case with underscores for relation names in schema.py
+ if not module.qname().endswith('.schema'):
+ return
+ schema_locals = module.locals
+ for assnodes in schema_locals.values():
+ for node in assnodes:
+ if not isinstance(node, ClassDef):
+ continue
+ # XXX can we infer ancestor classes? it would be better to know for sure that
+ # one of the mother classes is yams.buildobjs.RelationDefinition for instance
+ for base in node.basenames:
+ if base in ('RelationDefinition', 'ComputedRelation', 'RelationType'):
+ new_name = node.name.replace('_', '').capitalize()
+ schema_locals[new_name] = schema_locals[node.name]
+ del schema_locals[node.name]
+ node.name = new_name
+
+
+def cubicweb_abstractmethods_transform(classdef):
+ if class_is_abstract(classdef):
+ return
+
+ def is_abstract(method):
+ return method.is_abstract(pass_is_abstract=False)
+
+ methods = sorted(
+ unimplemented_abstract_methods(classdef, is_abstract).items(),
+ key=lambda item: item[0],
+ )
+
+ dummy_method = AstroidBuilder(MANAGER).string_build('''
+def dummy_method(self):
+ """"""
+''')
+
+ for name, method in methods:
+ owner = method.parent.frame()
+ if owner is classdef:
+ continue
+ if name not in classdef.locals:
+ if name in ('cell_call', 'entity_call', 'render_body'):
+ classdef.set_local(name, dummy_method)
+
+
+def register(linter):
+ """called when loaded by pylint --load-plugins, nothing to do here"""
+ MANAGER.register_transform(nodes.Module, cubicweb_transform)
+ MANAGER.register_transform(ClassDef, cubicweb_abstractmethods_transform)
diff -r 1400aee10df4 -r faf279e33298 cubicweb/pyramid/__init__.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/pyramid/__init__.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,193 @@
+import os
+from warnings import warn
+import wsgicors
+
+from cubicweb.cwconfig import CubicWebConfiguration as cwcfg
+from pyramid.config import Configurator
+from pyramid.settings import asbool, aslist
+
+try:
+ from configparser import SafeConfigParser
+except ImportError:
+ from ConfigParser import SafeConfigParser
+
+
+def make_cubicweb_application(cwconfig, settings=None):
+ """
+ Create a pyramid-based CubicWeb instance from a cubicweb configuration.
+
+ It is initially meant to be used by the 'pyramid' command of cubicweb-ctl.
+
+ :param cwconfig: A CubicWeb configuration
+ :returns: A Pyramid config object
+ """
+ settings = dict(settings) if settings else {}
+ settings.update(settings_from_cwconfig(cwconfig))
+ config = Configurator(settings=settings)
+ config.registry['cubicweb.config'] = cwconfig
+ config.include('cubicweb.pyramid')
+ return config
+
+def settings_from_cwconfig(cwconfig):
+ '''
+ Extract settings from pyramid.ini and pyramid-debug.ini (if in debug)
+
+ Can be used to configure middleware WSGI with settings from pyramid.ini files
+
+ :param cwconfig: A CubicWeb configuration
+ :returns: A settings dictionary
+ '''
+ settings_filenames = [os.path.join(cwconfig.apphome, 'pyramid.ini')]
+ settings = {}
+ if cwconfig.debugmode:
+ settings_filenames.insert(
+ 0, os.path.join(cwconfig.apphome, 'pyramid-debug.ini'))
+
+ settings.update({
+ 'pyramid.debug_authorization': True,
+ 'pyramid.debug_notfound': True,
+ 'pyramid.debug_routematch': True,
+ 'pyramid.reload_templates': True,
+ })
+
+ for fname in settings_filenames:
+ if os.path.exists(fname):
+ cp = SafeConfigParser()
+ cp.read(fname)
+ settings.update(cp.items('main'))
+ break
+
+ return settings
+
+
+def wsgi_application_from_cwconfig(
+ cwconfig,
+ profile=False, profile_output=None, profile_dump_every=None):
+ """ Build a WSGI application from a cubicweb configuration
+
+ :param cwconfig: A CubicWeb configuration
+ :param profile: Enable profiling. See :ref:`profiling`.
+ :param profile_output: Profiling output filename. See :ref:`profiling`.
+ :param profile_dump_every: Profiling number of requests before dumping the
+ stats. See :ref:`profiling`.
+
+ :returns: A fully operational WSGI application
+ """
+ config = make_cubicweb_application(cwconfig)
+ profile = profile or asbool(config.registry.settings.get(
+ 'cubicweb.profile.enable', False))
+ if profile:
+ config.add_route('profile_ping', '_profile/ping')
+ config.add_route('profile_cnx', '_profile/cnx')
+ config.scan('cubicweb.pyramid.profile')
+ app = config.make_wsgi_app()
+ # This replaces completely web/cors.py, which is not used by
+ # cubicweb.pyramid anymore
+ app = wsgicors.CORS(
+ app,
+ origin=' '.join(cwconfig['access-control-allow-origin']),
+ headers=', '.join(cwconfig['access-control-allow-headers']),
+ methods=', '.join(cwconfig['access-control-allow-methods']),
+ credentials='true')
+
+ if profile:
+ from cubicweb.pyramid.profile import wsgi_profile
+ filename = profile_output or config.registry.settings.get(
+ 'cubicweb.profile.output', 'program.prof')
+ dump_every = profile_dump_every or config.registry.settings.get(
+ 'cubicweb.profile.dump_every', 100)
+ app = wsgi_profile(app, filename=filename, dump_every=dump_every)
+ return app
+
+
+def wsgi_application(instance_name=None, debug=None):
+ """ Build a WSGI application from a cubicweb instance name
+
+ :param instance_name: Name of the cubicweb instance (optional). If not
+ provided, :envvar:`CW_INSTANCE` must exists.
+ :param debug: Enable/disable the debug mode. If defined to True or False,
+ overrides :envvar:`CW_DEBUG`.
+
+ The following environment variables are used if they exist:
+
+ .. envvar:: CW_INSTANCE
+
+ A CubicWeb instance name.
+
+ .. envvar:: CW_DEBUG
+
+ If defined, the debugmode is enabled.
+
+ The function can be used as an entry-point for third-party wsgi containers.
+ Below is a sample uwsgi configuration file:
+
+ .. code-block:: ini
+
+ [uwsgi]
+ http = 127.0.1.1:8080
+ env = CW_INSTANCE=myinstance
+ env = CW_DEBUG=1
+ module = cubicweb.pyramid:wsgi_application()
+ virtualenv = /home/user/.virtualenvs/myvirtualenv
+ processes = 1
+ threads = 8
+ stats = 127.0.0.1:9191
+ plugins = http,python
+
+ """
+ if instance_name is None:
+ instance_name = os.environ['CW_INSTANCE']
+ if debug is None:
+ debug = 'CW_DEBUG' in os.environ
+
+ cwconfig = cwcfg.config_for(instance_name, debugmode=debug)
+
+ return wsgi_application_from_cwconfig(cwconfig)
+
+
+def includeme(config):
+ """Set-up a CubicWeb instance.
+
+ The CubicWeb instance can be set in several ways:
+
+ - Provide an already loaded CubicWeb config instance in the registry:
+
+ .. code-block:: python
+
+ config.registry['cubicweb.config'] = your_config_instance
+
+ - Provide an instance name in the pyramid settings with
+ :confval:`cubicweb.instance`.
+
+ """
+ cwconfig = config.registry.get('cubicweb.config')
+
+ if cwconfig is None:
+ debugmode = asbool(
+ config.registry.settings.get('cubicweb.debug', False))
+ cwconfig = cwcfg.config_for(
+ config.registry.settings['cubicweb.instance'], debugmode=debugmode)
+ config.registry['cubicweb.config'] = cwconfig
+
+ if cwconfig.debugmode:
+ try:
+ config.include('pyramid_debugtoolbar')
+ except ImportError:
+ warn('pyramid_debugtoolbar package not available, install it to '
+ 'get UI debug features', RuntimeWarning)
+
+ config.registry['cubicweb.repository'] = repo = cwconfig.repository()
+ config.registry['cubicweb.registry'] = repo.vreg
+
+ if asbool(config.registry.settings.get('cubicweb.defaults', True)):
+ config.include('cubicweb.pyramid.defaults')
+
+ for name in aslist(config.registry.settings.get('cubicweb.includes', [])):
+ config.include(name)
+
+ config.include('cubicweb.pyramid.tools')
+ config.include('cubicweb.pyramid.predicates')
+ config.include('cubicweb.pyramid.core')
+
+ if asbool(config.registry.settings.get('cubicweb.bwcompat', True)):
+ config.include('cubicweb.pyramid.bwcompat')
diff -r 1400aee10df4 -r faf279e33298 cubicweb/pyramid/auth.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/pyramid/auth.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,180 @@
+import datetime
+import logging
+import warnings
+
+from zope.interface import implementer
+
+from pyramid.settings import asbool
+from pyramid.authorization import ACLAuthorizationPolicy
+from cubicweb.pyramid.core import get_principals
+from pyramid_multiauth import MultiAuthenticationPolicy
+
+from pyramid.authentication import AuthTktAuthenticationPolicy
+
+from pyramid.interfaces import IAuthenticationPolicy
+
+log = logging.getLogger(__name__)
+
+
+@implementer(IAuthenticationPolicy)
+class UpdateLoginTimeAuthenticationPolicy(object):
+ """An authentication policy that update the user last_login_time.
+
+ The update is done in the 'remember' method, which is called by the login
+ views login,
+
+ Usually used via :func:`includeme`.
+ """
+
+ def authenticated_userid(self, request):
+ pass
+
+ def effective_principals(self, request):
+ return ()
+
+ def remember(self, request, principal, **kw):
+ try:
+ repo = request.registry['cubicweb.repository']
+ with repo.internal_cnx() as cnx:
+ cnx.execute(
+ "SET U last_login_time %(now)s WHERE U eid %(user)s", {
+ 'now': datetime.datetime.now(),
+ 'user': principal})
+ cnx.commit()
+ except:
+ log.exception("Failed to update last_login_time")
+ return ()
+
+ def forget(self, request):
+ return ()
+
+
+class CWAuthTktAuthenticationPolicy(AuthTktAuthenticationPolicy):
+ """
+ An authentication policy that inhibits the call to 'remember' if a
+ 'persistent' argument is passed to it, and is equal to the value that
+ was passed to the constructor.
+
+ This allows combining two policies with different settings and selecting them
+ by just setting this argument.
+ """
+ def __init__(self, secret, persistent, defaults={}, prefix='', **settings):
+ self.persistent = persistent
+ unset = object()
+ kw = {}
+ # load string settings
+ for name in ('cookie_name', 'path', 'domain', 'hashalg'):
+ value = settings.get(prefix + name, defaults.get(name, unset))
+ if value is not unset:
+ kw[name] = value
+ # load boolean settings
+ for name in ('secure', 'include_ip', 'http_only', 'wild_domain',
+ 'parent_domain', 'debug'):
+ value = settings.get(prefix + name, defaults.get(name, unset))
+ if value is not unset:
+ kw[name] = asbool(value)
+ # load int settings
+ for name in ('timeout', 'reissue_time', 'max_age'):
+ value = settings.get(prefix + name, defaults.get(name, unset))
+ if value is not unset:
+ kw[name] = int(value)
+ super(CWAuthTktAuthenticationPolicy, self).__init__(secret, **kw)
+
+ def remember(self, request, principals, **kw):
+ if 'persistent' not in kw or kw.pop('persistent') == self.persistent:
+ return super(CWAuthTktAuthenticationPolicy, self).remember(
+ request, principals, **kw)
+ else:
+ return ()
+
+
+def includeme(config):
+ """ Activate the CubicWeb AuthTkt authentication policy.
+
+ Usually called via ``config.include('cubicweb.pyramid.auth')``.
+
+ See also :ref:`defaults_module`
+ """
+ settings = config.registry.settings
+
+ policies = []
+
+ if asbool(settings.get('cubicweb.auth.update_login_time', True)):
+ policies.append(UpdateLoginTimeAuthenticationPolicy())
+
+ if asbool(settings.get('cubicweb.auth.authtkt', True)):
+ session_prefix = 'cubicweb.auth.authtkt.session.'
+ persistent_prefix = 'cubicweb.auth.authtkt.persistent.'
+
+ try:
+ secret = config.registry['cubicweb.config']['pyramid-auth-secret']
+ warnings.warn(
+ "pyramid-auth-secret from all-in-one is now "
+ "cubicweb.auth.authtkt.[session|persistent].secret",
+ DeprecationWarning)
+ except:
+ secret = 'notsosecret'
+
+ session_secret = settings.get(
+ session_prefix + 'secret', secret)
+ persistent_secret = settings.get(
+ persistent_prefix + 'secret', secret)
+
+ if 'notsosecret' in (session_secret, persistent_secret):
+ warnings.warn('''
+
+ !! SECURITY WARNING !!
+
+ The authentication cookies are signed with a static secret key.
+
+ Configure the following options in your pyramid.ini file:
+
+ - cubicweb.auth.authtkt.session.secret
+ - cubicweb.auth.authtkt.persistent.secret
+
+ YOU SHOULD STOP THIS INSTANCE unless your really know what you
+ are doing !!
+
+ ''')
+
+ policies.append(
+ CWAuthTktAuthenticationPolicy(
+ session_secret, False,
+ defaults={
+ 'hashalg': 'sha512',
+ 'cookie_name': 'auth_tkt',
+ 'timeout': 1200,
+ 'reissue_time': 120,
+ 'http_only': True,
+ 'secure': True
+ },
+ prefix=session_prefix,
+ **settings
+ )
+ )
+
+ policies.append(
+ CWAuthTktAuthenticationPolicy(
+ persistent_secret, True,
+ defaults={
+ 'hashalg': 'sha512',
+ 'cookie_name': 'pauth_tkt',
+ 'max_age': 3600*24*30,
+ 'reissue_time': 3600*24,
+ 'http_only': True,
+ 'secure': True
+ },
+ prefix=persistent_prefix,
+ **settings
+ )
+ )
+
+ kw = {}
+ if asbool(settings.get('cubicweb.auth.groups_principals', True)):
+ kw['callback'] = get_principals
+
+ authpolicy = MultiAuthenticationPolicy(policies, **kw)
+ config.registry['cubicweb.authpolicy'] = authpolicy
+
+ config.set_authentication_policy(authpolicy)
+ config.set_authorization_policy(ACLAuthorizationPolicy())
diff -r 1400aee10df4 -r faf279e33298 cubicweb/pyramid/bwcompat.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/pyramid/bwcompat.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,213 @@
+import sys
+import logging
+
+from pyramid import security
+from pyramid import tweens
+from pyramid.httpexceptions import HTTPSeeOther
+from pyramid import httpexceptions
+from pyramid.settings import asbool
+
+import cubicweb
+import cubicweb.web
+
+from cubicweb.web.application import CubicWebPublisher
+
+from cubicweb.web import LogOut, PublishException
+
+from cubicweb.pyramid.core import cw_to_pyramid
+
+
+log = logging.getLogger(__name__)
+
+
+class PyramidSessionHandler(object):
+ """A CW Session handler that relies on the pyramid API to fetch the
+ needed information.
+
+ It implements the :class:`cubicweb.web.application.CookieSessionHandler`
+ API.
+ """
+
+ def __init__(self, appli):
+ self.appli = appli
+
+ def get_session(self, req):
+ return req._request.cw_session
+
+ def logout(self, req, goto_url):
+ raise LogOut(url=goto_url)
+
+
+class CubicWebPyramidHandler(object):
+ """ A Pyramid request handler that relies on a cubicweb instance to do the
+ whole job
+
+ :param appli: A CubicWeb 'Application' object.
+ """
+ def __init__(self, appli):
+ self.appli = appli
+
+ def __call__(self, request):
+ """
+ Handler that mimics what CubicWebPublisher.main_handle_request and
+ CubicWebPublisher.core_handle do
+ """
+
+ # XXX The main handler of CW forbid anonymous https connections
+ # I guess we can drop this "feature" but in doubt I leave this comment
+ # so we don't forget about it. (cdevienne)
+
+ req = request.cw_request
+ vreg = request.registry['cubicweb.registry']
+
+ try:
+ content = None
+ try:
+ with cw_to_pyramid(request):
+ ctrlid, rset = self.appli.url_resolver.process(req,
+ req.path)
+
+ try:
+ controller = vreg['controllers'].select(
+ ctrlid, req, appli=self.appli)
+ except cubicweb.NoSelectableObject:
+ raise httpexceptions.HTTPUnauthorized(
+ req._('not authorized'))
+
+ req.update_search_state()
+ content = controller.publish(rset=rset)
+
+ # XXX this auto-commit should be handled by the cw_request
+ # cleanup or the pyramid transaction manager.
+ # It is kept here to have the ValidationError handling bw
+ # compatible
+ if req.cnx:
+ txuuid = req.cnx.commit()
+ # commited = True
+ if txuuid is not None:
+ req.data['last_undoable_transaction'] = txuuid
+ except cubicweb.web.ValidationError as ex:
+ # XXX The validation_error_handler implementation is light, we
+ # should redo it better in cw_to_pyramid, so it can be properly
+ # handled when raised from a cubicweb view.
+ # BUT the real handling of validation errors should be done
+ # earlier in the controllers, not here. In the end, the
+ # ValidationError should never be handled here.
+ content = self.appli.validation_error_handler(req, ex)
+ except cubicweb.web.RemoteCallFailed:
+ # XXX The default pyramid error handler (or one that we provide
+ # for this exception) should be enough
+ # content = self.appli.ajax_error_handler(req, ex)
+ raise
+
+ if content is not None:
+ request.response.body = content
+
+
+ except LogOut as ex:
+ # The actual 'logging out' logic should be in separated function
+ # that is accessible by the pyramid views
+ headers = security.forget(request)
+ raise HTTPSeeOther(ex.url, headers=headers)
+ except cubicweb.AuthenticationError:
+ # Will occur upon access to req.cnx which is a
+ # cubicweb.dbapi._NeedAuthAccessMock.
+ if not content:
+ content = vreg['views'].main_template(req, 'login')
+ request.response.status_code = 403
+ request.response.body = content
+ finally:
+ # XXX CubicWebPyramidRequest.headers_out should
+ # access directly the pyramid response headers.
+ request.response.headers.clear()
+ for k, v in req.headers_out.getAllRawHeaders():
+ for item in v:
+ request.response.headers.add(k, item)
+
+ return request.response
+
+ def error_handler(self, exc, request):
+ req = request.cw_request
+ if isinstance(exc, httpexceptions.HTTPException):
+ request.response = exc
+ elif isinstance(exc, PublishException) and exc.status is not None:
+ request.response = httpexceptions.exception_response(exc.status)
+ else:
+ request.response = httpexceptions.HTTPInternalServerError()
+ request.response.cache_control = 'no-cache'
+ vreg = request.registry['cubicweb.registry']
+ excinfo = sys.exc_info()
+ req.reset_message()
+ if req.ajax_request:
+ content = self.appli.ajax_error_handler(req, exc)
+ else:
+ try:
+ req.data['ex'] = exc
+ req.data['excinfo'] = excinfo
+ errview = vreg['views'].select('error', req)
+ template = self.appli.main_template_id(req)
+ content = vreg['views'].main_template(req, template, view=errview)
+ except Exception:
+ content = vreg['views'].main_template(req, 'error-template')
+ log.exception(exc)
+ request.response.body = content
+ return request.response
+
+
+class TweenHandler(object):
+ """ A Pyramid tween handler that submits unhandled requests to a Cubicweb
+ handler.
+
+ The CubicWeb handler to use is expected to be in the pyramid registry, at
+ key ``'cubicweb.handler'``.
+ """
+ def __init__(self, handler, registry):
+ self.handler = handler
+ self.cwhandler = registry['cubicweb.handler']
+
+ def __call__(self, request):
+ if request.path.startswith('/https/'):
+ request.environ['PATH_INFO'] = request.environ['PATH_INFO'][6:]
+ assert not request.path.startswith('/https/')
+ request.scheme = 'https'
+ try:
+ response = self.handler(request)
+ except httpexceptions.HTTPNotFound:
+ response = self.cwhandler(request)
+ return response
+
+
+def includeme(config):
+ """ Set up a tween app that will handle the request if the main application
+ raises a HTTPNotFound exception.
+
+ This is to keep legacy compatibility for cubes that makes use of the
+ cubicweb urlresolvers.
+
+ It provides, for now, support for cubicweb controllers, but this feature
+ will be reimplemented separately in a less compatible way.
+
+ It is automatically included by the configuration system, but can be
+ disabled in the :ref:`pyramid_settings`:
+
+ .. code-block:: ini
+
+ cubicweb.bwcompat = no
+ """
+ cwconfig = config.registry['cubicweb.config']
+ repository = config.registry['cubicweb.repository']
+ cwappli = CubicWebPublisher(
+ repository, cwconfig,
+ session_handler_fact=PyramidSessionHandler)
+ cwhandler = CubicWebPyramidHandler(cwappli)
+
+ config.registry['cubicweb.appli'] = cwappli
+ config.registry['cubicweb.handler'] = cwhandler
+
+ config.add_tween(
+ 'cubicweb.pyramid.bwcompat.TweenHandler', under=tweens.EXCVIEW)
+ if asbool(config.registry.settings.get(
+ 'cubicweb.bwcompat.errorhandler', True)):
+ config.add_view(cwhandler.error_handler, context=Exception)
+ # XXX why do i need this?
+ config.add_view(cwhandler.error_handler, context=httpexceptions.HTTPForbidden)
diff -r 1400aee10df4 -r faf279e33298 cubicweb/pyramid/core.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/pyramid/core.py Mon Sep 26 14:52:12 2016 +0200
@@ -0,0 +1,400 @@
+import itertools
+
+from contextlib import contextmanager
+from warnings import warn
+from cgi import FieldStorage
+
+import rql
+
+from cubicweb.web.request import CubicWebRequestBase
+from cubicweb import repoapi
+
+import cubicweb
+import cubicweb.web
+from cubicweb.server import session as cwsession
+
+from pyramid import httpexceptions
+
+from cubicweb.pyramid import tools
+
+import logging
+
+log = logging.getLogger(__name__)
+
+
+CW_321 = cubicweb.__pkginfo__.numversion >= (3, 21, 0)
+
+
+class Connection(cwsession.Connection):
+ """ A specialised Connection that accesses the session data through a
+ property.
+
+ This behavior makes sure the actual session data is not loaded until
+ actually accessed.
+ """
+ def __init__(self, session, *args, **kw):
+ super(Connection, self).__init__(session, *args, **kw)
+ self._session = session
+
+ def _get_session_data(self):
+ return self._session.data
+
+ def _set_session_data(self, data):
+ pass
+
+ _session_data = property(_get_session_data, _set_session_data)
+
+
+class Session(cwsession.Session):
+ """ A Session that accesses the session data through a property.
+
+ Along with :class:`Connection`, it avoids any load of the pyramid session
+ data until it is actually accessed.
+ """
+ def __init__(self, pyramid_request, user, repo):
+ super(Session, self).__init__(user, repo)
+ self._pyramid_request = pyramid_request
+
+ def get_data(self):
+ if not getattr(self, '_protect_data_access', False):
+ self._data_accessed = True
+ return self._pyramid_request.session
+
+ def set_data(self, data):
+ if getattr(self, '_data_accessed', False):
+ self._pyramid_request.session.clear()
+ self._pyramid_request.session.update(data)
+
+ data = property(get_data, set_data)
+
+ def new_cnx(self):
+ self._protect_data_access = True
+ try:
+ return Connection(self)
+ finally:
+ self._protect_data_access = False
+
+
+def cw_headers(request):
+ return itertools.chain(
+ *[[(k, item) for item in v]
+ for k, v in request.cw_request.headers_out.getAllRawHeaders()])
+
+
+@contextmanager
+def cw_to_pyramid(request):
+ """ Context manager to wrap a call to the cubicweb API.
+
+ All CW exceptions will be transformed into their pyramid equivalent.
+ When needed, some CW response bits may be converted too (mainly headers)"""
+ try:
+ yield
+ except cubicweb.web.Redirect as ex:
+ assert 300 <= ex.status < 400
+ raise httpexceptions.status_map[ex.status](
+ ex.location, headers=cw_headers(request))
+ except cubicweb.web.StatusResponse as ex:
+ warn('[3.16] StatusResponse is deprecated use req.status_out',
+ DeprecationWarning, stacklevel=2)
+ request.body = ex.content
+ request.status_int = ex.status
+ except cubicweb.web.Unauthorized as ex:
+ raise httpexceptions.HTTPForbidden(
+ request.cw_request._(
+ 'You\'re not authorized to access this page. '
+ 'If you think you should, please contact the site '
+ 'administrator.'),
+ headers=cw_headers(request))
+ except cubicweb.web.Forbidden:
+ raise httpexceptions.HTTPForbidden(
+ request.cw_request._(
+ 'This action is forbidden. '
+ 'If you think it should be allowed, please contact the site '
+ 'administrator.'),
+ headers=cw_headers(request))
+ except (rql.BadRQLQuery, cubicweb.web.RequestError):
+ raise
+
+
+class CubicWebPyramidRequest(CubicWebRequestBase):
+ """ A CubicWeb request that only wraps a pyramid request.
+
+ :param request: A pyramid request
+
+ """
+ def __init__(self, request):
+ self._request = request
+
+ self.path = request.upath_info
+
+ vreg = request.registry['cubicweb.registry']
+ https = request.scheme == 'https'
+
+ post = request.params.mixed()
+ headers_in = request.headers
+
+ super(CubicWebPyramidRequest, self).__init__(vreg, https, post,
+ headers=headers_in)
+
+ self.content = request.body_file_seekable
+
+ def setup_params(self, params):
+ self.form = {}
+ for param, val in params.items():
+ if param in self.no_script_form_params and val:
+ val = self.no_script_form_param(param, val)
+ if isinstance(val, FieldStorage) and val.file:
+ val = (val.filename, val.file)
+ if param == '_cwmsgid':
+ self.set_message_id(val)
+ elif param == '__message':
+ warn('[3.13] __message in request parameter is deprecated '
+ '(may only be given to .build_url). Seeing this message '
+ 'usualy means your application hold some