--- a/cwvreg.py Mon May 02 15:46:03 2011 +0200
+++ b/cwvreg.py Mon May 02 15:59:30 2011 +0200
@@ -402,10 +402,8 @@
if not isinstance(view, class_deprecated)]
try:
view = self._select_best(views, req, rset=rset, **kwargs)
- if view.linkable():
+ if view is not None and view.linkable():
yield view
- except NoSelectableObject:
- continue
except Exception:
self.exception('error while trying to select %s view for %s',
vid, rset)
--- a/devtools/testlib.py Mon May 02 15:46:03 2011 +0200
+++ b/devtools/testlib.py Mon May 02 15:59:30 2011 +0200
@@ -562,6 +562,8 @@
if views:
try:
view = viewsvreg._select_best(views, req, rset=rset)
+ if view is None:
+ raise NoSelectableObject((req,), {'rset':rset}, views)
if view.linkable():
yield view
else:
--- a/entities/test/unittest_wfobjs.py Mon May 02 15:46:03 2011 +0200
+++ b/entities/test/unittest_wfobjs.py Mon May 02 15:59:30 2011 +0200
@@ -165,7 +165,7 @@
user = self.user()
iworkflowable = user.cw_adapt_to('IWorkflowable')
iworkflowable.fire_transition('deactivate', comment=u'deactivate user')
- user.clear_all_caches()
+ user.cw_clear_all_caches()
self.assertEqual(iworkflowable.state, 'deactivated')
self._test_manager_deactivate(user)
trinfo = self._test_manager_deactivate(user)
@@ -192,7 +192,7 @@
self.commit()
iworkflowable.fire_transition('wake up')
self.commit()
- user.clear_all_caches()
+ user.cw_clear_all_caches()
self.assertEqual(iworkflowable.state, 'deactivated')
# XXX test managers can change state without matching transition
@@ -274,14 +274,14 @@
self.assertEqual(iworkflowable.subworkflow_input_transition(), None)
iworkflowable.fire_transition('swftr1', u'go')
self.commit()
- group.clear_all_caches()
+ group.cw_clear_all_caches()
self.assertEqual(iworkflowable.current_state.eid, swfstate1.eid)
self.assertEqual(iworkflowable.current_workflow.eid, swf.eid)
self.assertEqual(iworkflowable.main_workflow.eid, mwf.eid)
self.assertEqual(iworkflowable.subworkflow_input_transition().eid, swftr1.eid)
iworkflowable.fire_transition('tr1', u'go')
self.commit()
- group.clear_all_caches()
+ group.cw_clear_all_caches()
self.assertEqual(iworkflowable.current_state.eid, state2.eid)
self.assertEqual(iworkflowable.current_workflow.eid, mwf.eid)
self.assertEqual(iworkflowable.main_workflow.eid, mwf.eid)
@@ -295,10 +295,10 @@
# force back to state1
iworkflowable.change_state('state1', u'gadget')
iworkflowable.fire_transition('swftr1', u'au')
- group.clear_all_caches()
+ group.cw_clear_all_caches()
iworkflowable.fire_transition('tr2', u'chapeau')
self.commit()
- group.clear_all_caches()
+ group.cw_clear_all_caches()
self.assertEqual(iworkflowable.current_state.eid, state3.eid)
self.assertEqual(iworkflowable.current_workflow.eid, mwf.eid)
self.assertEqual(iworkflowable.main_workflow.eid, mwf.eid)
@@ -390,7 +390,7 @@
):
iworkflowable.fire_transition(trans)
self.commit()
- group.clear_all_caches()
+ group.cw_clear_all_caches()
self.assertEqual(iworkflowable.state, nextstate)
@@ -408,11 +408,11 @@
wf.add_state('asleep', initial=True)
self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s',
{'wf': wf.eid, 'x': self.member.eid})
- self.member.clear_all_caches()
+ self.member.cw_clear_all_caches()
iworkflowable = self.member.cw_adapt_to('IWorkflowable')
self.assertEqual(iworkflowable.state, 'activated')# no change before commit
self.commit()
- self.member.clear_all_caches()
+ self.member.cw_clear_all_caches()
self.assertEqual(iworkflowable.current_workflow.eid, wf.eid)
self.assertEqual(iworkflowable.state, 'asleep')
self.assertEqual(iworkflowable.workflow_history, ())
@@ -429,7 +429,7 @@
self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s',
{'wf': wf.eid, 'x': self.member.eid})
self.commit()
- self.member.clear_all_caches()
+ self.member.cw_clear_all_caches()
self.assertEqual(iworkflowable.current_workflow.eid, wf.eid)
self.assertEqual(iworkflowable.state, 'asleep')
self.assertEqual(parse_hist(iworkflowable.workflow_history),
@@ -472,10 +472,10 @@
self.commit()
self.execute('DELETE X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s',
{'wf': wf.eid, 'x': self.member.eid})
- self.member.clear_all_caches()
+ self.member.cw_clear_all_caches()
self.assertEqual(iworkflowable.state, 'asleep')# no change before commit
self.commit()
- self.member.clear_all_caches()
+ self.member.cw_clear_all_caches()
self.assertEqual(iworkflowable.current_workflow.name, "default user workflow")
self.assertEqual(iworkflowable.state, 'activated')
self.assertEqual(parse_hist(iworkflowable.workflow_history),
@@ -504,13 +504,13 @@
self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s',
{'wf': wf.eid, 'x': user.eid})
self.commit()
- user.clear_all_caches()
+ user.cw_clear_all_caches()
self.assertEqual(iworkflowable.state, 'asleep')
self.assertEqual([t.name for t in iworkflowable.possible_transitions()],
['rest'])
iworkflowable.fire_transition('rest')
self.commit()
- user.clear_all_caches()
+ user.cw_clear_all_caches()
self.assertEqual(iworkflowable.state, 'asleep')
self.assertEqual([t.name for t in iworkflowable.possible_transitions()],
['rest'])
@@ -520,7 +520,7 @@
self.commit()
iworkflowable.fire_transition('rest')
self.commit()
- user.clear_all_caches()
+ user.cw_clear_all_caches()
self.assertEqual(iworkflowable.state, 'dead')
self.assertEqual(parse_hist(iworkflowable.workflow_history),
[('asleep', 'asleep', 'rest', None),
--- a/entities/wfobjs.py Mon May 02 15:46:03 2011 +0200
+++ b/entities/wfobjs.py Mon May 02 15:59:30 2011 +0200
@@ -326,8 +326,8 @@
result[ep.subwf_state.eid] = ep.destination and ep.destination.eid
return result
- def clear_all_caches(self):
- super(WorkflowTransition, self).clear_all_caches()
+ def cw_clear_all_caches(self):
+ super(WorkflowTransition, self).cw_clear_all_caches()
clear_cache(self, 'exit_points')
--- a/entity.py Mon May 02 15:46:03 2011 +0200
+++ b/entity.py Mon May 02 15:59:30 2011 +0200
@@ -918,7 +918,7 @@
assert role
self._cw_related_cache.pop('%s_%s' % (rtype, role), None)
- def clear_all_caches(self): # XXX cw_clear_all_caches
+ def cw_clear_all_caches(self):
"""flush all caches on this entity. Further attributes/relations access
will triggers new database queries to get back values.
@@ -991,6 +991,10 @@
# deprecated stuff #########################################################
+ @deprecated('[3.13] use entity.cw_clear_all_caches()')
+ def clear_all_caches(self):
+ return self.cw_clear_all_caches()
+
@deprecated('[3.9] use entity.cw_attr_value(attr)')
def get_value(self, name):
return self.cw_attr_value(name)
--- a/etwist/server.py Mon May 02 15:46:03 2011 +0200
+++ b/etwist/server.py Mon May 02 15:59:30 2011 +0200
@@ -17,14 +17,19 @@
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""twisted server for CubicWeb web instances"""
+from __future__ import with_statement
+
__docformat__ = "restructuredtext en"
import sys
import os
+import os.path as osp
import select
import errno
import traceback
import threading
+import re
+import hashlib
from os.path import join
from time import mktime
from datetime import date, timedelta
@@ -41,7 +46,8 @@
from logilab.common.decorators import monkeypatch
-from cubicweb import AuthenticationError, ConfigurationError, CW_EVENT_MANAGER
+from cubicweb import (AuthenticationError, ConfigurationError,
+ CW_EVENT_MANAGER, CubicWebException)
from cubicweb.utils import json_dumps
from cubicweb.web import Redirect, DirectResponse, StatusResponse, LogOut
from cubicweb.web.application import CubicWebPublisher
@@ -70,13 +76,70 @@
code=http.FORBIDDEN,
stream='Access forbidden')
-class File(static.File):
- """Prevent from listing directories"""
+
+class NoListingFile(static.File):
def directoryListing(self):
return ForbiddenDirectoryLister()
-class LongTimeExpiringFile(File):
+class DataLookupDirectory(NoListingFile):
+ def __init__(self, config, path):
+ self.md5_version = config.instance_md5_version()
+ NoListingFile.__init__(self, path)
+ self.config = config
+ self.here = path
+ self._defineChildResources()
+ if self.config.debugmode:
+ self.data_modconcat_basepath = '/data/??'
+ else:
+ self.data_modconcat_basepath = '/data/%s/??' % self.md5_version
+
+ def _defineChildResources(self):
+ self.putChild(self.md5_version, self)
+
+ def getChild(self, path, request):
+ if not path:
+ if request.uri.startswith(self.data_modconcat_basepath):
+ resource_relpath = request.uri[len(self.data_modconcat_basepath):]
+ if resource_relpath:
+ paths = resource_relpath.split(',')
+ try:
+ return ConcatFiles(self.config, paths)
+ except ConcatFileNotFoundError:
+ return self.childNotFound
+ return self.directoryListing()
+ childpath = join(self.here, path)
+ dirpath, rid = self.config.locate_resource(childpath)
+ if dirpath is None:
+ # resource not found
+ return self.childNotFound
+ filepath = os.path.join(dirpath, rid)
+ if os.path.isdir(filepath):
+ resource = DataLookupDirectory(self.config, childpath)
+ # cache resource for this segment path to avoid recomputing
+ # directory lookup
+ self.putChild(path, resource)
+ return resource
+ else:
+ return NoListingFile(filepath)
+
+
+class FCKEditorResource(NoListingFile):
+ def __init__(self, config, path):
+ NoListingFile.__init__(self, path)
+ self.config = config
+
+ def getChild(self, path, request):
+ pre_path = request.path.split('/')[1:]
+ if pre_path[0] == 'https':
+ pre_path.pop(0)
+ uiprops = self.config.https_uiprops
+ else:
+ uiprops = self.config.uiprops
+ return static.File(osp.join(uiprops['FCKEDITOR_PATH'], path))
+
+
+class LongTimeExpiringFile(DataLookupDirectory):
"""overrides static.File and sets a far future ``Expires`` date
on the resouce.
@@ -88,28 +151,77 @@
etc.
"""
+ def _defineChildResources(self):
+ pass
+
def render(self, request):
# XXX: Don't provide additional resource information to error responses
#
# the HTTP RFC recommands not going further than 1 year ahead
expires = date.today() + timedelta(days=6*30)
request.setHeader('Expires', generateDateTime(mktime(expires.timetuple())))
- return File.render(self, request)
+ return DataLookupDirectory.render(self, request)
+
+
+class ConcatFileNotFoundError(CubicWebException):
+ pass
+
+
+class ConcatFiles(LongTimeExpiringFile):
+ def __init__(self, config, paths):
+ _, ext = osp.splitext(paths[0])
+ # create a unique / predictable filename
+ fname = hashlib.md5(';'.join(paths)).hexdigest() + ext
+ filepath = osp.join(config.appdatahome, 'uicache', fname)
+ LongTimeExpiringFile.__init__(self, config, filepath)
+ self._concat_cached_filepath(filepath, paths)
+ def _concat_cached_filepath(self, filepath, paths):
+ if not self._up_to_date(filepath, paths):
+ concat_data = []
+ for path in paths:
+                # FIXME locate_resource is called twice in debug-mode, but
+ # it's a @cached method
+ dirpath, rid = self.config.locate_resource(path)
+ if rid is None:
+ raise ConcatFileNotFoundError(path)
+ concat_data.append(open(osp.join(dirpath, rid)).read())
+ with open(filepath, 'wb') as f:
+ f.write('\n'.join(concat_data))
+
+ def _up_to_date(self, filepath, paths):
+ """
+ The concat-file is considered up-to-date if it exists.
+ In debug mode, an additional check is performed to make sure that
+ concat-file is more recent than all concatenated files
+ """
+ if not osp.isfile(filepath):
+ return False
+ if self.config.debugmode:
+ concat_lastmod = os.stat(filepath).st_mtime
+ for path in paths:
+ dirpath, rid = self.config.locate_resource(path)
+ if rid is None:
+ raise ConcatFileNotFoundError(path)
+ path = osp.join(dirpath, rid)
+ if os.stat(path).st_mtime > concat_lastmod:
+ return False
+ return True
class CubicWebRootResource(resource.Resource):
def __init__(self, config, vreg=None):
+ resource.Resource.__init__(self)
self.config = config
# instantiate publisher here and not in init_publisher to get some
# checks done before daemonization (eg versions consistency)
self.appli = CubicWebPublisher(config, vreg=vreg)
self.base_url = config['base-url']
self.https_url = config['https-url']
- self.children = {}
- self.static_directories = set(('data%s' % config.instance_md5_version(),
- 'data', 'static', 'fckeditor'))
global MAX_POST_LENGTH
MAX_POST_LENGTH = config['max-post-length']
+ self.putChild('static', NoListingFile(config.static_directory))
+ self.putChild('fckeditor', FCKEditorResource(self.config, ''))
+ self.putChild('data', DataLookupDirectory(self.config, ''))
def init_publisher(self):
config = self.config
@@ -152,38 +264,6 @@
def getChild(self, path, request):
"""Indicate which resource to use to process down the URL's path"""
- pre_path = request.path.split('/')[1:]
- if pre_path[0] == 'https':
- pre_path.pop(0)
- uiprops = self.config.https_uiprops
- else:
- uiprops = self.config.uiprops
- directory = pre_path[0]
- # Anything in data/, static/, fckeditor/ and the generated versioned
- # data directory is treated as static files
- if directory in self.static_directories:
- # take care fckeditor may appears as root directory or as a data
- # subdirectory
- if directory == 'static':
- return File(self.config.static_directory)
- if directory == 'fckeditor':
- return File(uiprops['FCKEDITOR_PATH'])
- if directory != 'data':
- # versioned directory, use specific file with http cache
- # headers so their are cached for a very long time
- cls = LongTimeExpiringFile
- else:
- cls = File
- if path == 'fckeditor':
- return cls(uiprops['FCKEDITOR_PATH'])
- if path == directory: # recurse
- return self
- datadir, path = self.config.locate_resource(path)
- if datadir is None:
- return self # recurse
- self.debug('static file %s from %s', path, datadir)
- return cls(join(datadir, path))
- # Otherwise we use this single resource
return self
def render(self, request):
--- a/hooks/workflow.py Mon May 02 15:46:03 2011 +0200
+++ b/hooks/workflow.py Mon May 02 15:59:30 2011 +0200
@@ -148,7 +148,7 @@
class WorkflowHook(hook.Hook):
__abstract__ = True
- category = 'workflow'
+ category = 'metadata'
class SetInitialStateHook(WorkflowHook):
@@ -160,21 +160,15 @@
_SetInitialStateOp(self._cw, entity=self.entity)
-class PrepareStateChangeHook(WorkflowHook):
- """record previous state information"""
- __regid__ = 'cwdelstate'
- __select__ = WorkflowHook.__select__ & hook.match_rtype('in_state')
- events = ('before_delete_relation',)
+class FireTransitionHook(WorkflowHook):
+ """check the transition is allowed and add missing information into the
+ TrInfo entity.
- def __call__(self):
- self._cw.transaction_data.setdefault('pendingrelations', []).append(
- (self.eidfrom, self.rtype, self.eidto))
-
-
-class FireTransitionHook(WorkflowHook):
- """check the transition is allowed, add missing information. Expect that:
+ Expect that:
* wf_info_for inlined relation is set
* by_transition or to_state (managers only) inlined relation is set
+
+ Check for automatic transition to be fired at the end
"""
__regid__ = 'wffiretransition'
__select__ = WorkflowHook.__select__ & is_instance('TrInfo')
@@ -273,7 +267,7 @@
class FiredTransitionHook(WorkflowHook):
- """change related entity state"""
+ """change related entity state and handle exit of subworkflow"""
__regid__ = 'wffiretransition'
__select__ = WorkflowHook.__select__ & is_instance('TrInfo')
events = ('after_add_entity',)
@@ -296,6 +290,7 @@
__regid__ = 'wfcheckinstate'
__select__ = WorkflowHook.__select__ & hook.match_rtype('in_state')
events = ('before_add_relation',)
+ category = 'integrity'
def __call__(self):
session = self._cw
--- a/rset.py Mon May 02 15:46:03 2011 +0200
+++ b/rset.py Mon May 02 15:59:30 2011 +0200
@@ -475,43 +475,57 @@
entity.eid = eid
# cache entity
req.set_entity_cache(entity)
- eschema = entity.e_schema
# try to complete the entity if there are some additional columns
if len(rowvalues) > 1:
- rqlst = self.syntax_tree()
- if rqlst.TYPE == 'select':
- # UNION query, find the subquery from which this entity has been
- # found
- select, col = rqlst.locate_subquery(col, etype, self.args)
+ eschema = entity.e_schema
+ eid_col, attr_cols, rel_cols = self._rset_structure(eschema, col)
+ entity.eid = rowvalues[eid_col]
+ for attr, col_idx in attr_cols.items():
+ entity.cw_attr_cache[attr] = rowvalues[col_idx]
+ for (rtype, role), col_idx in rel_cols.items():
+ value = rowvalues[col_idx]
+ if value is None:
+ if role == 'subject':
+ rql = 'Any Y WHERE X %s Y, X eid %s'
+ else:
+ rql = 'Any Y WHERE Y %s X, X eid %s'
+ rrset = ResultSet([], rql % (rtype, entity.eid))
+ rrset.req = req
+ else:
+ rrset = self._build_entity(row, col_idx).as_rset()
+ entity.cw_set_relation_cache(rtype, role, rrset)
+ return entity
+
+ @cached
+ def _rset_structure(self, eschema, entity_col):
+ eid_col = col = entity_col
+ rqlst = self.syntax_tree()
+ attr_cols = {}
+ rel_cols = {}
+ if rqlst.TYPE == 'select':
+ # UNION query, find the subquery from which this entity has been
+ # found
+ select, col = rqlst.locate_subquery(entity_col, eschema.type, self.args)
+ else:
+ select = rqlst
+ # take care, due to outer join support, we may find None
+ # values for non final relation
+ for i, attr, role in attr_desc_iterator(select, col, entity_col):
+ if role == 'subject':
+ rschema = eschema.subjrels[attr]
else:
- select = rqlst
- # take care, due to outer join support, we may find None
- # values for non final relation
- for i, attr, role in attr_desc_iterator(select, col, entity.cw_col):
- if role == 'subject':
- rschema = eschema.subjrels[attr]
- if rschema.final:
- if attr == 'eid':
- entity.eid = rowvalues[i]
- else:
- entity.cw_attr_cache[attr] = rowvalues[i]
- continue
+ rschema = eschema.objrels[attr]
+ if rschema.final:
+ if attr == 'eid':
+ eid_col = i
else:
- rschema = eschema.objrels[attr]
+ attr_cols[attr] = i
+ else:
rdef = eschema.rdef(attr, role)
# only keep value if it can't be multivalued
if rdef.role_cardinality(role) in '1?':
- if rowvalues[i] is None:
- if role == 'subject':
- rql = 'Any Y WHERE X %s Y, X eid %s'
- else:
- rql = 'Any Y WHERE Y %s X, X eid %s'
- rrset = ResultSet([], rql % (attr, entity.eid))
- rrset.req = req
- else:
- rrset = self._build_entity(row, i).as_rset()
- entity.cw_set_relation_cache(attr, role, rrset)
- return entity
+ rel_cols[(attr, role)] = i
+ return eid_col, attr_cols, rel_cols
@cached
def syntax_tree(self):
--- a/server/sources/pyrorql.py Mon May 02 15:46:03 2011 +0200
+++ b/server/sources/pyrorql.py Mon May 02 15:59:30 2011 +0200
@@ -437,7 +437,7 @@
cu = session.pool[self.uri]
cu.execute('SET %s WHERE X eid %%(x)s' % ','.join(relations), kwargs)
self._query_cache.clear()
- entity.clear_all_caches()
+ entity.cw_clear_all_caches()
def delete_entity(self, session, entity):
"""delete an entity from the source"""
@@ -453,8 +453,8 @@
{'x': self.eid2extid(subject, session),
'y': self.eid2extid(object, session)})
self._query_cache.clear()
- session.entity_from_eid(subject).clear_all_caches()
- session.entity_from_eid(object).clear_all_caches()
+ session.entity_from_eid(subject).cw_clear_all_caches()
+ session.entity_from_eid(object).cw_clear_all_caches()
def delete_relation(self, session, subject, rtype, object):
"""delete a relation from the source"""
@@ -463,8 +463,8 @@
{'x': self.eid2extid(subject, session),
'y': self.eid2extid(object, session)})
self._query_cache.clear()
- session.entity_from_eid(subject).clear_all_caches()
- session.entity_from_eid(object).clear_all_caches()
+ session.entity_from_eid(subject).cw_clear_all_caches()
+ session.entity_from_eid(object).cw_clear_all_caches()
class RQL2RQL(object):
--- a/server/sources/rql2sql.py Mon May 02 15:46:03 2011 +0200
+++ b/server/sources/rql2sql.py Mon May 02 15:59:30 2011 +0200
@@ -1357,6 +1357,8 @@
operator = ' LIKE '
else:
operator = ' %s ' % operator
+ elif operator == 'REGEXP':
+ return ' %s' % self.dbhelper.sql_regexp_match_expression(rhs.accept(self))
elif (operator == '=' and isinstance(rhs, Constant)
and rhs.eval(self._args) is None):
if lhs is None:
@@ -1407,6 +1409,8 @@
if constant.type is None:
return 'NULL'
value = constant.value
+ if constant.type == 'etype':
+ return value
if constant.type == 'Int' and isinstance(constant.parent, SortTerm):
return value
if constant.type in ('Date', 'Datetime'):
--- a/server/test/unittest_ldapuser.py Mon May 02 15:46:03 2011 +0200
+++ b/server/test/unittest_ldapuser.py Mon May 02 15:59:30 2011 +0200
@@ -239,7 +239,7 @@
iworkflowable.fire_transition('deactivate')
try:
cnx.commit()
- adim.clear_all_caches()
+ adim.cw_clear_all_caches()
self.assertEqual(adim.in_state[0].name, 'deactivated')
trinfo = iworkflowable.latest_trinfo()
self.assertEqual(trinfo.owned_by[0].login, SYT)
--- a/server/test/unittest_querier.py Mon May 02 15:46:03 2011 +0200
+++ b/server/test/unittest_querier.py Mon May 02 15:59:30 2011 +0200
@@ -443,6 +443,15 @@
self.assertEqual(rset.rows[0][0], result)
self.assertEqual(rset.description, [('Int',)])
+ def test_regexp_based_pattern_matching(self):
+ peid1 = self.execute("INSERT Personne X: X nom 'bidule'")[0][0]
+ peid2 = self.execute("INSERT Personne X: X nom 'cidule'")[0][0]
+ rset = self.execute('Any X WHERE X is Personne, X nom REGEXP "^b"')
+ self.assertEqual(len(rset.rows), 1, rset.rows)
+ self.assertEqual(rset.rows[0][0], peid1)
+ rset = self.execute('Any X WHERE X is Personne, X nom REGEXP "idu"')
+ self.assertEqual(len(rset.rows), 2, rset.rows)
+
def test_select_aggregat_count(self):
rset = self.execute('Any COUNT(X)')
self.assertEqual(len(rset.rows), 1)
--- a/server/test/unittest_rql2sql.py Mon May 02 15:46:03 2011 +0200
+++ b/server/test/unittest_rql2sql.py Mon May 02 15:59:30 2011 +0200
@@ -1331,6 +1331,18 @@
'''SELECT SUBSTR(_P.cw_nom, 1, 1)
FROM cw_Personne AS _P''')
+ def test_cast(self):
+ self._check("Any CAST(String, P) WHERE P is Personne",
+ '''SELECT CAST(_P.cw_eid AS text)
+FROM cw_Personne AS _P''')
+
+ def test_regexp(self):
+ self._check("Any X WHERE X login REGEXP '[0-9].*'",
+ '''SELECT _X.cw_eid
+FROM cw_CWUser AS _X
+WHERE _X.cw_login ~ [0-9].*
+''')
+
def test_parser_parse(self):
for t in self._parse(PARSER):
yield t
@@ -1630,6 +1642,9 @@
for t in self._parse(HAS_TEXT_LG_INDEXER):
yield t
+ def test_regexp(self):
+ self.skipTest('regexp-based pattern matching not implemented in sqlserver')
+
def test_or_having_fake_terms(self):
self._check('Any X WHERE X is CWUser, X creation_date D HAVING YEAR(D) = "2010" OR D = NULL',
'''SELECT _X.cw_eid
@@ -1726,6 +1741,10 @@
for t in self._parse(WITH_LIMIT):# + ADVANCED_WITH_LIMIT_OR_ORDERBY):
yield t
+ def test_cast(self):
+ self._check("Any CAST(String, P) WHERE P is Personne",
+ '''SELECT CAST(_P.cw_eid AS nvarchar(max))
+FROM cw_Personne AS _P''')
class SqliteSQLGeneratorTC(PostgresSQLGeneratorTC):
@@ -1739,6 +1758,14 @@
'''SELECT MONTH(_P.cw_creation_date)
FROM cw_Personne AS _P''')
+ def test_regexp(self):
+ self._check("Any X WHERE X login REGEXP '[0-9].*'",
+ '''SELECT _X.cw_eid
+FROM cw_CWUser AS _X
+WHERE _X.cw_login REGEXP [0-9].*
+''')
+
+
def test_union(self):
for t in self._parse((
('(Any N ORDERBY 1 WHERE X name N, X is State)'
@@ -1879,6 +1906,18 @@
'''SELECT EXTRACT(MONTH from _P.cw_creation_date)
FROM cw_Personne AS _P''')
+ def test_cast(self):
+ self._check("Any CAST(String, P) WHERE P is Personne",
+ '''SELECT CAST(_P.cw_eid AS mediumtext)
+FROM cw_Personne AS _P''')
+
+ def test_regexp(self):
+ self._check("Any X WHERE X login REGEXP '[0-9].*'",
+ '''SELECT _X.cw_eid
+FROM cw_CWUser AS _X
+WHERE _X.cw_login REGEXP [0-9].*
+''')
+
def test_from_clause_needed(self):
queries = [("Any 1 WHERE EXISTS(T is CWGroup, T name 'managers')",
'''SELECT 1
--- a/server/test/unittest_undo.py Mon May 02 15:46:03 2011 +0200
+++ b/server/test/unittest_undo.py Mon May 02 15:59:30 2011 +0200
@@ -150,8 +150,8 @@
txuuid = self.commit()
actions = self.cnx.transaction_info(txuuid).actions_list()
self.assertEqual(len(actions), 1)
- toto.clear_all_caches()
- e.clear_all_caches()
+ toto.cw_clear_all_caches()
+ e.cw_clear_all_caches()
errors = self.cnx.undo_transaction(txuuid)
undotxuuid = self.commit()
self.assertEqual(undotxuuid, None) # undo not undoable
@@ -192,7 +192,7 @@
self.commit()
errors = self.cnx.undo_transaction(txuuid)
self.commit()
- p.clear_all_caches()
+ p.cw_clear_all_caches()
self.assertEqual(p.fiche[0].eid, c2.eid)
self.assertEqual(len(errors), 1)
self.assertEqual(errors[0],
--- a/test/unittest_entity.py Mon May 02 15:46:03 2011 +0200
+++ b/test/unittest_entity.py Mon May 02 15:59:30 2011 +0200
@@ -539,7 +539,7 @@
self.assertEqual(person.rest_path(), 'personne/doe')
# ambiguity test
person2 = req.create_entity('Personne', prenom=u'remi', nom=u'doe')
- person.clear_all_caches()
+ person.cw_clear_all_caches()
self.assertEqual(person.rest_path(), 'personne/eid/%s' % person.eid)
self.assertEqual(person2.rest_path(), 'personne/eid/%s' % person2.eid)
# unique attr with None value (wikiid in this case)
--- a/test/unittest_selectors.py Mon May 02 15:46:03 2011 +0200
+++ b/test/unittest_selectors.py Mon May 02 15:59:30 2011 +0200
@@ -193,7 +193,7 @@
class WorkflowSelectorTC(CubicWebTC):
def _commit(self):
self.commit()
- self.wf_entity.clear_all_caches()
+ self.wf_entity.cw_clear_all_caches()
def setup_database(self):
wf = self.shell().add_workflow("wf_test", 'StateFull', default=True)
--- a/test/unittest_utils.py Mon May 02 15:46:03 2011 +0200
+++ b/test/unittest_utils.py Mon May 02 15:59:30 2011 +0200
@@ -22,8 +22,8 @@
import datetime
from logilab.common.testlib import TestCase, unittest_main
-
-from cubicweb.utils import make_uid, UStringIO, SizeConstrainedList, RepeatList
+from cubicweb.devtools.testlib import CubicWebTC
+from cubicweb.utils import make_uid, UStringIO, SizeConstrainedList, RepeatList, HTMLHead
from cubicweb.entity import Entity
try:
@@ -155,6 +155,102 @@
def test_encoding_unknown_stuff(self):
self.assertEqual(self.encode(TestCase), 'null')
+class HTMLHeadTC(CubicWebTC):
+ def test_concat_urls(self):
+ base_url = u'http://test.fr/data/'
+ head = HTMLHead(base_url)
+ urls = [base_url + u'bob1.js',
+ base_url + u'bob2.js',
+ base_url + u'bob3.js']
+ result = head.concat_urls(urls)
+ expected = u'http://test.fr/data/??bob1.js,bob2.js,bob3.js'
+ self.assertEqual(result, expected)
+
+ def test_group_urls(self):
+ base_url = u'http://test.fr/data/'
+ head = HTMLHead(base_url)
+ urls_spec = [(base_url + u'bob0.js', None),
+ (base_url + u'bob1.js', None),
+ (u'http://ext.com/bob2.js', None),
+ (u'http://ext.com/bob3.js', None),
+ (base_url + u'bob4.css', 'all'),
+ (base_url + u'bob5.css', 'all'),
+ (base_url + u'bob6.css', 'print'),
+ (base_url + u'bob7.css', 'print'),
+ (base_url + u'bob8.css', ('all', u'[if IE 8]')),
+ (base_url + u'bob9.css', ('print', u'[if IE 8]'))
+ ]
+ result = head.group_urls(urls_spec)
+ expected = [(base_url + u'??bob0.js,bob1.js', None),
+ (u'http://ext.com/bob2.js', None),
+ (u'http://ext.com/bob3.js', None),
+ (base_url + u'??bob4.css,bob5.css', 'all'),
+ (base_url + u'??bob6.css,bob7.css', 'print'),
+ (base_url + u'bob8.css', ('all', u'[if IE 8]')),
+ (base_url + u'bob9.css', ('print', u'[if IE 8]'))
+ ]
+ self.assertEqual(list(result), expected)
+
+ def test_getvalue_with_concat(self):
+ base_url = u'http://test.fr/data/'
+ head = HTMLHead(base_url)
+ head.add_js(base_url + u'bob0.js')
+ head.add_js(base_url + u'bob1.js')
+ head.add_js(u'http://ext.com/bob2.js')
+ head.add_js(u'http://ext.com/bob3.js')
+ head.add_css(base_url + u'bob4.css')
+ head.add_css(base_url + u'bob5.css')
+ head.add_css(base_url + u'bob6.css', 'print')
+ head.add_css(base_url + u'bob7.css', 'print')
+ head.add_ie_css(base_url + u'bob8.css')
+ head.add_ie_css(base_url + u'bob9.css', 'print', u'[if lt IE 7]')
+ result = head.getvalue()
+ expected = u"""<head>
+<link rel="stylesheet" type="text/css" media="all" href="http://test.fr/data/??bob4.css,bob5.css"/>
+<link rel="stylesheet" type="text/css" media="print" href="http://test.fr/data/??bob6.css,bob7.css"/>
+<!--[if lt IE 8]>
+<link rel="stylesheet" type="text/css" media="all" href="http://test.fr/data/bob8.css"/>
+<!--[if lt IE 7]>
+<link rel="stylesheet" type="text/css" media="print" href="http://test.fr/data/bob9.css"/>
+<![endif]-->
+<script type="text/javascript" src="http://test.fr/data/??bob0.js,bob1.js"></script>
+<script type="text/javascript" src="http://ext.com/bob2.js"></script>
+<script type="text/javascript" src="http://ext.com/bob3.js"></script>
+</head>
+"""
+ self.assertEqual(result, expected)
+
+ def test_getvalue_without_concat(self):
+ base_url = u'http://test.fr/data/'
+ head = HTMLHead()
+ head.add_js(base_url + u'bob0.js')
+ head.add_js(base_url + u'bob1.js')
+ head.add_js(u'http://ext.com/bob2.js')
+ head.add_js(u'http://ext.com/bob3.js')
+ head.add_css(base_url + u'bob4.css')
+ head.add_css(base_url + u'bob5.css')
+ head.add_css(base_url + u'bob6.css', 'print')
+ head.add_css(base_url + u'bob7.css', 'print')
+ head.add_ie_css(base_url + u'bob8.css')
+ head.add_ie_css(base_url + u'bob9.css', 'print', u'[if lt IE 7]')
+ result = head.getvalue()
+ expected = u"""<head>
+<link rel="stylesheet" type="text/css" media="all" href="http://test.fr/data/bob4.css"/>
+<link rel="stylesheet" type="text/css" media="all" href="http://test.fr/data/bob5.css"/>
+<link rel="stylesheet" type="text/css" media="print" href="http://test.fr/data/bob6.css"/>
+<link rel="stylesheet" type="text/css" media="print" href="http://test.fr/data/bob7.css"/>
+<!--[if lt IE 8]>
+<link rel="stylesheet" type="text/css" media="all" href="http://test.fr/data/bob8.css"/>
+<!--[if lt IE 7]>
+<link rel="stylesheet" type="text/css" media="print" href="http://test.fr/data/bob9.css"/>
+<![endif]-->
+<script type="text/javascript" src="http://test.fr/data/bob0.js"></script>
+<script type="text/javascript" src="http://test.fr/data/bob1.js"></script>
+<script type="text/javascript" src="http://ext.com/bob2.js"></script>
+<script type="text/javascript" src="http://ext.com/bob3.js"></script>
+</head>
+"""
+ self.assertEqual(result, expected)
if __name__ == '__main__':
unittest_main()
--- a/utils.py Mon May 02 15:46:03 2011 +0200
+++ b/utils.py Mon May 02 15:59:30 2011 +0200
@@ -240,7 +240,7 @@
xhtml_safe_script_opening = u'<script type="text/javascript"><!--//--><![CDATA[//><!--\n'
xhtml_safe_script_closing = u'\n//--><!]]></script>'
- def __init__(self):
+ def __init__(self, datadir_url=None):
super(HTMLHead, self).__init__()
self.jsvars = []
self.jsfiles = []
@@ -248,6 +248,7 @@
self.ie_cssfiles = []
self.post_inlined_scripts = []
self.pagedata_unload = False
+ self.datadir_url = datadir_url
def add_raw(self, rawheader):
@@ -284,7 +285,7 @@
if jsfile not in self.jsfiles:
self.jsfiles.append(jsfile)
- def add_css(self, cssfile, media):
+ def add_css(self, cssfile, media='all'):
"""adds `cssfile` to the list of javascripts used in the webpage
This function checks if the file has already been added
@@ -304,6 +305,45 @@
self.post_inlined_scripts.append(self.js_unload_code)
self.pagedata_unload = True
+ def concat_urls(self, urls):
+ """concatenates urls into one url usable by Apache mod_concat
+
+ This method returns the url without modifying it if there is only
+ one element in the list
+ :param urls: list of local urls/filenames to concatenate
+ """
+ if len(urls) == 1:
+ return urls[0]
+ len_prefix = len(self.datadir_url)
+ concated = u','.join(url[len_prefix:] for url in urls)
+ return (u'%s??%s' % (self.datadir_url, concated))
+
+ def group_urls(self, urls_spec):
+ """parses urls_spec in order to generate concatenated urls
+ for js and css includes
+
+ This method checks if the file is local and if it shares options
+ with direct neighbors
+ :param urls_spec: entire list of urls/filenames to inspect
+ """
+ concatable = []
+ prev_islocal = False
+ prev_key = None
+ for url, key in urls_spec:
+ islocal = url.startswith(self.datadir_url)
+ if concatable and (islocal != prev_islocal or key != prev_key):
+ yield (self.concat_urls(concatable), prev_key)
+ del concatable[:]
+ if not islocal:
+ yield (url, key)
+ else:
+ concatable.append(url)
+ prev_islocal = islocal
+ prev_key = key
+ if concatable:
+ yield (self.concat_urls(concatable), prev_key)
+
+
def getvalue(self, skiphead=False):
"""reimplement getvalue to provide a consistent (and somewhat browser
optimzed cf. http://stevesouders.com/cuzillion) order in external
@@ -321,18 +361,20 @@
w(vardecl + u'\n')
w(self.xhtml_safe_script_closing)
# 2/ css files
- for cssfile, media in self.cssfiles:
+ for cssfile, media in (self.group_urls(self.cssfiles) if self.datadir_url else self.cssfiles):
w(u'<link rel="stylesheet" type="text/css" media="%s" href="%s"/>\n' %
(media, xml_escape(cssfile)))
# 3/ ie css if necessary
if self.ie_cssfiles:
- for cssfile, media, iespec in self.ie_cssfiles:
+ ie_cssfiles = ((x, (y, z)) for x, y, z in self.ie_cssfiles)
+ for cssfile, (media, iespec) in (self.group_urls(ie_cssfiles) if self.datadir_url else ie_cssfiles):
w(u'<!--%s>\n' % iespec)
w(u'<link rel="stylesheet" type="text/css" media="%s" href="%s"/>\n' %
(media, xml_escape(cssfile)))
w(u'<![endif]--> \n')
# 4/ js files
- for jsfile in self.jsfiles:
+ jsfiles = ((x, None) for x in self.jsfiles)
+ for jsfile, media in self.group_urls(jsfiles) if self.datadir_url else jsfiles:
w(u'<script type="text/javascript" src="%s"></script>\n' %
xml_escape(jsfile))
# 5/ post inlined scripts (i.e. scripts depending on other JS files)
--- a/vregistry.py Mon May 02 15:46:03 2011 +0200
+++ b/vregistry.py Mon May 02 15:59:30 2011 +0200
@@ -184,7 +184,10 @@
raise :exc:`NoSelectableObject` if not object apply
"""
- return self._select_best(self[__oid], *args, **kwargs)
+ obj = self._select_best(self[__oid], *args, **kwargs)
+ if obj is None:
+ raise NoSelectableObject(args, kwargs, self[__oid] )
+ return obj
def select_or_none(self, __oid, *args, **kwargs):
"""return the most specific object among those with the given oid
@@ -202,16 +205,18 @@
context
"""
for appobjects in self.itervalues():
- try:
- yield self._select_best(appobjects, *args, **kwargs)
- except NoSelectableObject:
+ obj = self._select_best(appobjects, *args, **kwargs)
+ if obj is None:
continue
+ yield obj
def _select_best(self, appobjects, *args, **kwargs):
"""return an instance of the most specific object according
to parameters
- raise `NoSelectableObject` if not object apply
+    return None if no object applies (don't raise `NoSelectableObject`
+    since it's costly when searching appobjects using `possible_objects`,
+    e.g. searching for hooks).
"""
if len(args) > 1:
warn('[3.5] only the request param can not be named when calling select*',
@@ -224,7 +229,7 @@
elif appobjectscore > 0 and appobjectscore == score:
winners.append(appobject)
if winners is None:
- raise NoSelectableObject(args, kwargs, appobjects)
+ return None
if len(winners) > 1:
# log in production environement / test, error while debugging
msg = 'select ambiguity: %s\n(args: %s, kwargs: %s)'
--- a/web/controller.py Mon May 02 15:46:03 2011 +0200
+++ b/web/controller.py Mon May 02 15:59:30 2011 +0200
@@ -165,7 +165,7 @@
elif self._edited_entity:
# clear caches in case some attribute participating to the rest path
# has been modified
- self._edited_entity.clear_all_caches()
+ self._edited_entity.cw_clear_all_caches()
path = self._edited_entity.rest_path()
else:
path = 'view'
--- a/web/data/cubicweb.ajax.js Mon May 02 15:46:03 2011 +0200
+++ b/web/data/cubicweb.ajax.js Mon May 02 15:59:30 2011 +0200
@@ -86,6 +86,41 @@
var JSON_BASE_URL = baseuri() + 'json?';
+/**
+ * returns true if `url` is a mod_concat-like url
+ * (e.g. http://..../data??resource1.js,resource2.js)
+ */
+function _modconcatLikeUrl(url) {
+ var base = baseuri();
+ if (!base.endswith('/')) {
+ base += '/';
+ }
+ var modconcat_rgx = new RegExp('(' + base + 'data/([a-z0-9]+/))?\\?\\?(.+)');
+ return modconcat_rgx.exec(url);
+}
+
+/**
+ * decomposes a mod_concat-like url into its corresponding list of
+ * resources' urls
+ *
+ * >>> _listResources('http://foo.com/data/??a.js,b.js,c.js')
+ * ['http://foo.com/data/a.js', 'http://foo.com/data/b.js', 'http://foo.com/data/c.js']
+ */
+function _listResources(src) {
+ var resources = [];
+ var groups = _modconcatLikeUrl(src);
+ if (groups == null) {
+ resources.push(src);
+ } else {
+ var dataurl = groups[0];
+ $.each(cw.utils.lastOf(groups).split(','),
+ function() {
+ resources.push(dataurl + this);
+ });
+ }
+ return resources;
+}
+
//============= utility function handling remote calls responses. ==============//
function _loadAjaxHtmlHead($node, $head, tag, srcattr) {
var jqtagfilter = tag + '[' + srcattr + ']';
@@ -93,28 +128,33 @@
cw['loaded_'+srcattr] = [];
var loaded = cw['loaded_'+srcattr];
jQuery('head ' + jqtagfilter).each(function(i) {
- loaded.push(this.getAttribute(srcattr));
- });
+ // tab1.push.apply(tab1, tab2) <=> tab1 += tab2 (python-wise)
+ loaded.push.apply(loaded, _listResources(this.getAttribute(srcattr)));
+ });
} else {
var loaded = cw['loaded_'+srcattr];
}
$node.find(tag).each(function(i) {
- var url = this.getAttribute(srcattr);
+ var srcnode = this;
+ var url = srcnode.getAttribute(srcattr);
if (url) {
- if (jQuery.inArray(url, loaded) == -1) {
- // take care to <script> tags: jQuery append method script nodes
- // don't appears in the DOM (See comments on
- // http://api.jquery.com/append/), which cause undesired
- // duplicated load in our case. After trying to use bare DOM api
- // to avoid this, we switched to handle a list of already loaded
- // stuff ourselves, since bare DOM api gives bug with the
- // server-response event, since we loose control on when the
- // script is loaded (jQuery load it immediatly).
- loaded.push(url);
- jQuery(this).appendTo($head);
- }
+ $.each(_listResources(url), function() {
+ var resource = '' + this; // implicit object->string cast
+ if ($.inArray(resource, loaded) == -1) {
+ // take care to <script> tags: jQuery append method script nodes
+                    // don't appear in the DOM (See comments on
+ // http://api.jquery.com/append/), which cause undesired
+ // duplicated load in our case. After trying to use bare DOM api
+ // to avoid this, we switched to handle a list of already loaded
+ // stuff ourselves, since bare DOM api gives bug with the
+                    // server-response event, since we lose control on when the
+                    // script is loaded (jQuery loads it immediately).
+ loaded.push(resource);
+ jQuery(srcnode).appendTo($head);
+ }
+ });
} else {
- jQuery(this).appendTo($head);
+ jQuery(srcnode).appendTo($head);
}
});
$node.find(jqtagfilter).remove();
--- a/web/data/cubicweb.facets.css Mon May 02 15:46:03 2011 +0200
+++ b/web/data/cubicweb.facets.css Mon May 02 15:59:30 2011 +0200
@@ -117,3 +117,7 @@
font-weight: bold;
text-align: center;
}
+
+div.facetTitleSelected {
+ background: url("required.png") no-repeat right top;
+}
--- a/web/data/cubicweb.facets.js Mon May 02 15:46:03 2011 +0200
+++ b/web/data/cubicweb.facets.js Mon May 02 15:59:30 2011 +0200
@@ -238,6 +238,18 @@
});
}
+// change css class of facets that have a value selected
+function updateFacetTitles() {
+ $('.facet').each(function() {
+ var $divTitle = $(this).find('.facetTitle');
+ var facetSelected = $(this).find('.facetValueSelected');
+ if (facetSelected.length) {
+ $divTitle.addClass('facetTitleSelected');
+ } else {
+ $divTitle.removeClass('facetTitleSelected');
+ }
+ });
+}
// we need to differenciate cases where initFacetBoxEvents is called with one
// argument or without any argument. If we use `initFacetBoxEvents` as the
@@ -245,4 +257,32 @@
// his, so we use this small anonymous function instead.
jQuery(document).ready(function() {
initFacetBoxEvents();
+ jQuery(cw).bind('facets-content-loaded', onFacetContentLoaded);
+ jQuery(cw).bind('facets-content-loading', onFacetFiltering);
+ jQuery(cw).bind('facets-content-loading', updateFacetTitles);
});
+
+function showFacetLoading(parentid) {
+ var loadingWidth = 200; // px
+ var loadingHeight = 100; // px
+ var $msg = jQuery('#facetLoading');
+ var $parent = jQuery('#' + parentid);
+ var leftPos = $parent.offset().left + ($parent.width() - loadingWidth) / 2;
+ $parent.fadeTo('normal', 0.2);
+ $msg.css('left', leftPos).show();
+}
+
+function onFacetFiltering(event, divid /* ... */) {
+ showFacetLoading(divid);
+}
+
+function onFacetContentLoaded(event, divid, rql, vid, extraparams) {
+ jQuery('#facetLoading').hide();
+}
+
+jQuery(document).ready(function () {
+ if (jQuery('div.facetBody').length) {
+ document.body.appendChild(DIV({id:'facetLoading'},
+ IMG({src: baseuri() + 'data/facet-loading.png'})));
+ }
+});
--- a/web/data/cubicweb.js Mon May 02 15:46:03 2011 +0200
+++ b/web/data/cubicweb.js Mon May 02 15:59:30 2011 +0200
@@ -308,6 +308,17 @@
},
/**
+ * returns the last element of an array-like object or undefined if empty
+ */
+ lastOf: function(array) {
+ if (array.length) {
+ return array[array.length-1];
+ } else {
+ return undefined;
+ }
+ },
+
+ /**
* .. function:: difference(lst1, lst2)
*
* returns a list containing all elements in `lst1` that are not
Binary file web/data/facet-loading.png has changed
--- a/web/request.py Mon May 02 15:46:03 2011 +0200
+++ b/web/request.py Mon May 02 15:59:30 2011 +0200
@@ -92,7 +92,7 @@
self.uiprops = vreg.config.uiprops
self.datadir_url = vreg.config.datadir_url
# raw html headers that can be added from any view
- self.html_headers = HTMLHead()
+ self.html_headers = HTMLHead(self.datadir_url)
# form parameters
self.setup_params(form)
# dictionnary that may be used to store request data that has to be
@@ -256,7 +256,7 @@
"""used by AutomaticWebTest to clear html headers between tests on
the same resultset
"""
- self.html_headers = HTMLHead()
+ self.html_headers = HTMLHead(self.datadir_url)
return self
# web state helpers #######################################################
@@ -415,7 +415,8 @@
@cached # so it's writed only once
def fckeditor_config(self):
- self.add_js('fckeditor/fckeditor.js')
+ fckeditor_url = self.build_url('fckeditor/fckeditor.js')
+ self.add_js(fckeditor_url, localfile=False)
self.html_headers.define_var('fcklang', self.lang)
self.html_headers.define_var('fckconfigpath',
self.data_url('cubicweb.fckcwconfig.js'))
@@ -888,10 +889,20 @@
def _parse_accept_header(raw_header, value_parser=None, value_sort_key=None):
"""returns an ordered list accepted types
- returned value is a list of 2-tuple (value, score), ordered
- by score. Exact type of `value` will depend on what `value_parser`
- will reutrn. if `value_parser` is None, then the raw value, as found
- in the http header, is used.
+ :param value_parser: a function to parse a raw accept chunk. If None
+ is provided, the function defaults to identity. If a function is provided,
+ it must accept 2 parameters ``value`` and ``other_params``. ``value`` is
+ the value found before the first ';', ``other_params`` is a dictionary
+ built from all other chunks after this first ';'
+
+ :param value_sort_key: a key function to sort values found in the accept
+ header. This function will be passed a 3-tuple
+ (raw_value, parsed_value, score). If None is provided, the default
+ sort_key is 1./score
+
+ :return: a list of 3-tuples (raw_value, parsed_value, score),
+ ordered by score. ``parsed_value`` will be the return value of
+ ``value_parser(raw_value)``
"""
if value_sort_key is None:
value_sort_key = lambda infos: 1./infos[-1]
@@ -926,7 +937,7 @@
'text/html;level=1', `mimetypeinfo` will be ('text', '*', {'level': '1'})
"""
try:
- media_type, media_subtype = value.strip().split('/')
+ media_type, media_subtype = value.strip().split('/', 1)
except ValueError: # safety belt : '/' should always be present
media_type = value.strip()
media_subtype = '*'
--- a/web/test/unittest_views_basecontrollers.py Mon May 02 15:46:03 2011 +0200
+++ b/web/test/unittest_views_basecontrollers.py Mon May 02 15:59:30 2011 +0200
@@ -194,7 +194,7 @@
'use_email-object:'+emaileid: peid,
}
path, params = self.expect_redirect_publish(req, 'edit')
- email.clear_all_caches()
+ email.cw_clear_all_caches()
self.assertEqual(email.address, 'adim@logilab.fr')
--- a/web/views/urlpublishing.py Mon May 02 15:46:03 2011 +0200
+++ b/web/views/urlpublishing.py Mon May 02 15:59:30 2011 +0200
@@ -260,9 +260,8 @@
else:
try:
action = actionsreg._select_best(actions, req, rset=rset)
+ if action is not None:
+ raise Redirect(action.url())
except RegistryException:
- continue
- else:
- # XXX avoid redirect
- raise Redirect(action.url())
+ pass # continue searching
raise PathDontMatch()
--- a/web/webconfig.py Mon May 02 15:46:03 2011 +0200
+++ b/web/webconfig.py Mon May 02 15:59:30 2011 +0200
@@ -300,19 +300,14 @@
if not (self.repairing or self.creating):
self.global_set_option('base-url', baseurl)
httpsurl = self['https-url']
+ datadir_path = 'data/' if self.debugmode else 'data/%s/' % self.instance_md5_version()
if httpsurl:
if httpsurl[-1] != '/':
httpsurl += '/'
if not self.repairing:
self.global_set_option('https-url', httpsurl)
- if self.debugmode:
- self.https_datadir_url = httpsurl + 'data/'
- else:
- self.https_datadir_url = httpsurl + 'data%s/' % self.instance_md5_version()
- if self.debugmode:
- self.datadir_url = baseurl + 'data/'
- else:
- self.datadir_url = baseurl + 'data%s/' % self.instance_md5_version()
+ self.https_datadir_url = httpsurl + datadir_path
+ self.datadir_url = baseurl + datadir_path
def _build_ui_properties(self):
# self.datadir_url[:-1] to remove trailing /
--- a/web/webctl.py Mon May 02 15:46:03 2011 +0200
+++ b/web/webctl.py Mon May 02 15:59:30 2011 +0200
@@ -21,9 +21,22 @@
__docformat__ = "restructuredtext en"
+import os, os.path as osp
+from shutil import copy
+
from logilab.common.shellutils import ASK
-from cubicweb.toolsutils import CommandHandler, underline_title
+from cubicweb import ExecutionError
+from cubicweb.cwctl import CWCTL
+from cubicweb.cwconfig import CubicWebConfiguration as cwcfg
+from cubicweb.toolsutils import Command, CommandHandler, underline_title
+
+
+try:
+ from os import symlink as linkdir
+except ImportError:
+ from shutil import copytree as linkdir
+
class WebCreateHandler(CommandHandler):
cmdname = 'create'
@@ -43,3 +56,57 @@
def postcreate(self, *args, **kwargs):
"""hooks called once instance's initialization has been completed"""
+
+
+class GenStaticDataDir(Command):
+ """Create a directory merging all data directory content from cubes and CW.
+ """
+ name = 'gen-static-datadir'
+ arguments = '<instance> [dirpath]'
+ min_args = 1
+ max_args = 2
+
+ options = ()
+
+ def run(self, args):
+ appid = args.pop(0)
+ config = cwcfg.config_for(appid)
+ if args:
+ dest = args[0]
+ else:
+ dest = osp.join(config.appdatahome, 'data')
+ if osp.exists(dest):
+ raise ExecutionError('Directory %s already exists. '
+ 'Remove it first.' % dest)
+ config.quick_start = True # notify this is not a regular start
+ # list all resources (no matter their order)
+ resources = set()
+ for datadir in self._datadirs(config):
+ for dirpath, dirnames, filenames in os.walk(datadir):
+ rel_dirpath = dirpath[len(datadir)+1:]
+ resources.update(osp.join(rel_dirpath, f) for f in filenames)
+ # locate resources and copy them to destination
+ for resource in resources:
+ dirname = osp.dirname(resource)
+ dest_resource = osp.join(dest, dirname)
+ if not osp.isdir(dest_resource):
+ os.makedirs(dest_resource)
+ resource_dir, resource_path = config.locate_resource(resource)
+ copy(osp.join(resource_dir, resource_path), dest_resource)
+ # handle md5 version subdirectory
+ linkdir(dest, osp.join(dest, config.instance_md5_version()))
+ print ('You can use apache rewrite rule below :\n'
+ 'RewriteRule ^/data/(.*) %s/$1 [L]' % dest)
+
+ def _datadirs(self, config):
+ repo = config.repository()
+ if config._cubes is None:
+ # web only config
+ config.init_cubes(repo.get_cubes())
+ for cube in repo.get_cubes():
+ cube_datadir = osp.join(cwcfg.cube_dir(cube), 'data')
+ if osp.isdir(cube_datadir):
+ yield cube_datadir
+ yield osp.join(config.shared_dir(), 'data')
+
+CWCTL.register(GenStaticDataDir)