--- a/__init__.py Fri Dec 16 14:35:25 2011 +0100
+++ b/__init__.py Wed Dec 21 18:01:48 2011 +0100
@@ -18,6 +18,7 @@
"""CubicWeb is a generic framework to quickly build applications which describes
relations between entitites.
"""
+from __future__ import with_statement
__docformat__ = "restructuredtext en"
@@ -76,6 +77,49 @@
"Binary objects must use raw strings, not %s" % data.__class__
StringIO.write(self, data)
+ def to_file(self, filename):
+ """write the binary content to the given file
+
+ the writing is performed in a way that is safe for files stored
+ on Windows SMB shares
+ """
+ pos = self.tell()
+ with open(filename, 'wb') as fobj:
+ self.seek(0)
+ if sys.platform == 'win32':
+ while True:
+ # the 16kB chunksize comes from the shutil module
+ # in stdlib
+ chunk = self.read(16*1024)
+ if not chunk:
+ break
+ fobj.write(chunk)
+ else:
+ fobj.write(self.read())
+ self.seek(pos)
+
+ @staticmethod
+ def from_file(filename):
+ """read the given file and return its contents in a Binary
+
+ the reading is performed in a way that is safe for files stored
+ on Windows SMB shares
+ """
+ binary = Binary()
+ with open(filename, 'rb') as fobj:
+ if sys.platform == 'win32':
+ while True:
+ # the 16kB chunksize comes from the shutil module
+ # in stdlib
+ chunk = fobj.read(16*1024)
+ if not chunk:
+ break
+ binary.write(chunk)
+ else:
+ binary.write(fobj.read())
+ return binary
+
+
# use this dictionary to rename entity types while keeping bw compat
ETYPE_NAME_MAP = {}
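A minimal usage sketch of the two helpers added above (standalone Python 2, not part of the patch; the temporary-file handling is only illustrative):

    import os
    import tempfile

    from cubicweb import Binary

    # round-trip a Binary through a file, as the fs storage further below does
    fd, fpath = tempfile.mkstemp()
    os.close(fd)
    try:
        data = Binary('some raw bytes')
        data.to_file(fpath)        # chunked write on win32, single write elsewhere
        assert Binary.from_file(fpath).getvalue() == 'some raw bytes'
        assert data.tell() == 0    # to_file restores the original stream position
    finally:
        os.unlink(fpath)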
--- a/server/querier.py Fri Dec 16 14:35:25 2011 +0100
+++ b/server/querier.py Wed Dec 21 18:01:48 2011 +0100
@@ -668,7 +668,7 @@
print '*'*80
print 'querier input', repr(rql), repr(args)
# parse the query and binds variables
- cachekey = rql
+ cachekey = (rql,)
try:
if args:
# search for named args in query which are eids (hence
@@ -699,7 +699,7 @@
# we want queries such as "Any X WHERE X eid 9999" return an
# empty result instead of raising UnknownEid
return empty_rset(rql, args, rqlst)
- if args and not rql in self._rql_ck_cache:
+ if args and rql not in self._rql_ck_cache:
self._rql_ck_cache[rql] = eidkeys
if eidkeys:
cachekey = self._repo.querier_cache_key(session, rql, args,
@@ -722,6 +722,11 @@
# a new syntax tree is built from them.
rqlst = rqlst.copy()
self._annotate(rqlst)
+ if args:
+ # different SQL is generated depending on whether an argument is None
+ # (IS NULL) or not, so this must be part of the SQL cache key
+ cachekey += tuple(sorted([k for k,v in args.iteritems()
+ if v is None]))
# make an execution plan
plan = self.plan_factory(rqlst, args, session)
plan.cache_key = cachekey
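The rationale for extending the cache key above: the SQL generated for an RQL query differs depending on whether a substituted argument is None (rendered as IS NULL) or an actual value (rendered as an equality test), so plans for the two cases must not share a cache entry. A minimal sketch of the resulting key shape (the helper name make_cache_key is hypothetical and only mirrors the logic added here):

    def make_cache_key(rql, args=None):
        # the key always starts with the RQL string, as in cachekey = (rql,)
        key = (rql,)
        if args:
            # append the sorted names of None-valued arguments so that
            # NULL and non-NULL executions get distinct plans
            key += tuple(sorted(k for k, v in args.iteritems() if v is None))
        return key

    rql = 'Any X WHERE X is CWEType, X name %(name)s'
    assert make_cache_key(rql, {'name': None}) == (rql, 'name')
    assert make_cache_key(rql, {'name': 'CWEType'}) == (rql,)

This is also why repository.py below pops its per-eid cache entries with one-element tuples rather than bare strings.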
--- a/server/repository.py Fri Dec 16 14:35:25 2011 +0100
+++ b/server/repository.py Wed Dec 21 18:01:48 2011 +0100
@@ -989,11 +989,11 @@
for eid in eids:
try:
etype, uri, extid, auri = etcache.pop(typed_eid(eid)) # may be a string in some cases
- rqlcache.pop('%s X WHERE X eid %s' % (etype, eid), None)
+ rqlcache.pop(('%s X WHERE X eid %s' % (etype, eid),), None)
extidcache.pop((extid, uri), None)
except KeyError:
etype = None
- rqlcache.pop('Any X WHERE X eid %s' % eid, None)
+ rqlcache.pop(('Any X WHERE X eid %s' % eid,), None)
for source in self.sources:
source.clear_eid_cache(eid, etype)
--- a/server/sources/__init__.py Fri Dec 16 14:35:25 2011 +0100
+++ b/server/sources/__init__.py Wed Dec 21 18:01:48 2011 +0100
@@ -236,7 +236,7 @@
def get_connection(self):
"""open and return a connection to the source"""
- raise NotImplementedError()
+ raise NotImplementedError(self)
def check_connection(self, cnx):
"""Check connection validity, return None if the connection is still
@@ -388,7 +388,7 @@
if this account is defined in this source and valid login / password is
given. Else raise `AuthenticationError`
"""
- raise NotImplementedError()
+ raise NotImplementedError(self)
# RQL query api ############################################################
@@ -399,7 +399,7 @@
possible type). If cachekey is given, the query necessary to fetch the
results (but not the results themselves) may be cached using this key.
"""
- raise NotImplementedError()
+ raise NotImplementedError(self)
def flying_insert(self, table, session, union, args=None, varmap=None):
"""similar as .syntax_tree_search, but inserts data in the temporary
@@ -415,15 +415,15 @@
def get_extid(self, entity):
"""return the external id for the given newly inserted entity"""
- raise NotImplementedError()
+ raise NotImplementedError(self)
def add_entity(self, session, entity):
"""add a new entity to the source"""
- raise NotImplementedError()
+ raise NotImplementedError(self)
def update_entity(self, session, entity):
"""update an entity in the source"""
- raise NotImplementedError()
+ raise NotImplementedError(self)
def delete_entities(self, session, entities):
"""delete several entities from the source"""
@@ -432,11 +432,11 @@
def delete_entity(self, session, entity):
"""delete an entity from the source"""
- raise NotImplementedError()
+ raise NotImplementedError(self)
def add_relation(self, session, subject, rtype, object):
"""add a relation to the source"""
- raise NotImplementedError()
+ raise NotImplementedError(self)
def add_relations(self, session, rtype, subj_obj_list):
"""add a relations to the source"""
@@ -447,30 +447,30 @@
def delete_relation(self, session, subject, rtype, object):
"""delete a relation from the source"""
- raise NotImplementedError()
+ raise NotImplementedError(self)
# system source interface #################################################
def eid_type_source(self, session, eid):
"""return a tuple (type, source, extid) for the entity with id <eid>"""
- raise NotImplementedError()
+ raise NotImplementedError(self)
def create_eid(self, session):
- raise NotImplementedError()
+ raise NotImplementedError(self)
def add_info(self, session, entity, source, extid):
"""add type and source info for an eid into the system table"""
- raise NotImplementedError()
+ raise NotImplementedError(self)
def update_info(self, session, entity, need_fti_update):
"""mark entity as being modified, fulltext reindex if needed"""
- raise NotImplementedError()
+ raise NotImplementedError(self)
def delete_info_multi(self, session, entities, uri):
"""delete system information on deletion of a list of entities with the
same etype and belinging to the same source
"""
- raise NotImplementedError()
+ raise NotImplementedError(self)
def modified_entities(self, session, etypes, mtime):
"""return a 2-uple:
@@ -480,41 +480,41 @@
* list of (etype, eid) of entities of the given types which have been
deleted since the given timestamp
"""
- raise NotImplementedError()
+ raise NotImplementedError(self)
def index_entity(self, session, entity):
"""create an operation to [re]index textual content of the given entity
on commit
"""
- raise NotImplementedError()
+ raise NotImplementedError(self)
def fti_unindex_entities(self, session, entities):
"""remove text content for entities from the full text index
"""
- raise NotImplementedError()
+ raise NotImplementedError(self)
def fti_index_entities(self, session, entities):
"""add text content of created/modified entities to the full text index
"""
- raise NotImplementedError()
+ raise NotImplementedError(self)
# sql system source interface #############################################
def sqlexec(self, session, sql, args=None):
"""execute the query and return its result"""
- raise NotImplementedError()
+ raise NotImplementedError(self)
def temp_table_def(self, selection, solution, table, basemap):
- raise NotImplementedError()
+ raise NotImplementedError(self)
def create_index(self, session, table, column, unique=False):
- raise NotImplementedError()
+ raise NotImplementedError(self)
def drop_index(self, session, table, column, unique=False):
- raise NotImplementedError()
+ raise NotImplementedError(self)
def create_temp_table(self, session, table, schema):
- raise NotImplementedError()
+ raise NotImplementedError(self)
def clean_temp_data(self, session, temptables):
"""remove temporary data, usually associated to temporary tables"""
--- a/server/sources/storages.py Fri Dec 16 14:35:25 2011 +0100
+++ b/server/sources/storages.py Wed Dec 21 18:01:48 2011 +0100
@@ -17,6 +17,7 @@
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""custom storages for the system source"""
+import os
from os import unlink, path as osp
from contextlib import contextmanager
@@ -121,7 +122,7 @@
"""
fpath = source.binary_to_str(value)
try:
- return Binary(file(fpath, 'rb').read())
+ return Binary.from_file(fpath)
except EnvironmentError, ex:
source.critical("can't open %s: %s", value, ex)
return None
@@ -129,18 +130,18 @@
def entity_added(self, entity, attr):
"""an entity using this storage for attr has been added"""
if entity._cw.transaction_data.get('fs_importing'):
- binary = Binary(file(entity.cw_edited[attr].getvalue(), 'rb').read())
+ binary = Binary.from_file(entity.cw_edited[attr].getvalue())
else:
binary = entity.cw_edited.pop(attr)
fpath = self.new_fs_path(entity, attr)
# bytes storage used to store file's path
entity.cw_edited.edited_attribute(attr, Binary(fpath))
- file(fpath, 'wb').write(binary.getvalue())
+ binary.to_file(fpath)
AddFileOp.get_instance(entity._cw).add_data(fpath)
return binary
def entity_updated(self, entity, attr):
- """an entity using this storage for attr has been updatded"""
+ """an entity using this storage for attr has been updated"""
# get the name of the previous file containing the value
oldpath = self.current_fs_path(entity, attr)
if entity._cw.transaction_data.get('fs_importing'):
@@ -149,7 +150,7 @@
# the file as the actual content of the attribute
fpath = entity.cw_edited[attr].getvalue()
assert fpath is not None
- binary = Binary(file(fpath, 'rb').read())
+ binary = Binary.from_file(fpath)
else:
# We must store the content of the attributes
# into a file to stay consistent with the behaviour of entity_add.
@@ -168,7 +169,7 @@
fpath = self.new_fs_path(entity, attr)
assert not osp.exists(fpath)
# write attribute value on disk
- file(fpath, 'wb').write(binary.getvalue())
+ binary.to_file(fpath)
# Mark the new file as added during the transaction.
# The file will be removed on rollback
AddFileOp.get_instance(entity._cw).add_data(fpath)
@@ -208,9 +209,9 @@
return fspath
def current_fs_path(self, entity, attr):
- """return the current fs_path of the tribute.
-
- Return None is the attr is not stored yet."""
+ """return the current fs_path of the attribute, or None if the attribute
+ is not stored yet.
+ """
sysource = entity._cw.cnxset.source('system')
cu = sysource.doexec(entity._cw,
'SELECT cw_%s FROM cw_%s WHERE cw_eid=%s' % (
--- a/server/ssplanner.py Fri Dec 16 14:35:25 2011 +0100
+++ b/server/ssplanner.py Wed Dec 21 18:01:48 2011 +0100
@@ -392,7 +392,8 @@
# cachekey
if inputmap or self.plan.cache_key is None:
cachekey = None
- # union may have been splited into subqueries, rebuild a cache key
+ # the union may have been split into subqueries, in which case we can't
+ # use plan.cache_key as-is; rebuild a cache key
elif isinstance(self.plan.cache_key, tuple):
cachekey = list(self.plan.cache_key)
cachekey[0] = union.as_string()
--- a/server/test/unittest_querier.py Fri Dec 16 14:35:25 2011 +0100
+++ b/server/test/unittest_querier.py Wed Dec 21 18:01:48 2011 +0100
@@ -1495,5 +1495,11 @@
rset = self.execute('Any X WHERE X is CWUser, X has_text "bidule", X in_state S, S name SN')
self.assertEqual(rset.rows, [[peid]])
+
+ def test_nonregr_sql_cache(self):
+ # different SQL is generated depending on whether 'name' is None (IS NULL).
+ self.assertFalse(self.execute('Any X WHERE X is CWEType, X name %(name)s', {'name': None}))
+ self.assertTrue(self.execute('Any X WHERE X is CWEType, X name %(name)s', {'name': 'CWEType'}))
+
if __name__ == '__main__':
unittest_main()
--- a/web/data/cubicweb.old.css Fri Dec 16 14:35:25 2011 +0100
+++ b/web/data/cubicweb.old.css Wed Dec 21 18:01:48 2011 +0100
@@ -813,7 +813,7 @@
table.listing th {
font-weight: bold;
- background: #ebe8d9 url("button.png") repeat-x;
+ background: %(listingHeaderBgColor)s;
}
table.listing td a,
--- a/web/htmlwidgets.py Fri Dec 16 14:35:25 2011 +0100
+++ b/web/htmlwidgets.py Wed Dec 21 18:01:48 2011 +0100
@@ -350,7 +350,7 @@
self.w(u'<tr class="%s" %s>' % (klass, self.highlight))
for column, sortvalue in self.itercols(rowindex):
attrs = dict(column.cell_attrs)
- attrs["cubicweb:sortvalue"] = 'json:' + sortvalue
+ attrs["cubicweb:sortvalue"] = sortvalue
attrs = ('%s="%s"' % (name, value) for name, value in attrs.iteritems())
self.w(u'<td %s>' % (' '.join(attrs)))
for cellvid, colindex in column.cellrenderers:
--- a/web/views/cwsources.py Fri Dec 16 14:35:25 2011 +0100
+++ b/web/views/cwsources.py Wed Dec 21 18:01:48 2011 +0100
@@ -23,7 +23,7 @@
_ = unicode
import logging
-from itertools import repeat, chain
+from itertools import repeat
from logilab.mtconverter import xml_escape
from logilab.common.decorators import cachedproperty
@@ -136,7 +136,7 @@
errors = zip(repeat(_('error')), checker.errors)
warnings = zip(repeat(_('warning')), checker.warnings)
infos = zip(repeat(_('warning')), checker.infos)
- self.wview('pyvaltable', pyvalue=chain(errors, warnings, infos))
+ self.wview('pyvaltable', pyvalue=errors + warnings + infos)
class MappingChecker(object):