# HG changeset patch
# User Nicola Spanti
# Date 1576747909 -3600
# Node ID dadbd4148a4429fd461fa7051c836915b6a197ca
# Parent 5d15685728958d4792ad4272d1342a15221791fc
[py] Make flake8 less angry with cubicweb/server/sources/

diff -r 5d1568572895 -r dadbd4148a44 cubicweb/server/sources/datafeed.py
--- a/cubicweb/server/sources/datafeed.py	Thu Dec 19 00:59:27 2019 +0100
+++ b/cubicweb/server/sources/datafeed.py	Thu Dec 19 10:31:49 2019 +0100
@@ -42,7 +42,7 @@
 
     options = (
         ('synchronize',
-         {'type' : 'yn',
+         {'type': 'yn',
           'default': True,
           'help': ('Is the repository responsible to automatically import '
                    'content from this source? '
@@ -52,14 +52,14 @@
           'group': 'datafeed-source', 'level': 2,
           }),
         ('synchronization-interval',
-         {'type' : 'time',
+         {'type': 'time',
           'default': '5min',
           'help': ('Interval in seconds between synchronization with the '
                    'external source (default to 5 minutes, must be >= 1 min).'),
           'group': 'datafeed-source', 'level': 2,
           }),
         ('max-lock-lifetime',
-         {'type' : 'time',
+         {'type': 'time',
           'default': '1h',
           'help': ('Maximum time allowed for a synchronization to be run. '
                    'Exceeded that time, the synchronization will be considered '
@@ -68,7 +68,7 @@
           'group': 'datafeed-source', 'level': 2,
           }),
         ('delete-entities',
-         {'type' : 'yn',
+         {'type': 'yn',
           'default': False,
           'help': ('Should already imported entities not found anymore on the '
                    'external source be deleted? Handling of this parameter '
@@ -89,7 +89,7 @@
           }),
         ('use-cwuri-as-url',
          {'type': 'yn',
-          'default': None, # explicitly unset
+          'default': None,  # explicitly unset
           'help': ('Use cwuri (i.e. external URL) for link to the entity '
                    'instead of its local URL.'),
           'group': 'datafeed-source', 'level': 1,
@@ -353,8 +353,8 @@
         self.notify_checked(entity)
         mdate = attrs.get('modification_date')
         if not mdate or mdate > entity.modification_date:
-            attrs = dict( (k, v) for k, v in attrs.items()
-                          if v != getattr(entity, k))
+            attrs = dict((k, v) for k, v in attrs.items()
+                         if v != getattr(entity, k))
             if attrs:
                 entity.cw_set(**attrs)
                 self.notify_updated(entity)
@@ -424,6 +424,6 @@
 try:
     from logilab.common import urllib2ext
     _OPENER.add_handler(urllib2ext.HTTPGssapiAuthHandler())
-except ImportError: # python-kerberos not available
+except ImportError:  # python-kerberos not available
     pass
 _OPENER.add_handler(HTTPCookieProcessor(CookieJar()))
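
The datafeed.py hunks are purely mechanical flake8 fixes: drop the space before the colon in dict literals (E203), use two spaces before inline comments (E261), and re-align wrapped expressions with the opening bracket (E128). For illustration only (the option name and values below are made up, not taken from the patch), the pattern being fixed looks like this:

    # before: flake8 complains about E203 (whitespace before ':') and
    # E261 (at least two spaces before inline comment)
    options = (
        ('some-option',
         {'type' : 'yn',
          'default': None, # explicitly unset
          }),
    )

    # after: same data, flake8-clean
    options = (
        ('some-option',
         {'type': 'yn',
          'default': None,  # explicitly unset
          }),
    )
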
diff -r 5d1568572895 -r dadbd4148a44 cubicweb/server/sources/ldapfeed.py
--- a/cubicweb/server/sources/ldapfeed.py	Thu Dec 19 00:59:27 2019 +0100
+++ b/cubicweb/server/sources/ldapfeed.py	Thu Dec 19 10:31:49 2019 +0100
@@ -65,48 +65,48 @@
 
     options = (
         ('auth-mode',
-         {'type' : 'choice',
+         {'type': 'choice',
          'default': 'simple',
          'choices': ('simple', 'digest_md5', 'gssapi'),
          'help': 'authentication mode used to authenticate user to the ldap.',
          'group': 'ldap-source', 'level': 3,
          }),
        ('auth-realm',
-         {'type' : 'string',
+         {'type': 'string',
          'default': None,
          'help': 'realm to use when using gssapi/kerberos authentication.',
          'group': 'ldap-source', 'level': 3,
          }),
        ('data-cnx-dn',
-         {'type' : 'string',
+         {'type': 'string',
          'default': '',
          'help': 'user dn to use to open data connection to the ldap (eg used \
to respond to rql queries). Leave empty for anonymous bind',
          'group': 'ldap-source', 'level': 1,
          }),
        ('data-cnx-password',
-         {'type' : 'string',
+         {'type': 'string',
          'default': '',
          'help': 'password to use to open data connection to the ldap (eg used to respond to rql queries). Leave empty for anonymous bind.',
          'group': 'ldap-source', 'level': 1,
          }),
        ('user-base-dn',
-         {'type' : 'string',
+         {'type': 'string',
          'default': '',
          'help': 'base DN to lookup for users; disable user importation mechanism if unset',
          'group': 'ldap-source', 'level': 1,
          }),
        ('user-scope',
-         {'type' : 'choice',
+         {'type': 'choice',
          'default': 'ONELEVEL',
          'choices': ('BASE', 'ONELEVEL', 'SUBTREE'),
          'help': 'user search scope (valid values: "BASE", "ONELEVEL", "SUBTREE")',
          'group': 'ldap-source', 'level': 1,
          }),
        ('user-classes',
-         {'type' : 'csv',
+         {'type': 'csv',
          'default': ('top', 'posixAccount'),
          'help': 'classes of user (with Active Directory, you want to say "user" here)',
          'group': 'ldap-source', 'level': 1,
@@ -118,39 +118,39 @@
          'group': 'ldap-source', 'level': 2,
          }),
        ('user-login-attr',
-         {'type' : 'string',
+         {'type': 'string',
          'default': 'uid',
          'help': 'attribute used as login on authentication (with Active Directory, you want to use "sAMAccountName" here)',
          'group': 'ldap-source', 'level': 1,
          }),
        ('user-default-group',
-         {'type' : 'csv',
+         {'type': 'csv',
          'default': ('users',),
          'help': 'name of a group in which ldap users will be by default. \
You can set multiple groups by separating them by a comma.',
          'group': 'ldap-source', 'level': 1,
          }),
        ('user-attrs-map',
-         {'type' : 'named',
+         {'type': 'named',
          'default': {'uid': 'login'},
          'help': 'map from ldap user attributes to cubicweb attributes (with Active Directory, you want to use sAMAccountName:login,mail:email,givenName:firstname,sn:surname)',
          'group': 'ldap-source', 'level': 1,
          }),
        ('group-base-dn',
-         {'type' : 'string',
+         {'type': 'string',
          'default': '',
          'help': 'base DN to lookup for groups; disable group importation mechanism if unset',
          'group': 'ldap-source', 'level': 1,
          }),
        ('group-scope',
-         {'type' : 'choice',
+         {'type': 'choice',
          'default': 'ONELEVEL',
          'choices': ('BASE', 'ONELEVEL', 'SUBTREE'),
          'help': 'group search scope (valid values: "BASE", "ONELEVEL", "SUBTREE")',
          'group': 'ldap-source', 'level': 1,
          }),
        ('group-classes',
-         {'type' : 'csv',
+         {'type': 'csv',
          'default': ('top', 'posixGroup'),
          'help': 'classes of group',
          'group': 'ldap-source', 'level': 1,
@@ -162,7 +162,7 @@
          'group': 'ldap-source', 'level': 2,
          }),
        ('group-attrs-map',
-         {'type' : 'named',
+         {'type': 'named',
          'default': {'cn': 'name', 'memberUid': 'member'},
          'help': 'map from ldap group attributes to cubicweb attributes',
          'group': 'ldap-source', 'level': 1,
@@ -273,7 +273,10 @@
         self.info('connecting %s://%s:%s as %s', protocol,
                   host, port, user and user['dn'] or 'anonymous')
         server = ldap3.Server(host, port=int(port))
-        conn = ldap3.Connection(server, user=user and user['dn'], client_strategy=ldap3.STRATEGY_SYNC_RESTARTABLE, auto_referrals=False)
+        conn = ldap3.Connection(
+            server, user=user and user['dn'],
+            client_strategy=ldap3.STRATEGY_SYNC_RESTARTABLE,
+            auto_referrals=False)
         # Now bind with the credentials given. Let exceptions propagate out.
         if user is None:
             # XXX always use simple bind for data connection
@@ -330,7 +333,7 @@
         """Turn an ldap received item into a proper dict."""
         itemdict = {'dn': dn}
         for key, value in iterator:
-            if self.user_attrs.get(key) == 'upassword': # XXx better password detection
+            if self.user_attrs.get(key) == 'upassword':  # XXx better password detection
                 value = value[0].encode('utf-8')
                 # we only support ldap_salted_sha1 for ldap sources, see: server/utils.py
                 if not value.startswith(b'{SSHA}'):
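
The only non-mechanical change in ldapfeed.py is wrapping the over-long ldap3.Connection(...) call across several lines (E501, line too long). A minimal sketch of the same call pattern, with a made-up host and bind DN, reusing the STRATEGY_SYNC_RESTARTABLE constant exactly as the patched code does (the constant name depends on the ldap3 version cubicweb pins):

    import ldap3

    # placeholder connection parameters, for illustration only
    server = ldap3.Server('ldap.example.org', port=389)
    conn = ldap3.Connection(
        server, user='cn=reader,dc=example,dc=org', password='secret',
        client_strategy=ldap3.STRATEGY_SYNC_RESTARTABLE,
        auto_referrals=False)
    conn.bind()
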
diff -r 5d1568572895 -r dadbd4148a44 cubicweb/server/sources/rql2sql.py
--- a/cubicweb/server/sources/rql2sql.py	Thu Dec 19 00:59:27 2019 +0100
+++ b/cubicweb/server/sources/rql2sql.py	Thu Dec 19 10:31:49 2019 +0100
@@ -68,6 +68,8 @@
 
 def default_update_cb_stack(self, stack):
     stack.append(self.source_execute)
+
+
 FunctionDescr.update_cb_stack = default_update_cb_stack
 FunctionDescr.source_execute = None
 
@@ -75,6 +77,7 @@
 def length_source_execute(source, session, value):
     return len(value.getvalue())
 
+
 LENGTH = get_func_descr('LENGTH')
 LENGTH.source_execute = length_source_execute
 
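
rql2sql.py only gains blank lines: flake8 expects two blank lines between a top-level def and the module-level statements that follow it (E305). A tiny made-up example of the rule, unrelated to the patched functions:

    def source_execute(source, session, value):
        return value


    # without the two blank lines above, flake8 reports
    # E305 expected 2 blank lines after class or function definition
    CALLBACKS = {'LENGTH': source_execute}
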
diff -r 5d1568572895 -r dadbd4148a44 cubicweb/server/sources/storages.py
--- a/cubicweb/server/sources/storages.py	Thu Dec 19 00:59:27 2019 +0100
+++ b/cubicweb/server/sources/storages.py	Thu Dec 19 10:31:49 2019 +0100
@@ -18,7 +18,6 @@
 """custom storages for the system source"""
 
 import os
-import sys
 from os import unlink, path as osp
 from contextlib import contextmanager
 import tempfile
@@ -35,6 +34,7 @@
 
 def set_attribute_storage(repo, etype, attr, storage):
     repo.system_source.set_storage(etype, attr, storage)
+
 
 def unset_attribute_storage(repo, etype, attr):
     repo.system_source.unset_storage(etype, attr)
@@ -71,12 +71,15 @@
     def entity_added(self, entity, attr):
         """an entity using this storage for attr has been added"""
         raise NotImplementedError()
+
     def entity_updated(self, entity, attr):
         """an entity using this storage for attr has been updatded"""
         raise NotImplementedError()
+
     def entity_deleted(self, entity, attr):
         """an entity using this storage for attr has been deleted"""
         raise NotImplementedError()
+
     def migrate_entity(self, entity, attribute):
         """migrate an entity attribute to the storage"""
         raise NotImplementedError()
@@ -86,6 +89,7 @@
 # * better file path attribution
 # * handle backup/restore
 
+
 def uniquify_path(dirpath, basename):
     """return a file descriptor and unique file name for `basename` in `dirpath`
     """
@@ -93,6 +97,7 @@
     base, ext = osp.splitext(path)
     return tempfile.mkstemp(prefix=base, suffix=ext, dir=dirpath)
 
+
 @contextmanager
 def fsimport(cnx):
     present = 'fs_importing' in cnx.transaction_data
@@ -129,7 +134,6 @@
         binary.to_file(fileobj)
         fileobj.close()
 
-
     def callback(self, source, cnx, value):
         """sql generator callback when some attribute with a custom storage is
         accessed
@@ -220,7 +224,7 @@
         if name is not None:
             basename.append(name)
         fd, fspath = uniquify_path(self.default_directory,
-                                  '_'.join(basename))
+                                   '_'.join(basename))
         if fspath is None:
             msg = entity._cw._('failed to uniquify path (%s, %s)') % (
                 self.default_directory, '_'.join(basename))
@@ -235,9 +239,9 @@
         sysource = entity._cw.repo.system_source
         cu = sysource.doexec(entity._cw,
                              'SELECT cw_%s FROM cw_%s WHERE cw_eid=%s' % (
-                                attr, entity.cw_etype, entity.eid))
+                                 attr, entity.cw_etype, entity.eid))
         rawvalue = cu.fetchone()[0]
-        if rawvalue is None: # no previous value
+        if rawvalue is None:  # no previous value
             return None
         fspath = sysource._process_value(rawvalue, cu.description[0],
                                          binarywrap=bytes)
@@ -266,6 +270,7 @@
         except Exception as ex:
             self.error("can't remove %s: %s" % (filepath, ex))
 
+
 class DeleteFileOp(hook.DataOperationMixIn, hook.Operation):
     def postcommit_event(self):
         for filepath in self.get_data():
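
Since the whole changeset is about silencing flake8, the natural check is to re-run flake8 on the touched package. A minimal sketch, assuming flake8 is installed and the script is run from the root of a cubicweb checkout:

    import subprocess
    import sys

    # run flake8 only on the directory touched by this changeset;
    # a non-zero return code means warnings remain
    result = subprocess.run(
        [sys.executable, '-m', 'flake8', 'cubicweb/server/sources/'],
        capture_output=True, text=True)
    print(result.stdout or 'flake8 is happy')
    sys.exit(result.returncode)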