backport stable
author Sylvain Thénault <sylvain.thenault@logilab.fr>
Fri, 12 Oct 2012 16:05:16 +0200
changeset 8573 ae0a567dff30
parent 8570 e20057a9ceea (current diff)
parent 8572 e54b3bc39011 (diff)
child 8576 c9c72ac10db3
backport stable
dbapi.py
server/repository.py
server/sources/datafeed.py
server/sources/native.py
server/test/unittest_repository.py
sobjects/ldapparser.py
--- a/.hgtags	Fri Oct 12 15:38:58 2012 +0200
+++ b/.hgtags	Fri Oct 12 16:05:16 2012 +0200
@@ -264,3 +264,5 @@
 9aa5553b26520ceb68539e7a32721b5cd5393e16 cubicweb-debian-version-3.15.2-1
 0e012eb80990ca6f91aa9a8ad3324fbcf51435b1 cubicweb-version-3.15.3
 7ad423a5b6a883dbdf00e6c87a5f8ab121041640 cubicweb-debian-version-3.15.3-1
+63260486de89a9dc32128cd0eacef891a668977b cubicweb-version-3.15.4
+70cb36c826df86de465f9b69647cef7096dcf12c cubicweb-debian-version-3.15.4-1
--- a/cwctl.py	Fri Oct 12 15:38:58 2012 +0200
+++ b/cwctl.py	Fri Oct 12 16:05:16 2012 +0200
@@ -917,7 +917,7 @@
                     break
             cnx.load_appobjects()
             repo = cnx._repo
-            mih = ServerMigrationHelper(None, repo=repo, cnx=cnx,
+            mih = ServerMigrationHelper(None, repo=repo, cnx=cnx, verbosity=0,
                                          # hack so it don't try to load fs schema
                                         schema=1)
         else:
--- a/dbapi.py	Fri Oct 12 15:38:58 2012 +0200
+++ b/dbapi.py	Fri Oct 12 16:05:16 2012 +0200
@@ -103,7 +103,7 @@
         return Repository(config, TasksManager(), vreg=vreg)
     elif method == 'zmq':
         from cubicweb.zmqclient import ZMQRepositoryClient
-        return ZMQRepositoryClient(config, vreg=vreg)
+        return ZMQRepositoryClient(database)
     else: # method == 'pyro'
         # resolve the Pyro object
         from logilab.common.pyro_ext import ns_get_proxy, get_proxy
@@ -592,7 +592,12 @@
             esubpath = list(subpath)
             esubpath.remove('views')
             esubpath.append(join('web', 'views'))
+        # first load available configs, necessary for proper persistent
+        # properties initialization
+        config.load_available_configs()
+        # then init cubes
         config.init_cubes(cubes)
+        # then load appobjects into the registry
         vpath = config.build_appobjects_path(reversed(config.cubes_path()),
                                              evobjpath=esubpath,
                                              tvobjpath=subpath)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/scripts/repair_splitbrain_ldapuser_source.py	Fri Oct 12 16:05:16 2012 +0200
@@ -0,0 +1,108 @@
+"""
+CAUTION: READ THIS CAREFULLY
+
+Sometimes it happens that an ldap source (specifically the ldapuser type)
+yields "ghost" users. The reasons may vary (server upgrade while some
+instances are still running & syncing with the ldap source, unmanaged
+updates to the upstream ldap, etc.).
+
+This script was written and refined enough times that we are confident
+that it does something reasonable (at least it did for the
+target application).
+
+However, you should really REALLY understand what it does before
+deciding to apply it to your case. And then ADAPT it to your needs.
+
+"""
+
+import sys
+import base64
+from collections import defaultdict
+
+from cubicweb.server.session import hooks_control
+
+try:
+    source_name, = __args__
+    source = repo.sources_by_uri[source_name]
+except ValueError:
+    print('you should specify the source name as script argument (i.e. after --'
+          ' on the command line)')
+    sys.exit(1)
+except KeyError:
+    print '%s is not an active source' % source_name
+    sys.exit(1)
+
+# check source is reachable before doing anything
+if not source.get_connection().cnx:
+    print '%s is not reachable. Fix this before running this script' % source_name
+    sys.exit(1)
+
+def find_dupes():
+    # group the eids of entities coming from the given source by their
+    # base64-decoded, lower-cased external id
+    rset = sql("SELECT eid, extid FROM entities WHERE source='%s'" % source_name)
+    extid2eids = defaultdict(list)
+    for eid, extid in rset:
+        extid2eids[extid].append(eid)
+    return dict((base64.b64decode(extid).lower(), eids)
+                for extid, eids in extid2eids.items()
+                if len(eids) > 1)
+
+def merge_dupes(dupes, docommit=False):
+    gone_eids = []
+    CWUser = schema['CWUser']
+    for extid, eids in dupes.items():
+        newest = eids.pop() # we merge everything on the newest
+        print 'merging ghosts of', extid, 'into', newest
+        # now we merge pairwise into the newest
+        for old in eids:
+            subst = {'old': old, 'new': newest}
+            print '  merging', old
+            gone_eids.append(old)
+            for rschema in CWUser.subject_relations():
+                if rschema.final or rschema == 'identity':
+                    continue
+                if CWUser.rdef(rschema, 'subject').composite == 'subject':
+                    # entities composed into the old user are wiped ...
+                    # think about email addresses, preferences
+                    for eschema in rschema.objects():
+                        rql('DELETE %s X WHERE U %s X, U eid %%(old)s' % (eschema, rschema), subst)
+                else:
+                    # relink the new user to its old relations
+                    rql('SET NU %s X WHERE NU eid %%(new)s, NOT NU %s X, OU %s X, OU eid %%(old)s' %
+                        (rschema, rschema, rschema), subst)
+                    # delete the old relations
+                    rql('DELETE U %s X WHERE U eid %%(old)s' % rschema, subst)
+            # same thing ...
+            for rschema in CWUser.object_relations():
+                if rschema.final or rschema == 'identity':
+                    continue
+                rql('SET X %s NU WHERE NU eid %%(new)s, NOT X %s NU, X %s OU, OU eid %%(old)s' %
+                    (rschema, rschema, rschema), subst)
+                rql('DELETE X %s U WHERE U eid %%(old)s' % rschema, subst)
+    if not docommit:
+        rollback()
+        return
+    commit() # XXX flushing operations is wanted rather than really committing
+    print 'clean up entities table'
+    sql('DELETE FROM entities WHERE eid IN (%s)' % (', '.join(str(x) for x in gone_eids)))
+    commit()
+
+def main():
+    dupes = find_dupes()
+    if not dupes:
+        print 'No duplicate user'
+        return
+
+    print 'Found %s duplicate user instances' % len(dupes)
+
+    while True:
+        print 'Fix or dry-run? (f/d)  ... or Ctrl-C to break out'
+        answer = raw_input('> ')
+        if answer.lower() not in ('f', 'd'):
+            continue
+        print 'Please STOP THE APPLICATION INSTANCES (service or interactive), and press Return when done.'
+        raw_input('<I swear all running instances and workers of the application are stopped>')
+        with hooks_control(session, session.HOOKS_DENY_ALL):
+            merge_dupes(dupes, docommit=answer=='f')
+
+main()
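
A hedged usage note (the instance name "myinstance" and the "ldap" source name
below are placeholders): the script relies on the globals provided by a
migration shell (repo, session, rql, sql, __args__), so it is meant to be run
through cubicweb-ctl, with the source name passed after "--" as its own error
message suggests:

    cubicweb-ctl shell myinstance misc/scripts/repair_splitbrain_ldapuser_source.py -- ldap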
--- a/server/repository.py	Fri Oct 12 15:38:58 2012 +0200
+++ b/server/repository.py	Fri Oct 12 16:05:16 2012 +0200
@@ -119,6 +119,26 @@
                             {'x': eidfrom, 'y': eidto})
 
 
+def preprocess_inlined_relations(session, entity):
+    """when an entity is added, check if it has some inlined relation which
+    requires to be extrated for proper call hooks
+    """
+    relations = []
+    activeintegrity = session.is_hook_category_activated('activeintegrity')
+    eschema = entity.e_schema
+    for attr in entity.cw_edited.iterkeys():
+        rschema = eschema.subjrels[attr]
+        if not rschema.final: # inlined relation
+            value = entity.cw_edited[attr]
+            relations.append((attr, value))
+            session.update_rel_cache_add(entity.eid, attr, value)
+            rdef = session.rtype_eids_rdef(attr, entity.eid, value)
+            if rdef.cardinality[1] in '1?' and activeintegrity:
+                with session.security_enabled(read=False):
+                    session.execute('DELETE X %s Y WHERE Y eid %%(y)s' % attr,
+                                    {'x': entity.eid, 'y': value})
+    return relations
+
 
 class NullEventBus(object):
     def publish(self, msg):
@@ -1333,7 +1353,6 @@
         entity._cw_is_saved = False # entity has an eid but is not yet saved
         # init edited_attributes before calling before_add_entity hooks
         entity.cw_edited = edited
-        eschema = entity.e_schema
         source = self.locate_etype_source(entity.__regid__)
         # allocate an eid to the entity before calling hooks
         entity.eid = self.system_source.create_eid(session)
@@ -1344,19 +1363,7 @@
         prefill_entity_caches(entity)
         if source.should_call_hooks:
             self.hm.call_hooks('before_add_entity', session, entity=entity)
-        relations = []
-        activeintegrity = session.is_hook_category_activated('activeintegrity')
-        for attr in edited.iterkeys():
-            rschema = eschema.subjrels[attr]
-            if not rschema.final: # inlined relation
-                value = edited[attr]
-                relations.append((attr, value))
-                session.update_rel_cache_add(entity.eid, attr, value)
-                rdef = session.rtype_eids_rdef(attr, entity.eid, value)
-                if rdef.cardinality[1] in '1?' and activeintegrity:
-                    with session.security_enabled(read=False):
-                        session.execute('DELETE X %s Y WHERE Y eid %%(y)s' % attr,
-                                        {'x': entity.eid, 'y': value})
+        relations = preprocess_inlined_relations(session, entity)
         edited.set_defaults()
         if session.is_hook_category_activated('integrity'):
             edited.check(creation=True)
@@ -1519,7 +1526,7 @@
         activintegrity = session.is_hook_category_activated('activeintegrity')
         for rtype, eids_subj_obj in relations.iteritems():
             if server.DEBUG & server.DBG_REPO:
-                for subjeid, objeid in relations:
+                for subjeid, objeid in eids_subj_obj:
                     print 'ADD relation', subjeid, rtype, objeid
             for subjeid, objeid in eids_subj_obj:
                 source = self.locate_relation_source(session, subjeid, rtype, objeid)
--- a/server/sources/datafeed.py	Fri Oct 12 15:38:58 2012 +0200
+++ b/server/sources/datafeed.py	Fri Oct 12 16:05:16 2012 +0200
@@ -30,6 +30,7 @@
 from lxml import etree
 
 from cubicweb import RegistryNotFound, ObjectNotFound, ValidationError, UnknownEid
+from cubicweb.server.repository import preprocess_inlined_relations
 from cubicweb.server.sources import AbstractSource
 from cubicweb.appobject import AppObject
 
@@ -254,11 +255,20 @@
         """called by the repository after an entity stored here has been
         inserted in the system table.
         """
+        relations = preprocess_inlined_relations(session, entity)
         if session.is_hook_category_activated('integrity'):
             entity.cw_edited.check(creation=True)
         self.repo.system_source.add_entity(session, entity)
         entity.cw_edited.saved = entity._cw_is_saved = True
         sourceparams['parser'].after_entity_copy(entity, sourceparams)
+        # call hooks for inlined relations
+        call_hooks = self.repo.hm.call_hooks
+        if self.should_call_hooks:
+            for attr, value in relations:
+                call_hooks('before_add_relation', session,
+                           eidfrom=entity.eid, rtype=attr, eidto=value)
+                call_hooks('after_add_relation', session,
+                           eidfrom=entity.eid, rtype=attr, eidto=value)
 
     def source_cwuris(self, session):
         sql = ('SELECT extid, eid, type FROM entities, cw_source_relation '
@@ -399,6 +409,7 @@
                 entity.cw_set(**attrs)
                 self.notify_updated(entity)
 
+
 class DataFeedXMLParser(DataFeedParser):
 
     def process(self, url, raise_on_error=False):
--- a/server/sources/native.py	Fri Oct 12 15:38:58 2012 +0200
+++ b/server/sources/native.py	Fri Oct 12 16:05:16 2012 +0200
@@ -61,7 +61,7 @@
 from cubicweb.schema import VIRTUAL_RTYPES
 from cubicweb.cwconfig import CubicWebNoAppConfiguration
 from cubicweb.server import hook
-from cubicweb.server.utils import crypt_password, eschema_eid
+from cubicweb.server.utils import crypt_password, eschema_eid, verify_and_update
 from cubicweb.server.sqlutils import SQL_PREFIX, SQLAdapterMixIn
 from cubicweb.server.rqlannotation import set_qdata
 from cubicweb.server.hook import CleanupDeletedEidsCacheOp
@@ -1629,7 +1629,22 @@
         # get eid from login and (crypted) password
         rset = self.source.syntax_tree_search(session, self._auth_rqlst, args)
         try:
-            return rset[0][0]
+            user = rset[0][0]
+            # If the stored hash uses a deprecated scheme (e.g. DES or MD5 used
+            # before 3.14.7), update with a fresh one
+            if pwd.getvalue():
+                verify, newhash = verify_and_update(password, pwd.getvalue())
+                if not verify: # should not happen, but...
+                    raise AuthenticationError('bad password')
+                if newhash:
+                    session.system_sql("UPDATE %s SET %s=%%(newhash)s WHERE %s=%%(login)s" % (
+                                        SQL_PREFIX + 'CWUser',
+                                        SQL_PREFIX + 'upassword',
+                                        SQL_PREFIX + 'login'),
+                                       {'newhash': self.source._binary(newhash),
+                                        'login': login})
+                    session.commit(free_cnxset=False)
+            return user
         except IndexError:
             raise AuthenticationError('bad password')
 
--- a/server/sources/pyrorql.py	Fri Oct 12 15:38:58 2012 +0200
+++ b/server/sources/pyrorql.py	Fri Oct 12 16:05:16 2012 +0200
@@ -91,5 +91,9 @@
         except AttributeError:
             # inmemory connection
             pass
-        return super(PyroRQLSource, self).check_connection(cnx)
+        try:
+            return super(PyroRQLSource, self).check_connection(cnx)
+        except ConnectionClosedError:
+            # try to reconnect
+            return self.get_connection()
 
--- a/server/sources/remoterql.py	Fri Oct 12 15:38:58 2012 +0200
+++ b/server/sources/remoterql.py	Fri Oct 12 16:05:16 2012 +0200
@@ -302,7 +302,7 @@
             try:
                 cnx.check()
                 return # ok
-            except (BadConnectionId, ConnectionClosedError):
+            except BadConnectionId:
                 pass
         # try to reconnect
         return self.get_connection()
--- a/server/test/unittest_repository.py	Fri Oct 12 15:38:58 2012 +0200
+++ b/server/test/unittest_repository.py	Fri Oct 12 16:05:16 2012 +0200
@@ -416,8 +416,7 @@
     def _zmq_client(self, done):
         cnxprops = ConnectionProperties('zmq')
         try:
-            cnx = connect(self.repo.config.appid, u'admin', password=u'gingkow',
-                          host='tcp://127.0.0.1:41415',
+            cnx = connect('tcp://127.0.0.1:41415', u'admin', password=u'gingkow',
                           cnxprops=cnxprops,
                           initlog=False) # don't reset logging configuration
             try:
--- a/server/test/unittest_security.py	Fri Oct 12 15:38:58 2012 +0200
+++ b/server/test/unittest_security.py	Fri Oct 12 16:05:16 2012 +0200
@@ -25,9 +25,10 @@
 from rql import RQLException
 
 from cubicweb.devtools.testlib import CubicWebTC
-from cubicweb import Unauthorized, ValidationError, QueryError
+from cubicweb import Unauthorized, ValidationError, QueryError, Binary
 from cubicweb.schema import ERQLExpression
 from cubicweb.server.querier import check_read_access
+from cubicweb.server.utils import _CRYPTO_CTX
 
 
 class BaseSecurityTC(CubicWebTC):
@@ -35,7 +36,8 @@
     def setup_database(self):
         super(BaseSecurityTC, self).setup_database()
         self.create_user(self.request(), 'iaminusersgrouponly')
-
+        hash = _CRYPTO_CTX.encrypt('oldpassword', scheme='des_crypt')
+        self.create_user(self.request(), 'oldpassword', password=Binary(hash))
 
 class LowLevelSecurityFunctionTC(BaseSecurityTC):
 
@@ -60,6 +62,18 @@
             self.assertRaises(Unauthorized,
                               cu.execute, 'Any X,P WHERE X is CWUser, X upassword P')
 
+    def test_update_password(self):
+        """Ensure that if a user's password is stored with a deprecated hash, it will be updated on next login"""
+        oldhash = str(self.session.system_sql("SELECT cw_upassword FROM cw_CWUser WHERE cw_login = 'oldpassword'").fetchone()[0])
+        with self.login('oldpassword') as cu:
+            pass
+        newhash = str(self.session.system_sql("SELECT cw_upassword FROM cw_CWUser WHERE cw_login = 'oldpassword'").fetchone()[0])
+        self.assertNotEqual(oldhash, newhash)
+        self.assertTrue(newhash.startswith('$6$'))
+        with self.login('oldpassword') as cu:
+            pass
+        self.assertEqual(newhash, str(self.session.system_sql("SELECT cw_upassword FROM cw_CWUser WHERE cw_login = 'oldpassword'").fetchone()[0]))
+
 
 class SecurityRewritingTC(BaseSecurityTC):
     def hijack_source_execute(self):
--- a/server/utils.py	Fri Oct 12 15:38:58 2012 +0200
+++ b/server/utils.py	Fri Oct 12 16:05:16 2012 +0200
@@ -52,7 +52,9 @@
         return md5crypt(secret, self.salt.encode('ascii')).decode('utf-8')
     _calc_checksum = calc_checksum
 
-_CRYPTO_CTX = CryptContext(['sha512_crypt', CustomMD5Crypt, 'des_crypt', 'ldap_salted_sha1'])
+_CRYPTO_CTX = CryptContext(['sha512_crypt', CustomMD5Crypt, 'des_crypt', 'ldap_salted_sha1'],
+                           deprecated=['cubicwebmd5crypt', 'des_crypt'])
+verify_and_update = _CRYPTO_CTX.verify_and_update
 
 def crypt_password(passwd, salt=None):
     """return the encrypted password using the given salt or a generated one
@@ -62,8 +64,11 @@
     # empty hash, accept any password for backwards compat
     if salt == '':
         return salt
-    if _CRYPTO_CTX.verify(passwd, salt):
-        return salt
+    try:
+        if _CRYPTO_CTX.verify(passwd, salt):
+            return salt
+    except ValueError: # e.g. couldn't identify hash
+        pass
     # wrong password
     return ''
 
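
The authentication change in native.py relies on passlib's verify_and_update(),
exported just above. A minimal sketch of that behaviour, assuming passlib is
installed; the schemes and secret are illustrative only and independent of the
CubicWeb context:

    from passlib.context import CryptContext

    ctx = CryptContext(['sha512_crypt', 'des_crypt'], deprecated=['des_crypt'])

    legacy = ctx.encrypt('secret', scheme='des_crypt')     # hash with a deprecated scheme
    ok, newhash = ctx.verify_and_update('secret', legacy)
    assert ok and newhash is not None                      # rehashed with sha512_crypt
    assert newhash.startswith('$6$')

    ok, newhash = ctx.verify_and_update('secret', newhash)
    assert ok and newhash is None                          # preferred scheme, nothing to update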
--- a/skeleton/__pkginfo__.py.tmpl	Fri Oct 12 15:38:58 2012 +0200
+++ b/skeleton/__pkginfo__.py.tmpl	Fri Oct 12 16:05:16 2012 +0200
@@ -16,6 +16,12 @@
 __depends__ =  %(dependencies)s
 __recommends__ = {}
 
+classifiers = [
+    'Environment :: Web Environment',
+    'Framework :: CubicWeb',
+    'Programming Language :: Python',
+    'Programming Language :: JavaScript',
+    ]
 
 from os import listdir as _listdir
 from os.path import join, isdir
--- a/skeleton/setup.py	Fri Oct 12 15:38:58 2012 +0200
+++ b/skeleton/setup.py	Fri Oct 12 16:05:16 2012 +0200
@@ -41,7 +41,7 @@
 
 # import required features
 from __pkginfo__ import modname, version, license, description, web, \
-     author, author_email
+     author, author_email, classifiers
 
 if exists('README'):
     long_description = file('README').read()
@@ -193,6 +193,7 @@
                  data_files = data_files,
                  ext_modules = ext_modules,
                  cmdclass = cmdclass,
+                 classifiers = classifiers,
                  **kwargs
                  )
 
--- a/sobjects/ldapparser.py	Fri Oct 12 15:38:58 2012 +0200
+++ b/sobjects/ldapparser.py	Fri Oct 12 16:05:16 2012 +0200
@@ -25,7 +25,7 @@
 from logilab.common.decorators import cached
 from logilab.common.shellutils import generate_password
 
-from cubicweb import Binary
+from cubicweb import Binary, ConfigurationError
 from cubicweb.server.utils import crypt_password
 from cubicweb.server.sources import datafeed
 
@@ -92,7 +92,12 @@
             tdict = {}
         for sattr, tattr in self.source.user_attrs.iteritems():
             if tattr not in self.non_attribute_keys:
-                tdict[tattr] = sdict[sattr]
+                try:
+                    tdict[tattr] = sdict[sattr]
+                except KeyError:
+                    raise ConfigurationError('source attribute %s is not present '
+                                             'in the source, please check the '
+                                             'user-attrs-map field' % sattr)
         return tdict
 
     def before_entity_copy(self, entity, sourceparams):
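
The new ConfigurationError points at the user-attrs-map option of the ldapfeed
source. As an illustration only (the exact mapping syntax is an assumption,
check your source configuration for the authoritative format), the option maps
each ldap attribute to a CWUser attribute, e.g.:

    user-attrs-map=uid:login,mail:email,userPassword:upassword

Every ldap attribute named on the left-hand side must actually be returned by
the ldap server, otherwise the parser now raises the ConfigurationError above
instead of a bare KeyError.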
--- a/web/views/boxes.py	Fri Oct 12 15:38:58 2012 +0200
+++ b/web/views/boxes.py	Fri Oct 12 16:05:16 2012 +0200
@@ -48,7 +48,7 @@
 BoxTemplate = box.BoxTemplate
 BoxHtml = htmlwidgets.BoxHtml
 
-class EditBox(component.CtxComponent): # XXX rename to ActionsBox
+class EditBox(component.CtxComponent):
     """
     box with all actions impacting the entity displayed: edit, copy, delete
     change state, add related entities...
@@ -58,6 +58,7 @@
     title = _('actions')
     order = 2
     contextual = True
+    __select__ = component.CtxComponent.__select__ & non_final_entity()
 
     def init_rendering(self):
         super(EditBox, self).init_rendering()
--- a/zmqclient.py	Fri Oct 12 15:38:58 2012 +0200
+++ b/zmqclient.py	Fri Oct 12 16:05:16 2012 +0200
@@ -43,12 +43,9 @@
     ZMQ is used as the transport layer and cPickle is used to serialize data.
     """
 
-    def __init__(self, config, vreg=None):
-        self.config = config
-        self.vreg = vreg
+    def __init__(self, zmq_address):
         self.socket = ctx.socket(zmq.REQ)
-        self.host = config.get('base-url')
-        self.socket.connect(self.host)
+        self.socket.connect(zmq_address)
 
     def __zmqcall__(self, name, *args, **kwargs):
          self.socket.send_pyobj([name, args, kwargs])
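
With this change the ZMQ client no longer needs a configuration object, only
the address of the repository's ZMQ socket. A minimal sketch of the updated
call sites (address and credentials are the ones used in
unittest_repository.py, shown for illustration; ConnectionProperties is assumed
importable from cubicweb.dbapi as in the test suite):

    from cubicweb.dbapi import connect, ConnectionProperties
    from cubicweb.zmqclient import ZMQRepositoryClient

    # low-level client: the address is passed straight to the REQ socket
    client = ZMQRepositoryClient('tcp://127.0.0.1:41415')

    # dbapi-level connection, the first argument now being the ZMQ address
    cnx = connect('tcp://127.0.0.1:41415', u'admin', password=u'gingkow',
                  cnxprops=ConnectionProperties('zmq'))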