hgext/obsolete.py
changeset 224 f60555898df4
parent 218 ace5608350b6
child 226 0892b91947ef
--- a/hgext/obsolete.py	Sun May 06 03:27:45 2012 +0200
+++ b/hgext/obsolete.py	Tue May 08 20:50:43 2012 +0200
@@ -83,6 +83,7 @@
 from mercurial.i18n import _
 
 import base64
+import json
 
 from mercurial import util
 from mercurial import context
@@ -163,9 +164,10 @@
     """obsolete parents of a subset"""
     cs = set()
     nm = repo.changelog.nodemap
+    markerbysubj = repo.obsoletestore.subjects
     for r in s:
-        for p in repo._obssubrels.get(repo[r].node(), ()):
-            pr = nm.get(p)
+        for p in markerbysubj.get(repo[r].node(), ()):
+            pr = nm.get(p['object'])
             if pr is not None:
                 cs.add(pr)
     return cs
@@ -180,9 +182,11 @@
     """obsolete ancestors of a subset"""
     toproceed = [repo[r].node() for r in s]
     seen = set()
+    allsubjects = repo.obsoletestore.subjects
     while toproceed:
         nc = toproceed.pop()
-        for np in repo._obssubrels.get(nc, ()):
+        for mark in allsubjects.get(nc, ()):
+            np = mark['object']
             if np not in seen:
                 seen.add(np)
                 toproceed.append(np)
@@ -253,30 +257,25 @@
 
 def pushobsolete(repo, key, old, raw):
     """push obsolete relation through pushkey"""
-    assert key == "relations"
-    w = repo.wlock()
+    assert key == "markers"
+    l = repo.lock()
     try:
         tmp = StringIO()
         tmp.write(raw)
         tmp.seek(0)
-        relations = _obsdeserialise(tmp)
-        for sub, objs in relations.iteritems():
-            for obj in objs:
-                try:
-                    repo.addobsolete(sub, obj)
-                except error.RepoLookupError:
-                    pass
-        return 0
+        repo.obsoletestore.load(tmp)
+        repo.obsoletestore._dirty = True # XXX meh
+        return 1
     finally:
-        w.release()
+        l.release()
 
 def listobsolete(repo):
     """dump all obsolete relation in
 
     XXX this have be improved"""
     tmp = StringIO()
-    _obsserialise(repo._obssubrels, tmp)
-    return {'relations': base64.b64encode(tmp.getvalue())}
+    repo.obsoletestore.save(tmp)
+    return {'markers': base64.b64encode(tmp.getvalue())}
 
 pushkey.register('obsolete', pushobsolete, listobsolete)
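+# Exchange sketch (as implied by the two functions above, not a documented
+# wire format): listkeys('obsolete') exposes the JSON-serialized store
+# base64-encoded under the 'markers' key, while the push side sends the raw
+# JSON and pushobsolete() loads it straight into repo.obsoletestore, e.g.:
+#
+#   raw = base64.b64decode(remote.listkeys('obsolete')['markers'])
+#   repo.obsoletestore.load(StringIO(raw))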
 
@@ -345,11 +344,12 @@
                     new = set()
                     while nodes:
                         n = nodes.pop()
-                        if n in repo._obsobjrels:
-                            newernodes = repo._obsobjrels[n]
-                            for newernode in newernodes:
-                                if newernode is not None:
-                                    nodes.append(newernode)
+                        if n in repo.obsoletestore.objects:
+                            markers = repo.obsoletestore.objects[n]
+                            for mark in markers:
+                                for newernode in mark['subjects']:
+                                    if newernode is not None:
+                                        nodes.append(newernode)
                         else:
                             new.add(n)
                     if new:
@@ -384,12 +384,53 @@
     """Add an obsolete relation between a too node
 
     The subject is expected to be a newer version of the object"""
-    sub = repo[subject]
-    obj = repo[object]
-    repo.addobsolete(sub.node(), obj.node())
+    lock = repo.lock()
+    try:
+        sub = repo[subject]
+        obj = repo[object]
+        repo.addobsolete(sub.node(), obj.node())
+    finally:
+        lock.release()
     return 0
 
-cmdtable = {'debugobsolete': (cmddebugobsolete, [], '<subject> <object>')}
+def cmddebugconvertobsolete(ui, repo):
+    cnt = 0
+    l = repo.lock()
+    try:
+        repo._importoldobsolete = True
+        store = repo.obsoletestore
+        try:
+            f = repo.opener('obsolete-relations')
+            try:
+                for line in f:
+                    subhex, objhex = line.split()
+                    sub = bin(subhex)
+                    obj = bin(objhex)
+                    newmarker = {
+                        'subjects': (sub==nullid) and [] or [sub],
+                        'object': obj,
+                        'date':  util.makedate(),
+                        'user': ui.username(),
+                        'reason': 'import from older format.',
+                        }
+                    store.new(newmarker)
+                    store._dirty = True
+                    cnt += 1
+            finally:
+                f.close()
+            util.unlink(repo.join('obsolete-relations'))
+        except IOError:
+            ui.warn('nothing to do\n')
+            pass
+    finally:
+        del repo._importoldobsolete
+        l.release()
+    ui.status('%i obsolete markers converted\n' % cnt)
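+# The conversion reads the old 'obsolete-relations' file (one "subjecthex
+# objecthex" pair per line), emits one marker per pair, then removes the old
+# file; a later run finds no file and warns that there is nothing to do.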
+
+
+cmdtable = {'debugobsolete': (cmddebugobsolete, [], '<subject> <object>'),
+            'debugconvertobsolete': (cmddebugconvertobsolete, [], ''),
+           }
 
 ### Altering existing command
 #############################
@@ -400,11 +441,16 @@
         ui.warn(_('Working directory parent is obsolete\n'))
     return res
 
+def noextinctsvisibleheads(orig, repo):
+    repo._turn_extinct_secret()
+    return orig(repo)
+
 def uisetup(ui):
     extensions.wrapcommand(commands.table, "update", wrapmayobsoletewc)
     extensions.wrapcommand(commands.table, "pull", wrapmayobsoletewc)
     extensions.wrapfunction(discovery, 'findcommonoutgoing', wrapfindcommonoutgoing)
     extensions.wrapfunction(discovery, 'checkheads', wrapcheckheads)
+    extensions.wrapfunction(phases, 'visibleheads', noextinctsvisibleheads)
     if util.safehasattr(phases, 'visiblebranchmap'):
         extensions.wrapfunction(phases, 'visiblebranchmap', wrapvisiblebranchmap)
 
@@ -447,30 +493,128 @@
 
 def newerversion(repo, obs):
     """Return the newer version of an obsolete changeset"""
-    toproceed = set([obs])
-    # XXX know optimization available
+    toproceed = set([(obs,)])
+    # XXX known optimization available
     newer = set()
+    objectrels = repo.obsoletestore.objects
     while toproceed:
         current = toproceed.pop()
-        if current in repo._obsobjrels:
-            toproceed.update(repo._obsobjrels[current])
-        elif current is not None: # None is kill
-            newer.add((current,))
+        assert len(current) <= 1, 'splitting not handled yet. %r' % current
+        if current:
+            n, = current
+            if n in objectrels:
+                markers = objectrels[n]
+                for mark in markers:
+                    toproceed.add(tuple(mark['subjects']))
+            else:
+                newer.add(tuple(current))
         else:
             newer.add(())
     return sorted(newer)
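+# Reading newerversion() above: each element of the result is a tuple of
+# successor nodes, so (with `n` an obsolete node and `m` its hypothetical
+# successor) it returns [(m,)] for a plain rewrite, [()] when the changeset
+# was killed without a successor, and [(n,)] when no marker obsoletes n.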
 
+### obsolete relation storage
+#############################
+def add2set(d, key, mark):
+    """add <mark> to a `set` in <d>[<key>]"""
+    d.setdefault(key, []).append(mark)
+
+def markerid(marker):
+    KEYS = ['subjects', 'object', 'date', 'user', 'reason']
+    for key in KEYS:
+        assert key in marker
+    keys = sorted(marker.keys())
+    a = util.sha1()
+    for key in keys:
+        if key == 'subjects':
+            for sub in sorted(marker[key]):
+                a.update(sub)
+        elif key == 'id':
+            pass
+        else:
+            a.update(str(marker[key]))
+    a.update('\0')
+    return a.digest()
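+# Note on markerid(): the digest covers every field except 'id' itself and
+# hashes the subjects in sorted order, so recomputing the id of a stored
+# marker is stable and identical markers created independently collapse to a
+# single entry in obsoletestore._markers.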
+
+class obsoletestore(object):
+    """Store obsolete relations
+
+    Relations are stored in three mappings. All mappings have "obsolete
+    markers" as values::
+
+        {'id': "unique id of the obsolete marker",
+         'subjects': "0-N newer versions of the changeset in 'object' (as an ordered list)",
+         'object': "old and obsolete version",
+         'date': "when was this marker created?",
+         'user': "who did it?",
+         'reason': "why was it done?",
+        }
+
+    The three mappings are:
+
+    :self._markers: "id" -> marker
+
+    :self.subjects: "subject" -> list of markers
+
+    :self.objects: "object" -> list of markers
+    """
+
+    def __init__(self):
+        self._markers = {}
+        self.subjects = {}
+        self.objects = {}
+        self._dirty = False # should be on repo
+
+    def new(self, marker):
+        """Add a *new* marker to the store. computing it's ID"""
+        mid = marker['id'] = markerid(marker)
+        self._insert(marker)
+        self._dirty = True
+        return mid
+
+    def _insert(self, marker):
+        if marker['id'] not in self._markers:
+            self._markers[marker['id']] = marker
+            add2set(self.objects, marker['object'], marker)
+            for subj in marker['subjects']:
+                add2set(self.subjects, subj, marker)
+
+    def save(self, stream):
+        markers = []
+        for mark in self._markers.itervalues():
+            jmark = mark.copy()
+            jmark['id'] = hex(jmark['id'])
+            jmark['subjects'] = [hex(n) for n in jmark['subjects']]
+            jmark['object'] = hex(jmark['object'])
+            markers.append(jmark)
+        json.dump(markers, stream)
+
+    def load(self, stream):
+        for mark in json.load(stream):
+            mark['id'] = bin(mark['id'])
+            mark['subjects'] = [bin(n) for n in mark['subjects']]
+            mark['object'] = bin(mark['object'])
+            self._insert(mark)
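+    # Round-trip sketch (`store` stands for any populated obsoletestore): the
+    # markers survive save()/load() through any file-like object, e.g.
+    #
+    #   buf = StringIO()
+    #   store.save(buf)
+    #   buf.seek(0)
+    #   copy = obsoletestore()
+    #   copy.load(buf)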
+
+def writeobsolete(repo):
+    """wire obsolete data on disk"""
+    f = repo.sopener('obsoletemarkers', 'w', atomictemp=True)
+    try:
+        repo.obsoletestore.save(f)
+        repo.obsoletestore._dirty = False
+    finally:
+        f.close()
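+# For the Mercurial versions this extension targets, atomictemp=True makes the
+# opener write to a temporary file that is renamed over 'obsoletemarkers' on
+# close(), so readers never observe a partially written marker file.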
+
 
 ### repo subclassing
 #############################
 
 def reposetup(ui, repo):
-
     if not repo.local():
         return
 
     opull = repo.pull
     opush = repo.push
+    olock = repo.lock
     o_rollback = repo._rollback
     o_updatebranchcache = repo.updatebranchcache
 
@@ -485,18 +629,39 @@
         ### Public method
         def obsoletedby(self, node):
             """return the set of node that make <node> obsolete (obj)"""
-            return self._obsobjrels.get(node, set())
+            others = set()
+            for marker in self.obsoletestore.objects.get(node, []):
+                others.update(marker['subjects'])
+            return others
 
         def obsolete(self, node):
             """return the set of node that <node> make obsolete (sub)"""
-            return self._obssubrels.get(node, set())
+            return set(marker['object'] for marker in self.obsoletestore.subjects.get(node, []))
+
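+        # obsoletestore below is a propertycache: the marker file is parsed on
+        # first access and kept on the instance until something (such as
+        # _rollback further down) pops 'obsoletestore' from self.__dict__ to
+        # force a reload from disk.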
+        @util.propertycache
+        def obsoletestore(self):
+            if not getattr(self, '_importoldobsolete', False):
+                try:
+                    f = self.opener('obsolete-relations')
+                    f.close()
+                    raise util.Abort('old format of obsolete marker detected!\n'
+                                     'run `hg debugconvertobsolete` once.')
+                except IOError:
+                    pass
+            store = obsoletestore()
+            try:
+                f = self.sopener('obsoletemarkers')
+                store.load(f)
+            except IOError:
+                pass
+            return store
 
         @util.propertycache
         def _obsoleteset(self):
             """the set of obsolete revision"""
             obs = set()
             nm = self.changelog.nodemap
-            for obj in self._obsobjrels:
+            for obj in self.obsoletestore.objects:
                 try: # /!\api change in Hg 2.2 (e8d37b78acfb22ae2c1fb126c2)/!\
                     rev = nm.get(obj)
                 except TypeError:  #XXX to remove while breaking Hg 2.1 support
@@ -521,10 +686,6 @@
             return set(self.revs('obsolete() - obsolete()::unstable()'))
 
         def _clearobsoletecache(self):
-            if '_obsobjrels' in vars(self):
-                del self._obsobjrels
-            if '_obssubrels' in vars(self):
-                del self._obssubrels
             if '_obsoleteset' in vars(self):
                 del self._obsoleteset
             self._clearunstablecache()
@@ -543,55 +704,56 @@
 
         def addobsolete(self, sub, obj):
             """Add a relation marking that node <sub> is a new version of <obj>"""
-            if sub == nullid:
-                sub = None
-            if obj in self._obssubrels.get(sub, set()):
-                return 0
-            if sub == obj:
-                return 0
-            self._obssubrels.setdefault(sub, set()).add(obj)
-            self._obsobjrels.setdefault(obj, set()).add(sub)
+            assert sub != obj
+            if not repo[obj].phase():
+                if sub is None:
+                    self.ui.warn(
+                        _("trying to kill immutable changeset %(obj)s\n")
+                        % {'obj': short(obj)})
+                if sub is not None:
+                    self.ui.warn(
+                        _("%(sub)s tries to obsolete immutable changeset %(obj)s\n")
+                        % {'sub': short(sub), 'obj': short(obj)})
+            lock = self.lock()
             try:
-                if self[obj].phase() == 0:
-                    if sub is None:
-                        self.ui.warn(
-                            _("trying to kill immutable changeset %(obj)s\n")
-                            % {'obj': short(obj)})
-                    if sub is not None:
-                        self.ui.warn(
-                            _("%(sub)s try to obsolete immutable changeset %(obj)s\n")
-                            % {'sub': short(sub), 'obj': short(obj)})
-                self.changelog.hiddenrevs.add(repo[obj].rev())
-            except (error.RepoLookupError, error.LookupError):
-                pass #unknow revision (but keep propagating the data
-            self._writeobsrels()
+                newmarker = {
+                    'subjects': (sub==nullid) and [] or [sub],
+                    'object': obj,
+                    'date':  util.makedate(),
+                    'user': ui.username(),
+                    'reason': 'unknown',
+                    }
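+                # a nullid subject records a plain kill: the marker carries no
+                # successors, which newerversion() reports as an empty tuple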
+                mid = self.obsoletestore.new(newmarker)
+                self._clearobsoletecache()
+                self._turn_extinct_secret()
+                return mid
+            finally:
+                lock.release()
+
+        def _turn_extinct_secret(self):
+            """ensure all extinct changeset are secret"""
             self._clearobsoletecache()
-            return 1
-
-        ### obsolete storage
-        @util.propertycache
-        def _obsobjrels(self):
-            """{<old-node> -> set(<new-node>)}
-
-            also compute hidden revision"""
-            #reverse sub -> objs mapping
-            objrels = {}
-            for sub, objs in self._obssubrels.iteritems():
-                for obj in objs:
-                    objrels.setdefault(obj, set()).add(sub)
-            return objrels
-
-        @util.propertycache
-        def _obssubrels(self):
-            """{<new-node> -> set(<old-node>)}"""
-            return self._readobsrels()
-
-
-
+            # this is mainly done for safety purposes; it runs on both pull
+            # and push
+            expobs = [c.node() for c in repo.set('extinct() - secret()')]
+            phases.retractboundary(repo, 2, expobs)
 
         ### Disk IO
+
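+        # The repository lock is wrapped so that pending markers are flushed
+        # when it is released: if the store was marked dirty while the lock
+        # was held, the patched releasefn writes it out before releasing. The
+        # `obspatched` attribute keeps the wrapper from stacking twice.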
+        def lock(self, *args, **kwargs):
+            l = olock(*args, **kwargs)
+            if not getattr(l.releasefn, 'obspatched', False):
+                oreleasefn = l.releasefn
+                def releasefn(*args, **kwargs):
+                    if self.obsoletestore._dirty:
+                        writeobsolete(self)
+                    oreleasefn(*args, **kwargs)
+                releasefn.obspatched = True
+                l.releasefn = releasefn
+            return l
+
         def _readobsrels(self):
-            """Write obsolete relation on disk"""
+            """Read obsolete relation on disk"""
             # XXX handle lock
             try:
                 f = self.opener('obsolete-relations')
@@ -602,47 +764,36 @@
             except IOError:
                 return {}
 
-        def _writeobsrels(self):
-            """Write obsolete relation on disk"""
-            # XXX handle lock
-            lock = self.wlock()
-            try:
-                f = self.opener('obsolete-relations', 'w', atomictemp=True)
-                try:
-                    _obsserialise(self._obssubrels, f)
-                    try:
-                        f.rename()
-                    except AttributeError: # old version
-                        f.close()
-                finally:
-                    f.close()
-            finally:
-                lock.release()
-
 
         ### pull // push support
 
         def pull(self, remote, *args, **kwargs):
             """wrapper around push that push obsolete relation"""
-            result = opull(remote, *args, **kwargs)
-            if 'obsolete' in remote.listkeys('namespaces'):
-                tmp = StringIO()
-                rels = remote.listkeys('obsolete')['relations']
-                tmp.write(base64.b64decode(rels))
-                tmp.seek(0)
-                obsrels = _obsdeserialise(tmp)
-                for sub, objs in obsrels.iteritems():
-                    for obj in objs:
-                        self.addobsolete(sub, obj)
-            return result
+            l = repo.lock()
+            try:
+                result = opull(remote, *args, **kwargs)
+                if 'obsolete' in remote.listkeys('namespaces'):
+                    tmp = StringIO()
+                    rels = remote.listkeys('obsolete')['markers']
+                    tmp.write(base64.b64decode(rels))
+                    tmp.seek(0)
+                    repo.obsoletestore.load(tmp)
+                    repo.obsoletestore._dirty = True # XXX meh
+                    self._clearobsoletecache()
+                self._turn_extinct_secret()
+                return result
+            finally:
+                l.release()
 
         def push(self, remote, *args, **opts):
             """wrapper around pull that pull obsolete relation"""
+            self._turn_extinct_secret()
             result = opush(remote, *args, **opts)
             if 'obsolete' in remote.listkeys('namespaces'):
                 tmp = StringIO()
-                _obsserialise(self._obssubrels, tmp)
-                remote.pushkey('obsolete', 'relations', '', tmp.getvalue())
+                self.obsoletestore.save(tmp)
+                remote.pushkey('obsolete', 'markers', '', tmp.getvalue())
+            self._turn_extinct_secret()
 
             return result
 
@@ -652,27 +803,27 @@
         # /!\ api change in  Hg 2.2 (97efd26eb9576f39590812ea9) /!\
         if util.safehasattr(repo, '_journalfiles'): # Hg 2.2
             def _journalfiles(self):
-                return o_journalfiles() + (self.join('journal.obsolete-relations'),) 
+                return o_journalfiles() + (self.sjoin('journal.obsoletemarkers'),)
 
             def _writejournal(self, desc):
                 """wrapped version of _writejournal that save obsolete data"""
                 o_writejournal(desc)
-                filename = 'obsolete-relations'
-                filepath = self.join(filename)
+                filename = 'obsoletemarkers'
+                filepath = self.sjoin(filename)
                 if os.path.exists(filepath):
                     journalname = 'journal.' + filename
-                    journalpath = self.join(journalname)
+                    journalpath = self.sjoin(journalname)
                     util.copyfile(filepath, journalpath)
 
         else: # XXX remove this block when dropping support for Hg 2.1
             def _writejournal(self, desc):
                 """wrapped version of _writejournal that save obsolete data"""
                 entries = list(o_writejournal(desc))
-                filename = 'obsolete-relations'
-                filepath = self.join(filename)
+                filename = 'obsoletemarkers'
+                filepath = self.sjoin(filename)
                 if  os.path.exists(filepath):
                     journalname = 'journal.' + filename
-                    journalpath = self.join(journalname)
+                    journalpath = self.sjoin(journalname)
                     util.copyfile(filepath, journalpath)
                     entries.append(journalpath)
                 return tuple(entries)
@@ -681,16 +832,15 @@
             """wrapped version of _rollback that restore obsolete data"""
             ret = o_rollback(dryrun, force)
             if not (ret or dryrun): # rollback did not fail
-                src = self.join('undo.obsolete-relations')
-                dst = self.join('obsolete-relations')
+                src = self.sjoin('undo.obsoletemarkers')
+                dst = self.sjoin('obsoletemarkers')
                 if os.path.exists(src):
                     util.rename(src, dst)
                 elif os.path.exists(dst):
                     # If no state was saved because the file did not exist before.
                     os.unlink(dst)
                 # invalidate cache
-                self.__dict__.pop('_obssubrels', None)
-                self.__dict__.pop('_obsobjrels', None)
+                self.__dict__.pop('obsoletestore', None)
             return ret
 
         @storecache('00changelog.i')
@@ -721,17 +871,16 @@
             c.__class__ = hchangelog
             return c
 
-
-
-
     repo.__class__ = obsoletingrepo
 
-    expobs = [c.node() for c in repo.set('extinct() - secret()')]
-    if expobs: # do not lock in nothing move. locking for peanut make hgview reload on any command
-        lock = repo.lock()
-        try:
-            phases.retractboundary(repo, 2, expobs)
-        finally:
-            lock.release()
+    if False:
+        expobs = [c.node() for c in repo.set('extinct() - secret()')]
+        if expobs: # do not take the lock if nothing moved; locking for peanuts makes hgview reload on any command
+            lock = repo.lock()
+            try:
+                expobs = [c.node() for c in repo.set('extinct() - secret()')]
+                phases.retractboundary(repo, 2, expobs)
+            finally:
+                lock.release()