# obsolete.py - introduce the obsolete concept in mercurial.
#
# Copyright 2011 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
#                Logilab SA        <contact@logilab.fr>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
"""Introduce the Obsolete concept to mercurial

General concept
===============

This extension introduces the *obsolete* concept. It adds a new *obsolete*
relation between two changesets. A relation ``<changeset B> obsolete
<changeset A>`` denotes that ``<changeset B>`` is a new version of
``<changeset A>``.

The *obsolete* relation acts as a **perpendicular history** to the standard
changeset history. Standard changeset history versions files; the *obsolete*
relation versions changesets.
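
For example, when changeset ``A`` is rewritten (e.g. rebased) into changeset
``B``, recording ``B obsolete A`` marks ``A`` as superseded by ``B``.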

:obsolete:     a changeset that has been replaced by another one.
:unstable:     a non-obsolete changeset based on an obsolete one.
:suspended:    an obsolete changeset with unstable descendants.
:extinct:      an obsolete changeset without unstable descendants
               (subject to garbage collection)

Another name for unstable could be out of sync.



Usage and Features
==================


Display and Exchange
....................

Obsolete changesets are hidden (except if they have non-obsolete descendants).

Obsolete changesets are currently not exchanged. This will probably change
later, but it was the simplest solution for now.

New commands
............

A ``debugobsolete`` command has been added.

It adds an obsolete relation between two changesets.
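
For instance, with ``<subject>`` being the newer version of ``<object>``::

    hg debugobsolete <subject> <object>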

Context object
..............

Contexts gain an ``obsolete`` method that returns True if a changeset is
obsolete, and False otherwise.
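
For example, from Python (a sketch; ``repo`` is a local repository)::

    ctx = repo['.']
    ctx.obsolete()   # True if the changeset has been superseded
    ctx.unstable()   # True if it is based on an obsolete changeset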

revset
......

Add an ``obsolete()`` entry.
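
For example, to select obsolete changesets from Python (a sketch)::

    repo.revs('obsolete()')

``unstable()``, ``suspended()``, ``extinct()``, ``obsparents()`` and
``obsancestors()`` entries are registered as well (see ``extsetup`` below).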

repo extension
..............
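
The repository class gains ``obsoletedby(node)``, ``obsolete(node)`` and
``addobsolete(sub, obj)`` methods (see ``reposetup`` below).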

To Do
-----

* refuse to obsolete published changesets

* handle split

* handle conflict

* handle unstable // out of sync

"""
import os
try:
    from cStringIO import StringIO
except ImportError:
    from StringIO import StringIO

from mercurial.i18n import _

import base64
import json

from mercurial import util
from mercurial import context
from mercurial import revset
from mercurial import scmutil
from mercurial import extensions
from mercurial import pushkey
from mercurial import discovery
from mercurial import error
from mercurial import commands
from mercurial import changelog
from mercurial import phases
from mercurial.node import hex, bin, short, nullid
from mercurial.lock import release

try:
    from mercurial.localrepo import storecache
    storecache('babar') # to trigger import
except (TypeError, ImportError):
    def storecache(*args):
        return scmutil.filecache(*args, instore=True)


### Patch changectx
#############################

def obsolete(ctx):
    """is the changeset obsolete by other"""
    if ctx.node()is None:
        return False
    return bool(ctx._repo.obsoletedby(ctx.node())) and ctx.phase()

context.changectx.obsolete = obsolete

def unstable(ctx):
    """is the changeset unstable (have obsolete ancestor)"""
    if ctx.node() is None:
        return False
    return ctx.rev() in ctx._repo._unstableset

context.changectx.unstable = unstable

def extinct(ctx):
    """is the changeset extinct by other"""
    if ctx.node() is None:
        return False
    return ctx.rev() in ctx._repo._extinctset

context.changectx.extinct = extinct


### revset
#############################


def revsetobsolete(repo, subset, x):
    """obsolete changesets"""
    args = revset.getargs(x, 0, 0, 'obsolete takes no arguments')
    return [r for r in subset if r in repo._obsoleteset and repo._phaserev[r] > 0]

def revsetunstable(repo, subset, x):
    """non obsolete changesets descendant of obsolete one"""
    args = revset.getargs(x, 0, 0, 'unstable takes no arguments')
    return [r for r in subset if r in repo._unstableset]

def revsetsuspended(repo, subset, x):
    """obsolete changesets with non obsolete descendants"""
    args = revset.getargs(x, 0, 0, 'unstable takes no arguments')
    return [r for r in subset if r in repo._suspendedset]

def revsetextinct(repo, subset, x):
    """obsolete changesets without obsolete descendants"""
    args = revset.getargs(x, 0, 0, 'unstable takes no arguments')
    return [r for r in subset if r in repo._extinctset]


def _obsparents(repo, s):
    """obsolete parents of a subset"""
    cs = set()
    nm = repo.changelog.nodemap
    markerbysubj = repo.obsoletestore.subjects
    for r in s:
        for p in markerbysubj.get(repo[r].node(), ()):
            pr = nm.get(p['object'])
            if pr is not None:
                cs.add(pr)
    return cs

def revsetobsparents(repo, subset, x):
    """obsolete parents"""
    s = revset.getset(repo, range(len(repo)), x)
    cs = _obsparents(repo, s)
    return [r for r in subset if r in cs]

def _obsancestors(repo, s):
    """obsolete ancestors of a subset"""
    toproceed = [repo[r].node() for r in s]
    seen = set()
    allsubjects = repo.obsoletestore.subjects
    while toproceed:
        nc = toproceed.pop()
        for mark in allsubjects.get(nc, ()):
            np = mark['object']
            if np not in seen:
                seen.add(np)
                toproceed.append(np)
    nm = repo.changelog.nodemap
    cs = set()
    for p in seen:
        pr = nm.get(p)
        if pr is not None:
            cs.add(pr)
    return cs

def revsetobsancestors(repo, subset, x):
    """obsolete parents"""
    s = revset.getset(repo, range(len(repo)), x)
    cs = _obsancestors(repo, s)
    return [r for r in subset if r in cs]



### Other Extension compat
############################


def buildstate(orig, repo, dest, rebaseset, detach):
    """wrapper for rebase 's buildstate that exclude obsolete changeset"""
    rebaseset = repo.revs('%ld - extinct()', rebaseset)
    return orig(repo, dest, rebaseset, detach)


def concludenode(orig, repo, rev, *args, **kwargs):
    """wrapper for rebase 's concludenode that set obsolete relation"""
    newrev = orig(repo, rev, *args, **kwargs)
    oldnode = repo[rev].node()
    newnode = repo[newrev].node()
    repo.addobsolete(newnode, oldnode)
    return newrev

def cmdrebase(orig, ui, repo, *args, **kwargs):
    oldkeep = kwargs.pop('keep', False)
    if oldkeep:
        ui.warn('WARNING: --keep option ignored by experimental obsolete extension\n')
    kwargs['keep'] = True
    return orig(ui, repo, *args, **kwargs)



def extsetup(ui):

    revset.symbols["obsolete"] = revsetobsolete
    revset.symbols["unstable"] = revsetunstable
    revset.symbols["suspended"] = revsetsuspended
    revset.symbols["extinct"] = revsetextinct
    revset.symbols["obsparents"] = revsetobsparents
    revset.symbols["obsancestors"] = revsetobsancestors


    try:
        rebase = extensions.find('rebase')
        if rebase:
            extensions.wrapfunction(rebase, 'buildstate', buildstate)
            extensions.wrapfunction(rebase, 'concludenode', concludenode)
            extensions.wrapcommand(rebase.cmdtable, "rebase", cmdrebase)
    except KeyError:
        pass # rebase not found

# Pushkey mechanism for mutable
#########################################

def pushobsolete(repo, key, old, raw):
    """push obsolete relation through pushkey"""
    assert key == "markers"
    l = repo.lock()
    try:
        tmp = StringIO()
        tmp.write(raw)
        tmp.seek(0)
        repo.obsoletestore.load(tmp)
        repo.obsoletestore._dirty = True # XXX meh
        return 1
    finally:
        l.release()

def listobsolete(repo):
    """dump all obsolete relation in

    XXX this have be improved"""
    tmp = StringIO()
    repo.obsoletestore.save(tmp)
    return {'markers': base64.b64encode(tmp.getvalue())}

pushkey.register('obsolete', pushobsolete, listobsolete)
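
# Example (sketch) of how markers travel over this pushkey namespace,
# mirroring the pull() wrapper below; `remote` is assumed to be a peer
# exposing the 'obsolete' namespace:
#
#   raw = remote.listkeys('obsolete')['markers']
#   tmp = StringIO()
#   tmp.write(base64.b64decode(raw))
#   tmp.seek(0)
#   repo.obsoletestore.load(tmp)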

### Discovery wrapping
#############################

class blist(list, object):
    """silly class to have non False but empty list"""

    def __nonzero__(self):
        return bool(len(self.orig))

def wrapfindcommonoutgoing(orig, repo, *args, **kwargs):
    """wrap mercurial.discovery.findcommonoutgoing to remove extinct changeset

    Such excluded changeset are removed from excluded  and will *not* appear
    are excluded secret changeset.
    """
    outgoing = orig(repo, *args, **kwargs)
    orig = outgoing.excluded
    outgoing.excluded = blist(n for n in orig if not repo[n].extinct())
    # when no revision is specified (push everything) a shortcut is taken when
    # nothing was excluded. Taking this code path when extinct changesets have
    # been excluded leads to repository corruption.
    outgoing.excluded.orig = orig
    return outgoing

def wrapcheckheads(orig, repo, remote, outgoing, *args, **kwargs):
    """wrap mercurial.discovery.checkheads

    * prevent unstability to be pushed
    * patch remote to ignore obsolete heads on remote
    """
    # do not push instability
    for h in outgoing.missingheads:
        # checking heads only is enough because anything based on an obsolete
        # changeset is either obsolete or unstable.
        ctx = repo[h]
        hint = _("use 'hg stabilize' to get a stable history (or --force to proceed)")
        if ctx.unstable():
            raise util.Abort(_("Trying to push unstable changeset: %s!") % ctx,
                             hint=hint)
        if ctx.obsolete():
            raise util.Abort(_("Trying to push obsolete changeset: %s!") % ctx,
                             hint=hint)
    ### patch remote branch map
    # do not read this, it burns your eyes
    try:
        if 'oldbranchmap' not in vars(remote):
            remote.oldbranchmap = remote.branchmap
            def branchmap():
                newbm = {}
                oldbm = None
                if (util.safehasattr(phases, 'visiblebranchmap')
                    and not util.safehasattr(remote, 'ignorevisiblebranchmap')
                   ):
                    remote.ignorevisiblebranchmap = False
                    remote.branchmap = remote.oldbranchmap
                    oldbm = phases.visiblebranchmap(remote)
                    remote.branchmap = remote.newbranchmap
                    remote.ignorevisiblebranchmap = True
                if oldbm is None:
                    oldbm = remote.oldbranchmap()
                for branch, nodes in oldbm.iteritems():
                    nodes = list(nodes)
                    new = set()
                    while nodes:
                        n = nodes.pop()
                        if n in repo.obsoletestore.objects:
                            markers = repo.obsoletestore.objects[n]
                            for mark in markers:
                                for newernode in mark['subjects']:
                                    if newernode is not None:
                                        nodes.append(newernode)
                        else:
                            new.add(n)
                    if new:
                        newbm[branch] = list(new)
                return newbm
            remote.ignorevisiblebranchmap = True
            remote.branchmap = branchmap
            remote.newbranchmap = branchmap
        return orig(repo, remote, outgoing, *args, **kwargs)
    finally:
        remote.__dict__.pop('branchmap', None) # restore class one
        remote.__dict__.pop('oldbranchmap', None)
        remote.__dict__.pop('newbranchmap', None)
        remote.__dict__.pop('ignorevisiblebranchmap', None)

# eyes are still burning
def wrapvisiblebranchmap(orig, repo):
    ignore = getattr(repo, 'ignorevisiblebranchmap', None)
    if ignore is None:
        return orig(repo)
    elif ignore:
        return repo.branchmap()
    else:
        return None # break recursion


### New commands
#############################


def cmddebugobsolete(ui, repo, subject, object):
    """Add an obsolete relation between a too node

    The subject is expected to be a newer version of the object"""
    lock = repo.lock()
    try:
        sub = repo[subject]
        obj = repo[object]
        repo.addobsolete(sub.node(), obj.node())
    finally:
        lock.release()
    return 0

def cmddebugconvertobsolete(ui, repo):
    cnt = 0
    l = repo.lock()
    try:
        repo._importoldobsolete = True
        store = repo.obsoletestore
        try:
            f = repo.opener('obsolete-relations')
            try:
                for line in f:
                    subhex, objhex = line.split()
                    sub = bin(subhex)
                    obj = bin(objhex)
                    newmarker = {
                        'subjects': (sub==nullid) and [] or [sub],
                        'object': obj,
                        'date':  util.makedate(),
                        'user': ui.username(),
                        'reason': 'import from older format.',
                        }
                    store.new(newmarker)
                    store._dirty = True
                    cnt += 1
            finally:
                f.close()
            util.unlink(repo.join('obsolete-relations'))
        except IOError:
            ui.warn('nothing to do\n')
            pass
    finally:
        del repo._importoldobsolete
        l.release()
    ui.status('%i obsolete markers converted\n' % cnt)


cmdtable = {'debugobsolete': (cmddebugobsolete, [], '<subject> <object>'),
            'debugconvertobsolete': (cmddebugconvertobsolete, [], ''),
           }

### Altering existing command
#############################

def wrapmayobsoletewc(origfn, ui, repo, *args, **opts):
    res = origfn(ui, repo, *args, **opts)
    if repo['.'].obsolete():
        ui.warn(_('Working directory parent is obsolete\n'))
    return res

def noextinctsvisibleheads(orig, repo):
    repo._turn_extinct_secret()
    return orig(repo)

def uisetup(ui):
    extensions.wrapcommand(commands.table, "update", wrapmayobsoletewc)
    extensions.wrapcommand(commands.table, "pull", wrapmayobsoletewc)
    extensions.wrapfunction(discovery, 'findcommonoutgoing', wrapfindcommonoutgoing)
    extensions.wrapfunction(discovery, 'checkheads', wrapcheckheads)
    extensions.wrapfunction(phases, 'visibleheads', noextinctsvisibleheads)
    if util.safehasattr(phases, 'visiblebranchmap'):
        extensions.wrapfunction(phases, 'visiblebranchmap', wrapvisiblebranchmap)

### serialisation
#############################

def _obsserialise(obssubrels, flike):
    """serialise an obsolete relation mapping in a plain text one

    this is for subject -> [objects] mapping

    format is::

        <subject-full-hex> <object-full-hex>\n"""
    for sub, objs in obssubrels.iteritems():
        for obj in objs:
            if sub is None:
                sub = nullid
            flike.write('%s %s\n' % (hex(sub), hex(obj)))

def _obsdeserialise(flike):
    """read a file like object serialised with _obsserialise

    this deserialises into a {subject -> objects} mapping"""
    rels = {}
    for line in flike:
        subhex, objhex = line.split()
        subnode = bin(subhex)
        if subnode == nullid:
            subnode = None
        rels.setdefault(subnode, set()).add(bin(objhex))
    return rels
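
# Example (sketch): round-tripping the legacy plain-text format, assuming
# `rels` maps a subject node (or None) to a set of object nodes:
#
#   buf = StringIO()
#   _obsserialise(rels, buf)
#   buf.seek(0)
#   assert _obsdeserialise(buf) == rels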

### diagnostic tools
#############################

def unstables(repo):
    """Return all unstable changeset"""
    return scmutil.revrange(repo, ['obsolete():: and (not obsolete())'])

def newerversion(repo, obs):
    """Return the newer version of an obsolete changeset"""
    toproceed = set([(obs,)])
    # XXX known optimization available
    newer = set()
    objectrels = repo.obsoletestore.objects
    while toproceed:
        current = toproceed.pop()
        assert len(current) <= 1, 'splitting not handled yet. %r' % current
        if current:
            n, = current
            if n in objectrels:
                markers = objectrels[n]
                for mark in markers:
                    toproceed.add(tuple(mark['subjects']))
            else:
                newer.add(tuple(current))
        else:
            newer.add(())
    return sorted(newer)
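
# Example (sketch): if `node` was rewritten once into `newnode`,
# newerversion(repo, node) returns [(newnode,)]; a changeset killed without
# any successor yields [()].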

### obsolete relation storage
#############################
def add2set(d, key, mark):
    """add <mark> to a `set` in <d>[<key>]"""
    d.setdefault(key, []).append(mark)

def markerid(marker):
    KEYS = ['subjects', "object", "date", "user", "reason"]
    for key in KEYS:
        assert key in marker
    keys = sorted(marker.keys())
    a = util.sha1()
    for key in keys:
        if key == 'subjects':
            for sub in sorted(marker[key]):
                a.update(sub)
        elif key == 'id':
            pass
        else:
            a.update(str(marker[key]))
    a.update('\0')
    return a.digest()

class obsoletestore(object):
    """Store obsolete relations

    Relations are stored in three mappings. All mappings have "obsolete
    markers" as values::

        {'id': "unique id of the obsolete marker"
         'subjects': "0-N newer versions of the changeset in 'object' (as an ordered list)"
         'object': "old and obsolete version"
         'date': "When was this marker created?"
         'user': "Who did that?"
         'reason': "Why was it done?"
        }

    Three mappings exist:

    :self._markers: "id" -> marker

    :self.subjects: "subject" -> set(marker)

    :self.objects: "object" -> set(marker)
    """

    def __init__(self):
        self._markers = {}
        self.subjects = {}
        self.objects = {}
        self._dirty = False # should be on repo

    def new(self, marker):
        """Add a *new* marker to the store. computing it's ID"""
        mid = marker['id'] = markerid(marker)
        self._insert(marker)
        self._dirty = True
        return mid

    def _insert(self, marker):
        if marker['id'] not in self._markers:
            self._markers[marker['id']] = marker
            add2set(self.objects, marker['object'], marker)
            for subj in marker['subjects']:
                add2set(self.subjects, subj, marker)

    def save(self, stream):
        markers = []
        for mark in self._markers.itervalues():
            jmark = mark.copy()
            jmark['id'] = hex(jmark['id'])
            jmark['subjects'] = [hex(n) for n in jmark['subjects']]
            jmark['object'] = hex(jmark['object'])
            markers.append(jmark)
        json.dump(markers, stream, indent=4)

    def load(self, stream):
        for mark in json.load(stream):
            mark['id'] = bin(mark['id'])
            mark['subjects'] = [bin(n) for n in mark['subjects']]
            mark['object'] = bin(mark['object'])
            self._insert(mark)
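
# Example (sketch): recording a marker in memory; `subnode` and `objnode` are
# assumed to be 20-byte binary nodes (see markerid() for the required keys):
#
#   store = obsoletestore()
#   store.new({'subjects': [subnode],
#              'object': objnode,
#              'date': util.makedate(),
#              'user': 'someone',
#              'reason': 'illustration'})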

def writeobsolete(repo):
    """wire obsolete data on disk"""
    f = repo.sopener('obsoletemarkers', 'w', atomictemp=True)
    try:
        repo.obsoletestore.save(f)
        repo.obsoletestore._dirty = False
    finally:
        f.close()


### repo subclassing
#############################

def reposetup(ui, repo):
    if not repo.local():
        return

    opull = repo.pull
    opush = repo.push
    olock = repo.lock
    o_rollback = repo._rollback
    o_updatebranchcache = repo.updatebranchcache

    # /!\ api change in  Hg 2.2 (97efd26eb9576f39590812ea9) /!\
    if util.safehasattr(repo, '_journalfiles'): # Hg 2.2
        o_journalfiles = repo._journalfiles
    o_writejournal = repo._writejournal


    class obsoletingrepo(repo.__class__):

        ### Public method
        def obsoletedby(self, node):
            """return the set of node that make <node> obsolete (obj)"""
            others = set()
            for marker in self.obsoletestore.objects.get(node, []):
                others.update(marker['subjects'])
            return others

        def obsolete(self, node):
            """return the set of node that <node> make obsolete (sub)"""
            return set(marker['object'] for marker in self.obsoletestore.subjects.get(node, []))

        @util.propertycache
        def obsoletestore(self):
            if not getattr(self, '_importoldobsolete', False):
                try:
                    f = self.opener('obsolete-relations')
                    f.close()
                    raise util.Abort('old format of obsolete marker detected!\n'
                                     'run `hg debugconvertobsolete` once.')
                except IOError:
                    pass
            store = obsoletestore()
            try:
                f = self.sopener('obsoletemarkers')
                store.load(f)
            except IOError:
                pass
            return store

        @util.propertycache
        def _obsoleteset(self):
            """the set of obsolete revision"""
            obs = set()
            nm = self.changelog.nodemap
            for obj in self.obsoletestore.objects:
                try: # /!\api change in Hg 2.2 (e8d37b78acfb22ae2c1fb126c2)/!\
                    rev = nm.get(obj)
                except TypeError:  #XXX to remove while breaking Hg 2.1 support
                    rev = nm.get(obj, None)
                if rev is not None:
                    obs.add(rev)
            return obs

        @util.propertycache
        def _unstableset(self):
            """the set of non obsolete revision with obsolete parent"""
            return set(self.revs('(obsolete()::) - obsolete()'))

        @util.propertycache
        def _suspendedset(self):
            """the set of obsolete parent with non obsolete descendant"""
            return set(self.revs('obsolete() and obsolete()::unstable()'))

        @util.propertycache
        def _extinctset(self):
            """the set of obsolete parent without non obsolete descendant"""
            return set(self.revs('obsolete() - obsolete()::unstable()'))

        def _clearobsoletecache(self):
            if '_obsoleteset' in vars(self):
                del self._obsoleteset
            self._clearunstablecache()

        def updatebranchcache(self):
            o_updatebranchcache()
            self._clearunstablecache()

        def _clearunstablecache(self):
            if '_unstableset' in vars(self):
                del self._unstableset
            if '_suspendedset' in vars(self):
                del self._suspendedset
            if '_extinctset' in vars(self):
                del self._extinctset

        def addobsolete(self, sub, obj):
            """Add a relation marking that node <sub> is a new version of <obj>"""
            assert sub != obj
            if not repo[obj].phase():
                if sub is None:
                    self.ui.warn(
                        _("trying to kill immutable changeset %(obj)s\n")
                        % {'obj': short(obj)})
                if sub is not None:
                    self.ui.warn(
                        _("%(sub)s try to obsolete immutable changeset %(obj)s\n")
                        % {'sub': short(sub), 'obj': short(obj)})
            lock = self.lock()
            try:
                newmarker = {
                    'subjects': (sub==nullid) and [] or [sub],
                    'object': obj,
                    'date':  util.makedate(),
                    'user': ui.username(),
                    'reason': 'unknown',
                    }
                mid = self.obsoletestore.new(newmarker)
                self._clearobsoletecache()
                self._turn_extinct_secret()
                return mid
            finally:
                lock.release()

        def _turn_extinct_secret(self):
            """ensure all extinct changeset are secret"""
            self._clearobsoletecache()
            # this is mainly for safety purpose
            # both pull and push
            expobs = [c.node() for c in repo.set('extinct() - secret()')]
            phases.retractboundary(repo, 2, expobs)

        ### Disk IO

        def lock(self, *args, **kwargs):
            l = olock(*args, **kwargs)
            if not getattr(l.releasefn, 'obspatched', False):
                oreleasefn = l.releasefn
                def releasefn(*args, **kwargs):
                    if self.obsoletestore._dirty:
                        writeobsolete(self)
                    oreleasefn(*args, **kwargs)
                releasefn.obspatched = True
                l.releasefn = releasefn
            return l

        def _readobsrels(self):
            """Read obsolete relation on disk"""
            # XXX handle lock
            try:
                f = self.opener('obsolete-relations')
                try:
                    return _obsdeserialise(f)
                finally:
                    f.close()
            except IOError:
                return {}


        ### pull // push support

        def pull(self, remote, *args, **kwargs):
            """wrapper around push that push obsolete relation"""
            l = repo.lock()
            try:
                result = opull(remote, *args, **kwargs)
                if 'obsolete' in remote.listkeys('namespaces'):
                    tmp = StringIO()
                    rels = remote.listkeys('obsolete')['markers']
                    tmp.write(base64.b64decode(rels))
                    tmp.seek(0)
                    repo.obsoletestore.load(tmp)
                    repo.obsoletestore._dirty = True # XXX meh
                    self._clearobsoletecache()
                self._turn_extinct_secret()
                return result
            finally:
                l.release()

        def push(self, remote, *args, **opts):
            """wrapper around pull that pull obsolete relation"""
            self._turn_extinct_secret()
            result = opush(remote, *args, **opts)
            if 'obsolete' in remote.listkeys('namespaces'):
                tmp = StringIO()
                self.obsoletestore.save(tmp)
                remote.pushkey('obsolete', 'markers', '', tmp.getvalue())
            self._turn_extinct_secret()

            return result


        ### rollback support

        # /!\ api change in  Hg 2.2 (97efd26eb9576f39590812ea9) /!\
        if util.safehasattr(repo, '_journalfiles'): # Hg 2.2
            def _journalfiles(self):
                return o_journalfiles() + (self.sjoin('journal.obsoletemarkers'),) 

            def _writejournal(self, desc):
                """wrapped version of _writejournal that save obsolete data"""
                o_writejournal(desc)
                filename = 'obsoletemarkers'
                filepath = self.sjoin(filename)
                if os.path.exists(filepath):
                    journalname = 'journal.' + filename
                    journalpath = self.sjoin(journalname)
                    util.copyfile(filepath, journalpath)

        else: # XXX remove this block when dropping support for Hg 2.1
            def _writejournal(self, desc):
                """wrapped version of _writejournal that save obsolete data"""
                entries = list(o_writejournal(desc))
                filename = 'obsoletemarkers'
                filepath = self.sjoin(filename)
                if os.path.exists(filepath):
                    journalname = 'journal.' + filename
                    journalpath = self.sjoin(journalname)
                    util.copyfile(filepath, journalpath)
                    entries.append(journalpath)
                return tuple(entries)

        def _rollback(self, dryrun, force):
            """wrapped version of _rollback that restore obsolete data"""
            ret = o_rollback(dryrun, force)
            if not (ret or dryrun): # rollback did not fail
                src = self.sjoin('undo.obsoletemarkers')
                dst = self.sjoin('obsoletemarkers')
                if os.path.exists(src):
                    util.rename(src, dst)
                elif os.path.exists(dst):
                    # no state was saved because the file did not exist before
                    os.unlink(dst)
                # invalidate cache
                self.__dict__.pop('obsoletestore', None)
            return ret

        @storecache('00changelog.i')
        def changelog(self):
            # << copy pasted from mercurial source
            c = changelog.changelog(self.sopener)
            if 'HG_PENDING' in os.environ:
                p = os.environ['HG_PENDING']
                if p.startswith(self.root):
                    c.readpending('00changelog.i.a')
            # >> end of the copy paste
            old = c.__dict__.pop('hiddenrevs', ())
            if old:
                ui.warn("old wasn't empty ? %r" % old)
            def _sethidden(c, value):
                assert not value


            class hchangelog(c.__class__):
                @util.propertycache
                def hiddenrevs(c):
                    shown = ['not obsolete()', '.', 'bookmark()', 'tagged()',
                             'public()']
                    basicquery = 'obsolete() - (::(%s))' % (' or '.join(shown))
                    # !!! self is repo not changelog
                    result = set(scmutil.revrange(self, [basicquery]))
                    return result
            c.__class__ = hchangelog
            return c

    repo.__class__ = obsoletingrepo

    if False:
        expobs = [c.node() for c in repo.set('extinct() - secret()')]
        if expobs: # do not lock if nothing moved; locking for peanuts makes hgview reload on any command
            lock = repo.lock()
            try:
                expobs = [c.node() for c in repo.set('extinct() - secret()')]
                phases.retractboundary(repo, 2, expobs)
            finally:
                lock.release()