[obsolete] published changesets can't be obsoleted

The states extension is now required. A warning message is emitted when
obsoleting a published changeset.
# obsolete.py - introduce the obsolete concept in mercurial.
#
# Copyright 2011 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
# Logilab SA <contact@logilab.fr>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
"""Introduce the Obsolete concept to mercurial
General concept
===============
This extension introduces the *obsolete* concept. It adds a new *obsolete*
relation between two changesets. A relation ``<changeset B> obsolete <changeset
A>`` is set to denote that ``<changeset B>`` is a new version of ``<changeset
A>``.
The *obsolete* relation acts as a **perpendicular history** to the standard
changeset history. Standard changeset history versions files. The *obsolete*
relation versions changesets.
:obsolete: a changeset that has been replaced by another one.
:out of sync: a non-obsolete changeset based on an obsolete one.
:suspended: an obsolete changeset with an out-of-sync descendant.
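For example, assume a hypothetical changeset ``A`` that gets rewritten (e.g.
by rebase) into a changeset ``B``; the relation recorded is::

    B obsolete A

``A`` is then *obsolete*. A non-obsolete changeset ``C`` that was based on
``A`` is *out of sync*, and ``A`` is *suspended* for as long as such a
descendant exists.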
Usage and Features
==================
Display and Exchange
....................
Obsolete changesets are hidden, except if they have a non-obsolete descendant
or are otherwise still referenced (working directory parent, bookmark, tag or
published head).
Obsolete changesets are currently not exchanged. This will probably change
later, but it was the simpler solution for now. The obsolete *relations*
themselves, however, are propagated on pull and push through a dedicated
``obsolete`` pushkey namespace.
New commands
............
A ``debugobsolete`` command has been added (``hg debugobsolete <subject>
<object>``). It adds an obsolete relation between two changesets; the subject
is expected to be the newer version of the object.
Context object
..............
Changeset contexts gain an ``obsolete`` method that returns True if the
changeset is obsolete, and False otherwise.
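A minimal sketch, assuming ``repo`` is a repository object opened with this
extension enabled::

    ctx = repo['.']     # working directory parent
    if ctx.obsolete():
        print 'this changeset has been superseded by a newer version'
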
revset
......
An ``obsolete()`` predicate has been added; it matches obsolete changesets.
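For instance, a sketch of selecting obsolete revisions from Python (same
``repo`` assumption as above)::

    from mercurial import scmutil
    obsolete_revs = scmutil.revrange(repo, ['obsolete()'])

On the command line this corresponds to ``hg log -r "obsolete()"``.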
repo extension
..............
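The repository class gains ``obsoletedby``, ``obsolete`` and ``addobsolete``
methods; relations are stored in ``.hg/obsolete-relations`` and exchanged on
pull and push through the pushkey protocol. A minimal sketch of this API
(revisions 0 and 1 are hypothetical stand-ins for the old and new version of
a changeset)::

    new, old = repo[1].node(), repo[0].node()
    repo.addobsolete(new, old)             # record that 'new' obsoletes 'old'
    assert new in repo.obsoletedby(old)    # nodes superseding 'old'
    assert old in repo.obsolete(new)       # nodes made obsolete by 'new'
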
To Do
-----
* refuse to obsolete published changesets
* handle split
* handle conflict
* handle out of sync
"""
import os
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from mercurial.i18n import _
from mercurial import util
from mercurial import context
from mercurial import revset
from mercurial import scmutil
from mercurial import extensions
from mercurial import pushkey
from mercurial import discovery
from mercurial import error
from mercurial.node import hex, bin, short
from mercurial.lock import release
### Patch changectx
#############################
def obsolete(ctx):
"""is the changeset obsolete by other"""
if ctx.node()is None:
return False
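    # immutable (published) changesets are never reported as obsolete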
return bool(ctx._repo.obsoletedby(ctx.node())) and ctx.state().mutable
context.changectx.obsolete = obsolete
ohidden = context.changectx.hidden
def hidden(ctx):
"""hacked version fo hidden that also initialize hiddenrevs set
XXX we need something cleaner"""
# hack to fill hiddenrevs
# compute hidden (XXX should move elsewhere)
if not getattr(ctx._repo.changelog, 'hiddeninit', False):
shown = ['not obsolete()', '.', 'bookmark()', 'tagged()',
'publishedheads()']
basicquery = 'obsolete() - (::(%s))' % (' or '.join(shown))
for rev in scmutil.revrange(ctx._repo, [basicquery]):
ctx._repo.changelog.hiddenrevs.add(rev)
ctx._repo.changelog.hiddeninit = True
return ohidden(ctx)
context.changectx.hidden = hidden
### revset
#############################
def revsetobsolete(repo, subset, x):
"""filter obsolet entry"""
args = revset.getargs(x, 0, 0, 'publicheads takes no arguments')
# XXX slow stop using context
return [r for r in subset if repo[r].obsolete()]
### Other Extension compat
############################
def concludenode(orig, repo, rev, *args, **kwargs):
"""wrapper for rebase 's concludenode that set obsolete relation"""
newrev = orig(repo, rev, *args, **kwargs)
oldnode = repo[rev].node()
newnode = repo[newrev].node()
repo.addobsolete(newnode, oldnode)
return newrev
### Filter obsolete changesets
############################
def filterobsoleteout(orig, repo, remote, *args, **kwargs):
    """wrapper for findcommonoutgoing that removes obsolete changesets
    This behaviour is temporary"""
common, heads = orig(repo, remote, *args, **kwargs)
# filter obsolete
heads = set(map(repo.changelog.rev, heads))
obsoletes = set()
for obj in repo._obsobjrels:
try:
obsoletes.add(repo.changelog.rev(obj))
except error.LookupError:
pass # we don't have this node locally
outgoing = set(repo.changelog.ancestors(*heads))
outgoing.update(heads)
selected = outgoing - obsoletes
heads = sorted(map(repo.changelog.node, selected))
return common, heads
def extsetup(ui):
    try:
        extensions.find('states')
    except KeyError:
        raise error.Abort(_('the obsolete extension requires the states extension'))
revset.symbols["obsolete"] = revsetobsolete
extensions.wrapfunction(discovery, 'findcommonoutgoing', filterobsoleteout)
try:
rebase = extensions.find('rebase')
if rebase:
extensions.wrapfunction(rebase, 'concludenode', concludenode)
except KeyError:
pass # rebase not found
# Pushkey mechanism for mutable
#########################################
def pushobsolete(repo, key, old, raw):
"""push obsolete relation through pushkey"""
assert key == "relations"
w = repo.wlock()
try:
tmp = StringIO()
tmp.write(raw)
tmp.seek(0)
relations = _obsdeserialise(tmp)
for sub, objs in relations.iteritems():
for obj in objs:
try:
repo.addobsolete(sub, obj)
except error.RepoLookupError:
pass
return 0
finally:
w.release()
def listobsolete(repo):
"""dump all obsolete relation in
XXX this have be improved"""
tmp = StringIO()
_obsserialise(repo._obssubrels, tmp)
return {'relations': tmp.getvalue()}
pushkey.register('obsolete', pushobsolete, listobsolete)
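# the 'obsolete' pushkey namespace exposes a single 'relations' key holding
# the serialised subject -> objects mapping (see _obsserialise below)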
### New commands
#############################
def cmddebugobsolete(ui, repo, subject, object):
"""Add an obsolete relation between a too node
The subject is expected to be a newer version of the object"""
sub = repo[subject]
obj = repo[object]
repo.addobsolete(sub.node(), obj.node())
return 0
cmdtable = {'debugobsolete': (cmddebugobsolete, [], '<subject> <object>')}
### serialisation
#############################
def _obsserialise(obssubrels, flike):
"""serialise an obsolete relation mapping in a plain text one
this is for subject -> [objects] mapping
format is::
<subject-full-hex> <object-full-hex>\n"""
for sub, objs in obssubrels.iteritems():
for obj in objs:
flike.write('%s %s\n' % (hex(sub), hex(obj)))
def _obsdeserialise(flike):
"""read a file like object serialised with _obsserialise
this desierialize into a {subject -> objects} mapping"""
rels = {}
for line in flike:
subhex, objhex = line.split()
rels.setdefault(bin(subhex), set()).add(bin(objhex))
return rels
def reposetup(ui, repo):
if not repo.local():
return
opull = repo.pull
opush = repo.push
orollback = repo.rollback
o_writejournal = repo._writejournal
ocancopy = repo.cancopy
class obsoletingrepo(repo.__class__):
### Public method
def obsoletedby(self, node):
"""return the set of node that make <node> obsolete (obj)"""
return self._obsobjrels.get(node, set())
def obsolete(self, node):
"""return the set of node that <node> make obsolete (sub)"""
return self._obssubrels.get(node, set())
def addobsolete(self, sub, obj):
"""Add a relation marking that node <sub> is a new version of <obj>"""
self._obssubrels.setdefault(sub, set()).add(obj)
self._obsobjrels.setdefault(obj, set()).add(sub)
try:
if not self.nodestate(obj).mutable:
self.ui.warn(
_("%(sub)s try to obsolete immutable changeset %(obj)s\n")
% {'sub': short(sub), 'obj': short(obj)})
self.changelog.hiddenrevs.add(repo[obj].rev())
except (error.RepoLookupError, error.LookupError):
                pass # unknown revision (but keep propagating the data)
self._writeobsrels()
### obsolete storage
@util.propertycache
def _obsobjrels(self):
"""{<old-node> -> set(<new-node>)}
also compute hidden revision"""
#reverse sub -> objs mapping
objrels = {}
for sub, objs in self._obssubrels.iteritems():
for obj in objs:
objrels.setdefault(obj, set()).add(sub)
return objrels
@util.propertycache
def _obssubrels(self):
"""{<new-node> -> set(<old-node>)}"""
return self._readobsrels()
### Disk IO
def _readobsrels(self):
"""Write obsolete relation on disk"""
# XXX handle lock
try:
f = self.opener('obsolete-relations')
try:
return _obsdeserialise(f)
finally:
f.close()
except IOError:
return {}
def _writeobsrels(self):
"""Write obsolete relation on disk"""
# XXX handle lock
f = self.opener('obsolete-relations', 'w', atomictemp=True)
try:
_obsserialise(self._obssubrels, f)
f.rename()
finally:
f.close()
### local clone support
def cancopy(self):
"""wrapper on cancopy that deny copy if there is obsolete relation"""
return ocancopy() and not bool(self._obsobjrels) # you can't copy if there is obsolete
### pull // push support
def pull(self, remote, *args, **kwargs):
"""wrapper around push that push obsolete relation"""
result = opull(remote, *args, **kwargs)
if 'obsolete' in remote.listkeys('namespaces'):
tmp = StringIO()
tmp.write(remote.listkeys('obsolete')['relations'])
tmp.seek(0)
obsrels = _obsdeserialise(tmp)
for sub, objs in obsrels.iteritems():
for obj in objs:
self.addobsolete(sub, obj)
return result
def push(self, remote, *args, **opts):
"""wrapper around pull that pull obsolete relation"""
result = opush(remote, *args, **opts)
if 'obsolete' in remote.listkeys('namespaces'):
tmp = StringIO()
_obsserialise(self._obssubrels, tmp)
remote.pushkey('obsolete', 'relations', '', tmp.getvalue())
return result
### rollback support
def _writejournal(self, desc):
"""wrapped version of _writejournal that save obsolete data"""
entries = list(o_writejournal(desc))
filename = 'obsolete-relations'
filepath = self.join(filename)
if os.path.exists(filepath):
journalname = 'journal.' + filename
journalpath = self.join(journalname)
util.copyfile(filepath, journalpath)
entries.append(journalpath)
return tuple(entries)
def rollback(self, dryrun=False):
"""wrapped version of rollback that restore obsolete data"""
wlock = lock = None
try:
wlock = self.wlock()
lock = self.lock()
ret = orollback(dryrun)
                if not (ret or dryrun): # rollback did not fail
src = self.join('undo.obsolete-relations')
dst = self.join('obsolete-relations')
if os.path.exists(src):
util.rename(src, dst)
elif os.path.exists(dst): #unlink in any case
os.unlink(dst)
# invalidate cache
self.__dict__.pop('_obssubrels', None)
self.__dict__.pop('_obsobjrels', None)
return ret
finally:
release(lock, wlock)
repo.__class__ = obsoletingrepo