--- a/hgext/obsolete.py Sun Aug 05 11:34:21 2012 +0200
+++ b/hgext/obsolete.py Sun Aug 05 12:02:45 2012 +0200
@@ -31,47 +31,15 @@
Usage and Feature
=================
-Display and Exchange
---------------------
-
-obsolete changesets are hidden. (except if they have non obsolete changeset)
-
-obsolete changesets are not exchanged. This will probably change later but it
-was the simpler solution for now.
New commands
------------
Note that rebased changesets are now marked obsolete instead of being stripped.
-Context object
---------------
-
-Context gains a ``obsolete`` method that will return True if a changeset is
-obsolete False otherwise.
-
-revset
-------
-
-Add an ``obsolete()`` entry.
-
-repo extension
---------------
-
-To Do
-~~~~~
-
-- refuse to obsolete published changesets
-
-- handle split
-
-- handle conflict
-
-- handle unstable // out of sync
-
"""
-import os
+import os, sys
try:
from cStringIO import StringIO
except ImportError:
@@ -79,7 +47,6 @@
from mercurial.i18n import _
-import base64
import json
import struct
@@ -104,6 +71,8 @@
from mercurial import localrepo
from mercurial import cmdutil
from mercurial import templatekw
+from mercurial import obsolete
+obsolete._enabled = True
try:
from mercurial.localrepo import storecache
@@ -116,14 +85,6 @@
### Patch changectx
#############################
-def obsolete(ctx):
- """is the changeset obsolete by other"""
- if ctx.node()is None:
- return False
- return bool(ctx._repo.obsoletedby(ctx.node())) and ctx.phase()
-
-context.changectx.obsolete = obsolete
-
def unstable(ctx):
"""is the changeset unstable (have obsolete ancestor)"""
if ctx.node() is None:
@@ -161,44 +122,60 @@
#############################
def revsethidden(repo, subset, x):
- """hidden changesets"""
+ """``hidden()``
+ Changeset is hidden.
+ """
args = revset.getargs(x, 0, 0, 'hidden takes no argument')
- return [r for r in subset if r in repo.changelog.hiddenrevs]
+ return [r for r in subset if r in repo.hiddenrevs]
def revsetobsolete(repo, subset, x):
- """obsolete changesets"""
+ """``obsolete()``
+ Changeset is obsolete.
+ """
args = revset.getargs(x, 0, 0, 'obsolete takes no argument')
return [r for r in subset if r in repo._obsoleteset and repo._phasecache.phase(repo, r) > 0]
# XXX Backward compatibility, to be removed once stabilized
if '_phasecache' not in vars(localrepo.localrepository): # new api
def revsetobsolete(repo, subset, x):
- """obsolete changesets"""
+ """``obsolete()``
+ Changeset is obsolete.
+ """
args = revset.getargs(x, 0, 0, 'obsolete takes no argument')
return [r for r in subset if r in repo._obsoleteset and repo._phaserev[r] > 0]
def revsetunstable(repo, subset, x):
- """non obsolete changesets descendant of obsolete one"""
+ """``unstable()``
+ Unstable changesets are non-obsolete changesets with obsolete ancestors.
+ """
args = revset.getargs(x, 0, 0, 'unstable takes no arguments')
return [r for r in subset if r in repo._unstableset]
def revsetsuspended(repo, subset, x):
- """obsolete changesets with non obsolete descendants"""
+ """``suspended()``
+ Obsolete changesets with non-obsolete descendants.
+ """
args = revset.getargs(x, 0, 0, 'suspended takes no arguments')
return [r for r in subset if r in repo._suspendedset]
def revsetextinct(repo, subset, x):
- """obsolete changesets without obsolete descendants"""
+ """``extinct()``
+ Obsolete changesets with obsolete descendants only.
+ """
args = revset.getargs(x, 0, 0, 'extinct takes no arguments')
return [r for r in subset if r in repo._extinctset]
def revsetlatecomer(repo, subset, x):
- """latecomer, Try to succeed to public change"""
+ """``latecomer()``
+ Changesets marked as successors of public changesets.
+ """
args = revset.getargs(x, 0, 0, 'latecomer takes no arguments')
return [r for r in subset if r in repo._latecomerset]
def revsetconflicting(repo, subset, x):
- """conflicting, Try to succeed to public change"""
+ """``conflicting()``
+ Changesets marked as successors of the same changeset.
+ """
args = revset.getargs(x, 0, 0, 'conflicting takes no arguments')
return [r for r in subset if r in repo._conflictingset]
@@ -215,7 +192,9 @@
return cs
def revsetprecursors(repo, subset, x):
- """precursors of a subset"""
+ """``precursors(set)``
+ Immediate precursors of changesets in set.
+ """
s = revset.getset(repo, range(len(repo)), x)
cs = _precursors(repo, s)
return [r for r in subset if r in cs]
@@ -241,7 +220,9 @@
return cs
def revsetallprecursors(repo, subset, x):
- """obsolete parents"""
+ """``allprecursors(set)``
+ Transitive precursors of changesets in set.
+ """
s = revset.getset(repo, range(len(repo)), x)
cs = _allprecursors(repo, s)
return [r for r in subset if r in cs]
@@ -260,7 +241,9 @@
return cs
def revsetsuccessors(repo, subset, x):
- """successors of a subset"""
+ """``successors(set)``
+ Immediate successors of changesets in set.
+ """
s = revset.getset(repo, range(len(repo)), x)
cs = _successors(repo, s)
return [r for r in subset if r in cs]
@@ -286,7 +269,9 @@
return cs
def revsetallsuccessors(repo, subset, x):
- """obsolete parents"""
+ """``allsuccessors(set)``
+ Transitive successors of changesets in set.
+ """
s = revset.getset(repo, range(len(repo)), x)
cs = _allsuccessors(repo, s)
return [r for r in subset if r in cs]
@@ -424,62 +409,9 @@
except KeyError:
pass # rebase not found
-# Pushkey mechanism for mutable
-#########################################
-
-def listmarkers(repo):
- """List markers over pushkey"""
- if not repo.obsstore:
- return {}
- data = repo.obsstore._writemarkers()
- encdata = base85.b85encode(data)
- return {'dump0': encdata,
- 'dump': encdata} # legacy compat
-
-def pushmarker(repo, key, old, new):
- """Push markers over pushkey"""
- if not key.startswith('dump'):
- repo.ui.warn(_('unknown key: %r') % key)
- return 0
- if old:
- repo.ui.warn(_('unexpected old value') % key)
- return 0
- data = base85.b85decode(new)
- lock = repo.lock()
- try:
- try:
- repo.obsstore.mergemarkers(data)
- return 1
- except util.Abort:
- return 0
- finally:
- lock.release()
-
-pushkey.register('obsolete', pushmarker, listmarkers)
-
### Discovery wrapping
#############################
-class blist(list, object):
- """silly class to have non False but empty list"""
-
- def __nonzero__(self):
- return bool(len(self.orig))
-
-def wrapfindcommonoutgoing(orig, repo, *args, **kwargs):
- """wrap mercurial.discovery.findcommonoutgoing to remove extinct changeset
-
- Such excluded changeset are removed from excluded and will *not* appear
- are excluded secret changeset.
- """
- outgoing = orig(repo, *args, **kwargs)
- orig = outgoing.excluded
- outgoing.excluded = blist(n for n in orig if not repo[n].extinct())
- # when no revision is specified (push everything) a shortcut is taken when
- # nothign was exclude. taking this code path when extinct changeset have
- # been excluded leads to repository corruption.
- outgoing.excluded.orig = orig
- return outgoing
def wrapcheckheads(orig, repo, remote, outgoing, *args, **kwargs):
"""wrap mercurial.discovery.checkheads
@@ -489,74 +421,16 @@
"""
# do not push instability
for h in outgoing.missingheads:
- # checking heads only is enought because any thing base on obsolete
- # changeset is either obsolete or unstable.
- ctx = repo[h]
- if ctx.unstable():
- raise util.Abort(_("push includes an unstable changeset: %s!")
- % ctx)
- if ctx.obsolete():
- raise util.Abort(_("push includes an obsolete changeset: %s!")
- % ctx)
+ # Checking heads is enough, obsolete descendants are either
+ # obsolete or unstable.
+ ctx = repo[h]
if ctx.latecomer():
- raise util.Abort(_("push includes an latecomer changeset: %s!")
+ raise util.Abort(_("push includes a latecomer changeset: %s!")
% ctx)
if ctx.conflicting():
- raise util.Abort(_("push includes conflicting changeset: %s!")
+ raise util.Abort(_("push includes a conflicting changeset: %s!")
% ctx)
- ### patch remote branch map
- # do not read it this burn eyes
- try:
- if 'oldbranchmap' not in vars(remote):
- remote.oldbranchmap = remote.branchmap
- def branchmap():
- newbm = {}
- oldbm = None
- if (util.safehasattr(phases, 'visiblebranchmap')
- and not util.safehasattr(remote, 'ignorevisiblebranchmap')
- ):
- remote.ignorevisiblebranchmap = False
- remote.branchmap = remote.oldbranchmap
- oldbm = phases.visiblebranchmap(remote)
- remote.branchmap = remote.newbranchmap
- remote.ignorevisiblebranchmap = True
- if oldbm is None:
- oldbm = remote.oldbranchmap()
- for branch, nodes in oldbm.iteritems():
- nodes = list(nodes)
- new = set()
- while nodes:
- n = nodes.pop()
- if n in repo.obsstore.precursors:
- markers = repo.obsstore.precursors[n]
- for mark in markers:
- for newernode in mark[1]:
- if newernode is not None:
- nodes.append(newernode)
- else:
- new.add(n)
- if new:
- newbm[branch] = list(new)
- return newbm
- remote.ignorevisiblebranchmap = True
- remote.branchmap = branchmap
- remote.newbranchmap = branchmap
- return orig(repo, remote, outgoing, *args, **kwargs)
- finally:
- remote.__dict__.pop('branchmap', None) # restore class one
- remote.__dict__.pop('oldbranchmap', None)
- remote.__dict__.pop('newbranchmap', None)
- remote.__dict__.pop('ignorevisiblebranchmap', None)
-
-# eye are still burning
-def wrapvisiblebranchmap(orig, repo):
- ignore = getattr(repo, 'ignorevisiblebranchmap', None)
- if ignore is None:
- return orig(repo)
- elif ignore:
- return repo.branchmap()
- else:
- return None # break recursion
+ return orig(repo, remote, outgoing, *args, **kwargs)
def wrapclearcache(orig, repo, *args, **kwargs):
try:
@@ -571,20 +445,6 @@
cmdtable = {}
command = cmdutil.command(cmdtable)
-@command('debugobsolete', [], _('SUBJECT OBJECT'))
-def cmddebugobsolete(ui, repo, subject, object):
- """add an obsolete relation between two nodes
-
- The subject is expected to be a newer version of the object.
- """
- lock = repo.lock()
- try:
- sub = repo[subject]
- obj = repo[object]
- repo.addobsolete(sub.node(), obj.node())
- finally:
- lock.release()
- return 0
@command('debugconvertobsolete', [], '')
def cmddebugconvertobsolete(ui, repo):
@@ -594,69 +454,77 @@
l = repo.lock()
some = False
try:
- repo._importoldobsolete = True
- store = repo.obsstore
- ### very first format
+ unlink = []
+ tr = repo.transaction('convert-obsolete')
try:
- f = repo.opener('obsolete-relations')
+ repo._importoldobsolete = True
+ store = repo.obsstore
+ ### very first format
try:
+ f = repo.opener('obsolete-relations')
+ try:
+ some = True
+ for line in f:
+ subhex, objhex = line.split()
+ suc = bin(subhex)
+ prec = bin(objhex)
+ sucs = (suc==nullid) and [] or [suc]
+ meta = {
+ 'date': '%i %i' % util.makedate(),
+ 'user': ui.username(),
+ }
+ try:
+ store.create(tr, prec, sucs, 0, meta)
+ cnt += 1
+ except ValueError:
+ repo.ui.write_err("invalid old marker line: %s"
+ % (line))
+ err += 1
+ finally:
+ f.close()
+ unlink.append(repo.join('obsolete-relations'))
+ except IOError:
+ pass
+ ### second (json) format
+ data = repo.sopener.tryread('obsoletemarkers')
+ if data:
some = True
- for line in f:
- subhex, objhex = line.split()
- suc = bin(subhex)
- prec = bin(objhex)
- sucs = (suc==nullid) and [] or [suc]
- meta = {
- 'date': '%i %i' % util.makedate(),
- 'user': ui.username(),
- }
+ for oldmark in json.loads(data):
+ del oldmark['id'] # dropped for now
+ del oldmark['reason'] # unused until then
+ oldobject = str(oldmark.pop('object'))
+ oldsubjects = [str(s) for s in oldmark.pop('subjects', [])]
+ LOOKUP_ERRORS = (error.RepoLookupError, error.LookupError)
+ if len(oldobject) != 40:
+ try:
+ oldobject = repo[oldobject].node()
+ except LOOKUP_ERRORS:
+ pass
+ if any(len(s) != 40 for s in oldsubjects):
+ try:
+ oldsubjects = [repo[s].node() for s in oldsubjects]
+ except LOOKUP_ERRORS:
+ pass
+
+ oldmark['date'] = '%i %i' % tuple(oldmark['date'])
+ meta = dict((k.encode('utf-8'), v.encode('utf-8'))
+ for k, v in oldmark.iteritems())
try:
- store.create(prec, sucs, 0, meta)
+ succs = [bin(n) for n in oldsubjects]
+ succs = [n for n in succs if n != nullid]
+ store.create(tr, bin(oldobject), succs,
+ 0, meta)
cnt += 1
except ValueError:
- repo.ui.write_err("invalid old marker line: %s"
- % (line))
+ repo.ui.write_err("invalid marker %s -> %s\n"
+ % (oldobject, oldsubjects))
err += 1
- finally:
- f.close()
- util.unlink(repo.join('obsolete-relations'))
- except IOError:
- pass
- ### second (json) format
- data = repo.sopener.tryread('obsoletemarkers')
- if data:
- some = True
- for oldmark in json.loads(data):
- del oldmark['id'] # dropped for now
- del oldmark['reason'] # unused until then
- oldobject = str(oldmark.pop('object'))
- oldsubjects = [str(s) for s in oldmark.pop('subjects', [])]
- LOOKUP_ERRORS = (error.RepoLookupError, error.LookupError)
- if len(oldobject) != 40:
- try:
- oldobject = repo[oldobject].node()
- except LOOKUP_ERRORS:
- pass
- if any(len(s) != 40 for s in oldsubjects):
- try:
- oldsubjects = [repo[s].node() for s in oldsubjects]
- except LOOKUP_ERRORS:
- pass
-
- oldmark['date'] = '%i %i' % tuple(oldmark['date'])
- meta = dict((k.encode('utf-8'), v.encode('utf-8'))
- for k, v in oldmark.iteritems())
- try:
- succs = [bin(n) for n in oldsubjects]
- succs = [n for n in succs if n != nullid]
- store.create(bin(oldobject), succs,
- 0, meta)
- cnt += 1
- except ValueError:
- repo.ui.write_err("invalid marker %s -> %s\n"
- % (oldobject, oldsubjects))
- err += 1
- util.unlink(repo.sjoin('obsoletemarkers'))
+ unlink.append(repo.sjoin('obsoletemarkers'))
+ tr.close()
+ for path in unlink:
+ util.unlink(path)
+ finally:
+ tr.release()
finally:
del repo._importoldobsolete
l.release()
@@ -717,26 +585,23 @@
if newconflictings > 0:
ui.warn(_('%i new conflictings changesets\n') % newconflictings)
-def noextinctsvisibleheads(orig, repo):
- repo._turn_extinct_secret()
- return orig(repo)
-
def wrapcmdutilamend(orig, ui, repo, commitfunc, old, *args, **kwargs):
oldnode = old.node()
new = orig(ui, repo, commitfunc, old, *args, **kwargs)
if new != oldnode:
lock = repo.lock()
try:
- meta = {
- 'subjects': [new],
- 'object': oldnode,
- 'date': util.makedate(),
- 'user': ui.username(),
- 'reason': 'commit --amend',
- }
- repo.obsstore.create(oldnode, [new], 0, meta)
- repo._clearobsoletecache()
- repo._turn_extinct_secret()
+ tr = repo.transaction('post-amend-obst')
+ try:
+ meta = {
+ 'date': '%i %i' % util.makedate(),
+ 'user': ui.username(),
+ }
+ repo.obsstore.create(tr, oldnode, [new], 0, meta)
+ tr.close()
+ repo._clearobsoletecache()
+ finally:
+ tr.release()
finally:
lock.release()
return new
@@ -746,12 +611,8 @@
extensions.wrapcommand(commands.table, "pull", wrapmayobsoletewc)
if util.safehasattr(cmdutil, 'amend'):
extensions.wrapfunction(cmdutil, 'amend', wrapcmdutilamend)
- extensions.wrapfunction(discovery, 'findcommonoutgoing', wrapfindcommonoutgoing)
extensions.wrapfunction(discovery, 'checkheads', wrapcheckheads)
- extensions.wrapfunction(phases, 'visibleheads', noextinctsvisibleheads)
extensions.wrapfunction(phases, 'advanceboundary', wrapclearcache)
- if util.safehasattr(phases, 'visiblebranchmap'):
- extensions.wrapfunction(phases, 'visiblebranchmap', wrapvisiblebranchmap)
### serialisation
#############################
@@ -812,183 +673,6 @@
newer.add(())
return sorted(newer)
-### obsolete relation storage
-#############################
-def add2set(d, key, mark):
- """add <mark> to a `set` in <d>[<key>]"""
- d.setdefault(key, []).append(mark)
-
-def markerid(marker):
- KEYS = ['subjects', "object", "date", "user", "reason"]
- for key in KEYS:
- assert key in marker
- keys = sorted(marker.keys())
- a = util.sha1()
- for key in keys:
- if key == 'subjects':
- for sub in sorted(marker[key]):
- a.update(sub)
- elif key == 'id':
- pass
- else:
- a.update(str(marker[key]))
- a.update('\0')
- return a.digest()
-
-# mercurial backport
-
-def encodemeta(meta):
- """Return encoded metadata string to string mapping.
-
- Assume no ':' in key and no '\0' in both key and value."""
- for key, value in meta.iteritems():
- if ':' in key or '\0' in key:
- raise ValueError("':' and '\0' are forbidden in metadata key'")
- if '\0' in value:
- raise ValueError("':' are forbidden in metadata value'")
- return '\0'.join(['%s:%s' % (k, meta[k]) for k in sorted(meta)])
-
-def decodemeta(data):
- """Return string to string dictionary from encoded version."""
- d = {}
- for l in data.split('\0'):
- if l:
- key, value = l.split(':')
- d[key] = value
- return d
-
-# data used for parsing and writing
-_fmversion = 0
-_fmfixed = '>BIB20s'
-_fmnode = '20s'
-_fmfsize = struct.calcsize(_fmfixed)
-_fnodesize = struct.calcsize(_fmnode)
-
-def _readmarkers(data):
- """Read and enumerate markers from raw data"""
- off = 0
- diskversion = _unpack('>B', data[off:off + 1])[0]
- off += 1
- if diskversion != _fmversion:
- raise util.Abort(_('parsing obsolete marker: unknown version %r')
- % diskversion)
-
- # Loop on markers
- l = len(data)
- while off + _fmfsize <= l:
- # read fixed part
- cur = data[off:off + _fmfsize]
- off += _fmfsize
- nbsuc, mdsize, flags, pre = _unpack(_fmfixed, cur)
- # read replacement
- sucs = ()
- if nbsuc:
- s = (_fnodesize * nbsuc)
- cur = data[off:off + s]
- sucs = _unpack(_fmnode * nbsuc, cur)
- off += s
- # read metadata
- # (metadata will be decoded on demand)
- metadata = data[off:off + mdsize]
- if len(metadata) != mdsize:
- raise util.Abort(_('parsing obsolete marker: metadata is too '
- 'short, %d bytes expected, got %d')
- % (len(metadata), mdsize))
- off += mdsize
- yield (pre, sucs, flags, metadata)
-
-class obsstore(object):
- """Store obsolete markers
-
- Markers can be accessed with two mappings:
- - precursors: old -> set(new)
- - successors: new -> set(old)
- """
-
- def __init__(self):
- self._all = []
- # new markers to serialize
- self._new = []
- self.precursors = {}
- self.successors = {}
-
- def __iter__(self):
- return iter(self._all)
-
- def __nonzero__(self):
- return bool(self._all)
-
- def create(self, prec, succs=(), flag=0, metadata=None):
- """obsolete: add a new obsolete marker
-
- * ensuring it is hashable
- * check mandatory metadata
- * encode metadata
- """
- if metadata is None:
- metadata = {}
- if len(prec) != 20:
- raise ValueError(repr(prec))
- for succ in succs:
- if len(succ) != 20:
- raise ValueError((succs))
- marker = (str(prec), tuple(succs), int(flag), encodemeta(metadata))
- self.add(marker)
-
- def add(self, marker):
- """Add a new marker to the store
-
- This marker still needs to be written to disk"""
- self._new.append(marker)
- self._load(marker)
-
- def loadmarkers(self, data):
- """Load all markers in data, mark them as known."""
- for marker in _readmarkers(data):
- self._load(marker)
-
- def mergemarkers(self, data):
- other = set(_readmarkers(data))
- local = set(self._all)
- new = other - local
- for marker in new:
- self.add(marker)
-
- def flushmarkers(self, stream):
- """Write all markers to a stream
-
- After this operation, "new" markers are considered "known"."""
- self._writemarkers(stream)
- self._new[:] = []
-
- def _load(self, marker):
- self._all.append(marker)
- pre, sucs = marker[:2]
- self.precursors.setdefault(pre, set()).add(marker)
- for suc in sucs:
- self.successors.setdefault(suc, set()).add(marker)
-
- def _writemarkers(self, stream=None):
- # Kept separate from flushmarkers(), it will be reused for
- # markers exchange.
- if stream is None:
- final = []
- w = final.append
- else:
- w = stream.write
- w(_pack('>B', _fmversion))
- for marker in self._all:
- pre, sucs, flags, metadata = marker
- nbsuc = len(sucs)
- format = _fmfixed + (_fmnode * nbsuc)
- data = [nbsuc, len(metadata), flags, pre]
- data.extend(sucs)
- w(_pack(format, *data))
- w(metadata)
- if stream is None:
- return ''.join(final)
-
-
### repo subclassing
#############################
@@ -998,10 +682,7 @@
if not util.safehasattr(repo.opener, 'tryread'):
raise util.Abort('Obsolete extension requires Mercurial 2.2 (or later)')
- opull = repo.pull
opush = repo.push
- olock = repo.lock
- o_rollback = repo._rollback
o_updatebranchcache = repo.updatebranchcache
# /!\ api change in Hg 2.2 (97efd26eb9576f39590812ea9) /!\
@@ -1032,31 +713,13 @@
"""return the set of node that <node> make obsolete (sub)"""
return set(marker[0] for marker in self.obsstore.successors.get(node, []))
- @storecache('obsstore')
- def obsstore(self):
- if not getattr(self, '_importoldobsolete', False):
- data = repo.opener.tryread('obsolete-relations')
- if not data:
- data = repo.sopener.tryread('obsoletemarkers')
- if data:
- raise util.Abort('old format of obsolete marker detected!\n'
- 'run `hg debugconvertobsolete` once.')
- store = obsstore()
- data = self.sopener.tryread('obsstore')
- if data:
- store.loadmarkers(data)
- return store
-
@util.propertycache
def _obsoleteset(self):
"""the set of obsolete revision"""
obs = set()
nm = self.changelog.nodemap
- for obj in self.obsstore.precursors:
- try: # /!\api change in Hg 2.2 (e8d37b78acfb22ae2c1fb126c2)/!\
- rev = nm.get(obj)
- except TypeError: #XXX to remove while breaking Hg 2.1 support
- rev = nm.get(obj, None)
+ for prec in self.obsstore.precursors:
+ rev = nm.get(prec)
if rev is not None:
obs.add(rev)
return obs
@@ -1137,16 +800,19 @@
% {'sub': short(sub), 'obj': short(obj)})
lock = self.lock()
try:
- meta = {
- 'date': util.makedate(),
- 'user': ui.username(),
- 'reason': 'unknown',
- }
- subs = (sub == nullid) and [] or [sub]
- mid = self.obsstore.create(obj, subs, 0, meta)
- self._clearobsoletecache()
- self._turn_extinct_secret()
- return mid
+ tr = self.transaction('add-obsolete')
+ try:
+ meta = {
+ 'date': '%i %i' % util.makedate(),
+ 'user': ui.username(),
+ }
+ subs = (sub == nullid) and [] or [sub]
+ mid = self.obsstore.create(tr, obj, subs, 0, meta)
+ tr.close()
+ self._clearobsoletecache()
+ return mid
+ finally:
+ tr.release()
finally:
lock.release()
@@ -1155,155 +821,35 @@
# Assume oldnodes are all descendants of a single rev
rootrevs = self.revs('roots(%ln)', oldnodes)
assert len(rootrevs) == 1, rootrevs
- rootnode = self[rootrevs[0]].node()
+ #rootnode = self[rootrevs[0]].node()
for n in oldnodes:
self.addobsolete(newnode, n)
- def _turn_extinct_secret(self):
- """ensure all extinct changeset are secret"""
- self._clearobsoletecache()
- # this is mainly for safety purpose
- # both pull and push
- query = '(obsolete() - obsolete()::(unstable() - secret())) - secret()'
- expobs = [c.node() for c in repo.set(query)]
- phases.retractboundary(repo, 2, expobs)
-
- ### Disk IO
-
- def lock(self, *args, **kwargs):
- l = olock(*args, **kwargs)
- if not getattr(l.releasefn, 'obspatched', False):
- oreleasefn = l.releasefn
- def releasefn(*args, **kwargs):
- if 'obsstore' in vars(self) and self.obsstore._new:
- f = self.sopener('obsstore', 'wb', atomictemp=True)
- try:
- self.obsstore.flushmarkers(f)
- f.close()
- except: # re-raises
- f.discard()
- raise
- oreleasefn(*args, **kwargs)
- releasefn.obspatched = True
- l.releasefn = releasefn
- return l
-
-
### pull // push support
- def pull(self, remote, *args, **kwargs):
- """wrapper around push that push obsolete relation"""
- l = repo.lock()
- try:
- result = opull(remote, *args, **kwargs)
- remoteobs = remote.listkeys('obsolete')
- if 'dump' in remoteobs:
- remoteobs['dump0'] = remoteobs.pop('dump')
- if 'dump0' in remoteobs:
- for key, values in remoteobs.iteritems():
- if key.startswith('dump'):
- data = base85.b85decode(remoteobs['dump0'])
- self.obsstore.mergemarkers(data)
- self._clearobsoletecache()
- self._turn_extinct_secret()
- return result
- finally:
- l.release()
-
def push(self, remote, *args, **opts):
"""wrapper around pull that pull obsolete relation"""
- self._turn_extinct_secret()
try:
result = opush(remote, *args, **opts)
except util.Abort, ex:
- hint = _("use 'hg stabilize' to get a stable history (or --force to proceed)")
+ hint = _("use 'hg stabilize' to get a stable history "
+ "or --force to ignore warnings")
if (len(ex.args) >= 1
and ex.args[0].startswith('push includes ')
and ex.hint is None):
ex.hint = hint
raise
- if 'obsolete' in remote.listkeys('namespaces') and self.obsstore:
- data = self.obsstore._writemarkers()
- r = remote.pushkey('obsolete', 'dump0', '',
- base85.b85encode(data))
- if not r:
- self.ui.warn(_('failed to push obsolete markers!\n'))
- self._turn_extinct_secret()
-
return result
- ### rollback support
-
- # /!\ api change in Hg 2.2 (97efd26eb9576f39590812ea9) /!\
- if util.safehasattr(repo, '_journalfiles'): # Hg 2.2
- def _journalfiles(self):
- return o_journalfiles() + (self.sjoin('journal.obsstore'),)
-
- def _writejournal(self, desc):
- """wrapped version of _writejournal that save obsolete data"""
- o_writejournal(desc)
- filename = 'obsstore'
- filepath = self.sjoin(filename)
- if os.path.exists(filepath):
- journalname = 'journal.' + filename
- journalpath = self.sjoin(journalname)
- util.copyfile(filepath, journalpath)
-
- else: # XXX removing this bloc will break Hg 2.1 support
- def _writejournal(self, desc):
- """wrapped version of _writejournal that save obsolete data"""
- entries = list(o_writejournal(desc))
- filename = 'obsstore'
- filepath = self.sjoin(filename)
- if os.path.exists(filepath):
- journalname = 'journal.' + filename
- journalpath = self.sjoin(journalname)
- util.copyfile(filepath, journalpath)
- entries.append(journalpath)
- return tuple(entries)
-
- def _rollback(self, dryrun, force):
- """wrapped version of _rollback that restore obsolete data"""
- ret = o_rollback(dryrun, force)
- if not (ret or dryrun): #rollback did not failed
- src = self.sjoin('undo.obsstore')
- dst = self.sjoin('obsstore')
- if os.path.exists(src):
- util.rename(src, dst)
- elif os.path.exists(dst):
- # If no state was saved because the file did not existed before.
- os.unlink(dst)
- # invalidate cache
- self.__dict__.pop('obsstore', None)
- return ret
-
- @storecache('00changelog.i')
- def changelog(self):
- # << copy pasted from mercurial source
- c = changelog.changelog(self.sopener)
- if 'HG_PENDING' in os.environ:
- p = os.environ['HG_PENDING']
- if p.startswith(self.root):
- c.readpending('00changelog.i.a')
- # >> end of the copy paste
- old = c.__dict__.pop('hiddenrevs', ())
- if old:
- ui.warn("old wasn't empty ? %r" % old)
- def _sethidden(c, value):
- assert not value
-
-
- class hchangelog(c.__class__):
- @util.propertycache
- def hiddenrevs(c):
- shown = ['not obsolete()', '.', 'bookmark()', 'tagged()',
- 'public()']
- basicquery = 'obsolete() - (::(%s))' % (' or '.join(shown))
- # !!! self is repo not changelog
- result = set(scmutil.revrange(self, [basicquery]))
- return result
- c.__class__ = hchangelog
- return c
-
repo.__class__ = obsoletingrepo
+ for arg in sys.argv:
+ if 'debugc' in arg:
+ break
+ else:
+ data = repo.opener.tryread('obsolete-relations')
+ if not data:
+ data = repo.sopener.tryread('obsoletemarkers')
+ if data:
+ raise util.Abort('old format of obsolete marker detected!\n'
+ 'run `hg debugconvertobsolete` once.')