checkheads: import our own copy of the checkheads code
I expect enough changes and experiments will be made that it is worthwhile
having our own copy in evolve; keeping it close to the rest of the exchange
code makes sense.
--- a/hgext3rd/evolve/__init__.py Sun Mar 26 04:59:36 2017 +0200
+++ b/hgext3rd/evolve/__init__.py Tue Mar 21 23:44:30 2017 +0100
@@ -111,6 +111,7 @@
from mercurial.node import nullid
from . import (
+ checkheads,
debugcmd,
obsdiscovery,
obsexchange,
@@ -147,6 +148,7 @@
eh.merge(debugcmd.eh)
eh.merge(obsdiscovery.eh)
eh.merge(obsexchange.eh)
+eh.merge(checkheads.eh)
uisetup = eh.final_uisetup
extsetup = eh.final_extsetup
reposetup = eh.final_reposetup
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext3rd/evolve/checkheads.py Tue Mar 21 23:44:30 2017 +0100
@@ -0,0 +1,220 @@
+# Code dedicated to the postprocessing new heads check with obsolescence
+#
+# Copyright 2017 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+from mercurial import (
+ discovery,
+ error,
+ extensions,
+ node as nodemod,
+ obsolete,
+ phases,
+ util,
+)
+
+from mercurial.i18n import _
+
+from . import exthelper
+
+nullid = nodemod.nullid
+short = nodemod.short
+_headssummary = discovery._headssummary
+_oldheadssummary = discovery._oldheadssummary
+_nowarnheads = discovery._nowarnheads
+
+eh = exthelper.exthelper()
+
+@eh.uisetup
+def setupcheckheadswrapper(ui):
+ if util.safehasattr(discovery, '_postprocessobsolete'):
+ extensions.wrapfunction(discovery, '_postprocessobsolete',
+ checkheadslightoverlay)
+ else:
+ extensions.wrapfunction(discovery, 'checkheads',
+ checkheadsfulloverlay)
+
+# have a dedicated wrapper to keep the rest as close to core as possible
+def checkheadsfulloverlay(orig, pushop):
+ if pushop.repo.obsstore:
+ return corecheckheads(pushop)
+ else:
+ return orig(pushop)
+
+def checkheadslightoverlay(orig, *args, **kwargs):
+ return _postprocessobsolete(*args, **kwargs)
+
+# copied from mercurial.discovery.checkheads as in a5bad127128d (4.1)
+#
+# The only differences are:
+# * the _postprocessobsolete section has been extracted,
+# * minor test adjustment to please flake8
+def corecheckheads(pushop):
+ """Check that a push won't add any outgoing head
+
+ raise Abort error and display ui message as needed.
+ """
+
+ repo = pushop.repo.unfiltered()
+ remote = pushop.remote
+ outgoing = pushop.outgoing
+ remoteheads = pushop.remoteheads
+ newbranch = pushop.newbranch
+ inc = bool(pushop.incoming)
+
+ # Check for each named branch if we're creating new remote heads.
+ # To be a remote head after push, node must be either:
+ # - unknown locally
+ # - a local outgoing head descended from update
+ # - a remote head that's known locally and not
+ # ancestral to an outgoing head
+ if remoteheads == [nullid]:
+ # remote is empty, nothing to check.
+ return
+
+ if remote.capable('branchmap'):
+ headssum = _headssummary(repo, remote, outgoing)
+ else:
+ headssum = _oldheadssummary(repo, remoteheads, outgoing, inc)
+ newbranches = [branch for branch, heads in headssum.iteritems()
+ if heads[0] is None]
+ # 1. Check for new branches on the remote.
+ if newbranches and not newbranch: # new branch requires --new-branch
+ branchnames = ', '.join(sorted(newbranches))
+ raise error.Abort(_("push creates new remote branches: %s!")
+ % branchnames,
+ hint=_("use 'hg push --new-branch' to create"
+ " new remote branches"))
+
+ # 2. Find heads that we need not warn about
+ nowarnheads = _nowarnheads(pushop)
+
+ # 3. Check for new heads.
+ # If there are more heads after the push than before, a suitable
+ # error message, depending on unsynced status, is displayed.
+ errormsg = None
+ # If there is no obsstore, allfuturecommon won't be used, so no
+ # need to compute it.
+ if repo.obsstore:
+ allmissing = set(outgoing.missing)
+ cctx = repo.set('%ld', outgoing.common)
+ allfuturecommon = set(c.node() for c in cctx)
+ allfuturecommon.update(allmissing)
+ for branch, heads in sorted(headssum.iteritems()):
+ remoteheads, newheads, unsyncedheads = heads
+ candidate_newhs = set(newheads)
+ # add unsynced data
+ if remoteheads is None:
+ oldhs = set()
+ else:
+ oldhs = set(remoteheads)
+ oldhs.update(unsyncedheads)
+ candidate_newhs.update(unsyncedheads)
+ dhs = None # delta heads, the new heads on branch
+ if not repo.obsstore:
+ discardedheads = set()
+ newhs = candidate_newhs
+ else:
+ newhs, discardedheads = _postprocessobsolete(pushop,
+ allfuturecommon,
+ candidate_newhs)
+ unsynced = sorted(h for h in unsyncedheads if h not in discardedheads)
+ if unsynced:
+ if None in unsynced:
+ # old remote, no heads data
+ heads = None
+ elif len(unsynced) <= 4 or repo.ui.verbose:
+ heads = ' '.join(short(h) for h in unsynced)
+ else:
+ heads = (' '.join(short(h) for h in unsynced[:4]) +
+ ' ' + _("and %s others") % (len(unsynced) - 4))
+ if heads is None:
+ repo.ui.status(_("remote has heads that are "
+ "not known locally\n"))
+ elif branch is None:
+ repo.ui.status(_("remote has heads that are "
+ "not known locally: %s\n") % heads)
+ else:
+ repo.ui.status(_("remote has heads on branch '%s' that are "
+ "not known locally: %s\n") % (branch, heads))
+ if remoteheads is None:
+ if len(newhs) > 1:
+ dhs = list(newhs)
+ if errormsg is None:
+ errormsg = (_("push creates new branch '%s' "
+ "with multiple heads") % (branch))
+ hint = _("merge or"
+ " see 'hg help push' for details about"
+ " pushing new heads")
+ elif len(newhs) > len(oldhs):
+ # remove bookmarked or existing remote heads from the new heads list
+ dhs = sorted(newhs - nowarnheads - oldhs)
+ if dhs:
+ if errormsg is None:
+ if branch not in ('default', None):
+ errormsg = _("push creates new remote head %s "
+ "on branch '%s'!") % (short(dhs[0]), branch)
+ elif repo[dhs[0]].bookmarks():
+ errormsg = (_("push creates new remote head %s "
+ "with bookmark '%s'!")
+ % (short(dhs[0]), repo[dhs[0]].bookmarks()[0]))
+ else:
+ errormsg = _("push creates new remote head %s!"
+ ) % short(dhs[0])
+ if unsyncedheads:
+ hint = _("pull and merge or"
+ " see 'hg help push' for details about"
+ " pushing new heads")
+ else:
+ hint = _("merge or"
+ " see 'hg help push' for details about"
+ " pushing new heads")
+ if branch is None:
+ repo.ui.note(_("new remote heads:\n"))
+ else:
+ repo.ui.note(_("new remote heads on branch '%s':\n") % branch)
+ for h in dhs:
+ repo.ui.note((" %s\n") % short(h))
+ if errormsg:
+ raise error.Abort(errormsg, hint=hint)
+
+def _postprocessobsolete(pushop, common, candidate):
+ """post process the list of new heads with obsolescence information
+
+    Exists as a subfunction to contain the complexity and to allow extensions
+    to experiment with smarter logic.
+ Returns (newheads, discarded_heads) tuple
+ """
+ # remove future heads which are actually obsoleted by another
+ # pushed element:
+ #
+ # XXX as above, There are several cases this code does not handle
+ # XXX properly
+ #
+    # (1) if <nh> is public, it won't be affected by obsolete markers
+    #     and a new head is created
+ #
+ # (2) if the new heads have ancestors which are not obsolete and
+ # not ancestors of any other heads we will have a new head too.
+ #
+ # These two cases will be easy to handle for known changeset but
+ # much more tricky for unsynced changes.
+ #
+ # In addition, this code is confused by prune as it only looks for
+ # successors of the heads (none if pruned) leading to issue4354
+ repo = pushop.repo
+ newhs = set()
+ discarded = set()
+ for nh in candidate:
+ if nh in repo and repo[nh].phase() <= phases.public:
+ newhs.add(nh)
+ else:
+ for suc in obsolete.allsuccessors(repo.obsstore, [nh]):
+ if suc != nh and suc in common:
+ discarded.add(nh)
+ break
+ else:
+ newhs.add(nh)
+ return newhs, discarded