18 |
23 |
19 |
24 |
@eh.uisetup
def uisetup(ui):
    """Install this module's monkey-patches at ui-setup time.

    Wraps ``discovery._postprocessobsolete`` and ``scmutil.enforcesinglehead``
    with the local implementations defined in this module.
    """
    extensions.wrapfunction(discovery, '_postprocessobsolete', _postprocessobsolete)
    extensions.wrapfunction(scmutil, 'enforcesinglehead', enforcesinglehead)
23 |
29 |
def branchinfo(pushop, repo, node):
    """Return the branch name of ``node`` in ``repo``.

    ``pushop`` is accepted for interface compatibility with the discovery
    code that calls this hook; it is not used here.
    """
    return repo[node].branch()
26 |
32 |
# taken from 7d5455b988ec + branchinfo abstraction.
109 # "added" by the push, but more by the "removal" on the remote so I |
115 # "added" by the push, but more by the "removal" on the remote so I |
110 # think is a okay to ignore them, |
116 # think is a okay to ignore them, |
111 discarded.add(nh) |
117 discarded.add(nh) |
112 newhs |= unknownheads |
118 newhs |= unknownheads |
113 return newhs, discarded |
119 return newhs, discarded |
|
120 |
|
def _get_branch_name(ctx):
    """Return the effective branch name for ``ctx``.

    When ``ctx`` exposes a non-empty topic, the name becomes
    ``"<branch>:<topic>"`` so that distinct topics count as distinct
    branches for head bookkeeping.
    """
    # keep the branch/topic composition in one place so extensions that
    # add branch-like logic only need to wrap this helper
    name = ctx.branch()
    topic = ctx.topic() if util.safehasattr(ctx, 'topic') else None
    if topic:
        name = "%s:%s" % (name, topic)
    return name
|
129 |
|
def _filter_obsolete_heads(repo, heads):
    """filter heads to return non-obsolete ones

    Given a list of heads (on the same named branch) return a new list of heads
    where the obsolete part have been skimmed out.
    """
    new_heads = []
    # work on a copy: the loop pops candidates off this list
    old_heads = heads[:]
    while old_heads:
        rh = old_heads.pop()
        ctx = repo[rh]
        current_name = _get_branch_name(ctx)
        # run this check early to skip the evaluation of the whole branch
        if not ctx.obsolete():
            new_heads.append(rh)
            continue

        # Get all revs/nodes on the branch exclusive to this head
        # (already filtered heads are "ignored"))
        sections_revs = repo.revs(
            b'only(%d, (%ld+%ld))', rh, old_heads, new_heads,
        )
        # keep only the revs in that section that are not obsolete and
        # still carry the same branch(:topic) name as the obsolete head
        keep_revs = []
        for r in sections_revs:
            ctx = repo[r]
            if ctx.obsolete():
                continue
            if _get_branch_name(ctx) != current_name:
                continue
            keep_revs.append(r)
        # the surviving replacement heads are the tips (within the section)
        # among the ancestors of the kept revs
        for h in repo.revs(b'heads(%ld and (::%ld))', sections_revs, keep_revs):
            new_heads.append(h)
    # NOTE(review): indentation reconstructed from a mangled paste — the
    # final sort appears to run once, after the while loop; confirm upstream.
    new_heads.sort()
    return new_heads
|
164 |
|
def enforcesinglehead(orig, repo, tr, desc, accountclosed=False):
    """check that no named branch has multiple heads

    ``orig`` (the wrapped implementation) is intentionally not called:
    this replacement performs the whole check itself, discarding obsolete
    heads (via ``_filter_obsolete_heads``) before counting.  Raises
    ``error.Abort`` for the first branch with more than one surviving head.
    """
    if desc in (b'strip', b'repair'):
        # skip the logic during strip
        return
    nodesummaries = scmutil.nodesummaries
    visible = repo.filtered(b'visible')
    # possible improvement: we could restrict the check to affected branch
    branch_map = visible.branchmap()
    changelog = repo.changelog
    for name in branch_map:
        branch_head_revs = [
            changelog.rev(n)
            for n in branch_map.branchheads(name, closed=accountclosed)
        ]
        surviving = _filter_obsolete_heads(repo, branch_head_revs)
        head_nodes = [changelog.node(r) for r in surviving]
        if len(head_nodes) <= 1:
            continue
        msg = _(b'rejecting multiple heads on branch "%s"') % name
        hint = _(b'%d heads: %s') % (len(head_nodes), nodesummaries(repo, head_nodes))
        raise error.Abort(msg, hint=hint)