##// END OF EJS Templates
setdiscover: allow to ignore part of the local graph...
Boris Feld -
r35305:f77121b6 default
parent child Browse files
Show More
@@ -1,286 +1,287 b''
1 1 # dagutil.py - dag utilities for mercurial
2 2 #
3 3 # Copyright 2010 Benoit Boissinot <bboissin@gmail.com>
4 4 # and Peter Arrenbrecht <peter@arrenbrecht.ch>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 from __future__ import absolute_import
10 10
11 11 from .i18n import _
12 12 from .node import nullrev
13 13
class basedag(object):
    '''generic interface for DAGs

    terms:
    "ix" (short for index) identifies a nodes internally,
    "id" identifies one externally.

    All params are ixs unless explicitly suffixed otherwise.
    Pluralized params are lists or sets.
    '''

    def __init__(self):
        # lazily-built cache for the inverse DAG (see inverse())
        self._inverse = None

    def nodeset(self):
        '''set of all node ixs'''
        raise NotImplementedError

    def heads(self):
        '''list of head ixs'''
        raise NotImplementedError

    def parents(self, ix):
        '''list of parents ixs of ix'''
        raise NotImplementedError

    def inverse(self):
        '''inverse DAG, where parents becomes children, etc.'''
        raise NotImplementedError

    def ancestorset(self, starts, stops=None):
        '''
        set of all ancestors of starts (incl), but stop walk at stops (excl)
        '''
        raise NotImplementedError

    def descendantset(self, starts, stops=None):
        '''
        set of all descendants of starts (incl), but stop walk at stops (excl)
        '''
        # descendants here are exactly the ancestors in the inverse DAG
        return self.inverse().ancestorset(starts, stops)

    def headsetofconnecteds(self, ixs):
        '''
        subset of connected list of ixs so that no node has a descendant in it

        By "connected list" we mean that if an ancestor and a descendant are in
        the list, then so is at least one path connecting them.
        '''
        raise NotImplementedError

    def externalize(self, ix):
        '''return a node id'''
        return self._externalize(ix)

    def externalizeall(self, ixs):
        '''return a list of (or set if given a set) of node ids'''
        ids = self._externalizeall(ixs)
        # mirror the container kind of the input
        return set(ids) if isinstance(ixs, set) else list(ids)

    def internalize(self, id):
        '''return a node ix'''
        return self._internalize(id)

    def internalizeall(self, ids, filterunknown=False):
        '''return a list of (or set if given a set) of node ixs'''
        ixs = self._internalizeall(ids, filterunknown)
        # mirror the container kind of the input
        return set(ixs) if isinstance(ids, set) else list(ixs)
87 87
class genericdag(basedag):
    '''generic implementations for DAGs'''

    def ancestorset(self, starts, stops=None):
        # depth-first walk along parent links, never crossing a stop node
        blocked = set(stops) if stops else set()
        visited = set()
        stack = list(starts)
        while stack:
            node = stack.pop()
            if node in visited or node in blocked:
                continue
            visited.add(node)
            stack.extend(self.parents(node))
        return visited

    def headsetofconnecteds(self, ixs):
        heads = set(ixs)
        if not heads:
            return heads
        # any node that is some listed node's parent cannot be a head
        for node in ixs:
            for parent in self.parents(node):
                heads.discard(parent)
        assert heads
        return heads
115 115
class revlogbaseddag(basedag):
    '''generic dag interface to a revlog'''

    def __init__(self, revlog, nodeset):
        basedag.__init__(self)
        self._revlog = revlog
        self._heads = None  # memoized by heads()
        self._nodeset = nodeset

    def nodeset(self):
        return self._nodeset

    def heads(self):
        # compute the head list once, on first use
        if self._heads is None:
            self._heads = self._getheads()
        return self._heads

    def _externalize(self, ix):
        # index entry slot 7 is the external node id
        return self._revlog.index[ix][7]

    def _externalizeall(self, ixs):
        index = self._revlog.index
        return [index[ix][7] for ix in ixs]

    def _internalize(self, id):
        rev = self._revlog.rev(id)
        if rev == nullrev:
            raise LookupError(id, self._revlog.indexfile, _('nullid'))
        return rev

    def _internalizeall(self, ids, filterunknown):
        rl = self._revlog
        if not filterunknown:
            return [self._internalize(i) for i in ids]
        # silently drop unknown, null and filtered revisions
        lookup = rl.nodemap.get
        return [r for r in map(lookup, ids)
                if (r is not None
                    and r != nullrev
                    and r not in rl.filteredrevs)]
153 153
class revlogdag(revlogbaseddag):
    '''dag interface to a revlog'''

    def __init__(self, revlog, localsubset=None):
        # localsubset, when given, is stored directly as the head list so
        # heads() reports only that part of the local graph (used by
        # discovery to ignore the rest); None keeps lazy head computation
        revlogbaseddag.__init__(self, revlog, set(revlog))
        self._heads = localsubset

    def _getheads(self):
        # drop nullrev, which headrevs() can report for an empty revlog
        return [r for r in self._revlog.headrevs() if r != nullrev]

    def parents(self, ix):
        '''return the 0-, 1- or 2-element list of parent revs of ix'''
        rlog = self._revlog
        idx = rlog.index
        revdata = idx[ix]
        prev = revdata[5]  # first parent rev
        if prev != nullrev:
            prev2 = revdata[6]  # second parent rev
            if prev2 == nullrev:
                return [prev]
            return [prev, prev2]
        prev2 = revdata[6]
        if prev2 != nullrev:
            return [prev2]
        return []

    def inverse(self):
        # build (and cache) the child-pointing view of this DAG
        if self._inverse is None:
            self._inverse = inverserevlogdag(self)
        return self._inverse

    def ancestorset(self, starts, stops=None):
        # same walk as genericdag.ancestorset, but reading parents
        # straight out of the revlog index for speed
        rlog = self._revlog
        idx = rlog.index
        if stops:
            stops = set(stops)
        else:
            stops = set()
        seen = set()
        pending = list(starts)
        while pending:
            rev = pending.pop()
            if rev not in seen and rev not in stops:
                seen.add(rev)
                revdata = idx[rev]
                for i in [5, 6]:  # parent rev slots
                    prev = revdata[i]
                    if prev != nullrev:
                        pending.append(prev)
        return seen

    def headsetofconnecteds(self, ixs):
        # same as genericdag.headsetofconnecteds, reading the index directly
        if not ixs:
            return set()
        rlog = self._revlog
        idx = rlog.index
        headrevs = set(ixs)
        for rev in ixs:
            revdata = idx[rev]
            for i in [5, 6]:  # parent rev slots
                prev = revdata[i]
                if prev != nullrev:
                    headrevs.discard(prev)
        assert headrevs
        return headrevs

    def linearize(self, ixs):
        '''linearize and topologically sort a list of revisions

        The linearization process tries to create long runs of revs where
        a child rev comes immediately after its first parent. This is done by
        visiting the heads of the given revs in inverse topological order,
        and for each visited rev, visiting its second parent, then its first
        parent, then adding the rev itself to the output list.
        '''
        sorted = []
        visit = list(self.headsetofconnecteds(ixs))
        visit.sort(reverse=True)
        finished = set()

        while visit:
            cur = visit.pop()
            if cur < 0:
                # negative entries (-rev - 1) mark a rev whose parents have
                # already been pushed: emit it now, once
                cur = -cur - 1
                if cur not in finished:
                    sorted.append(cur)
                    finished.add(cur)
            else:
                # re-push as an emit marker, then push unfinished parents
                # so they are output before the rev itself
                visit.append(-cur - 1)
                visit += [p for p in self.parents(cur)
                          if p in ixs and p not in finished]
        assert len(sorted) == len(ixs)
        return sorted
246 247
class inverserevlogdag(revlogbaseddag, genericdag):
    '''inverse of an existing revlog dag; see revlogdag.inverse()'''

    def __init__(self, orig):
        revlogbaseddag.__init__(self, orig._revlog, orig._nodeset)
        self._orig = orig
        # rev -> list of child revs, filled in lazily by _walkto()
        self._children = {}
        # revs found so far with no parents (heads of the inverse DAG)
        self._roots = []
        # highest rev the backwards walk has not yet visited
        self._walkfrom = len(self._revlog) - 1

    def _walkto(self, walkto):
        # extend the backwards walk from self._walkfrom down to walkto
        # (inclusive), recording child links and roots along the way
        rev = self._walkfrom
        cs = self._children
        roots = self._roots
        idx = self._revlog.index
        while rev >= walkto:
            data = idx[rev]
            isroot = True
            for prev in [data[5], data[6]]: # parent revs
                if prev != nullrev:
                    cs.setdefault(prev, []).append(rev)
                    isroot = False
            if isroot:
                roots.append(rev)
            rev -= 1
        self._walkfrom = rev

    def _getheads(self):
        # heads of the inverse DAG are the roots of the original one;
        # a full walk guarantees _roots is complete
        self._walkto(nullrev)
        return self._roots

    def parents(self, ix):
        # "parents" in the inverse DAG are the children in the original
        if ix is None:
            return []
        if ix <= self._walkfrom:
            # walk far enough that ix's children are all recorded
            self._walkto(ix)
        return self._children.get(ix, [])

    def inverse(self):
        # inverting twice yields the original DAG
        return self._orig
@@ -1,2365 +1,2368 b''
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import operator
15 15 import os
16 16 import random
17 17 import socket
18 18 import ssl
19 19 import string
20 20 import sys
21 21 import tempfile
22 22 import time
23 23
24 24 from .i18n import _
25 25 from .node import (
26 26 bin,
27 27 hex,
28 28 nullhex,
29 29 nullid,
30 30 nullrev,
31 31 short,
32 32 )
33 33 from . import (
34 34 bundle2,
35 35 changegroup,
36 36 cmdutil,
37 37 color,
38 38 context,
39 39 dagparser,
40 40 dagutil,
41 41 encoding,
42 42 error,
43 43 exchange,
44 44 extensions,
45 45 filemerge,
46 46 fileset,
47 47 formatter,
48 48 hg,
49 49 localrepo,
50 50 lock as lockmod,
51 51 merge as mergemod,
52 52 obsolete,
53 53 obsutil,
54 54 phases,
55 55 policy,
56 56 pvec,
57 57 pycompat,
58 58 registrar,
59 59 repair,
60 60 revlog,
61 61 revset,
62 62 revsetlang,
63 63 scmutil,
64 64 setdiscovery,
65 65 simplemerge,
66 66 smartset,
67 67 sslutil,
68 68 streamclone,
69 69 templater,
70 70 treediscovery,
71 71 upgrade,
72 72 util,
73 73 vfs as vfsmod,
74 74 )
75 75
76 76 release = lockmod.release
77 77
78 78 command = registrar.command()
79 79
@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # explicit index file given: open it as a standalone revlog
        index, rev1, rev2 = args
        rl = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
        lookup = rl.lookup
    elif nargs == 2:
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        rl = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_('either two or three arguments required'))
    anc = rl.ancestor(lookup(rev1), lookup(rev2))
    ui.write('%d:%s\n' % (rl.rev(anc), hex(anc)))
98 98
@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
105 105
@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # determine number of revs in DAG (first parse pass, for progress total)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []

    wlock = lock = tr = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        tr = repo.transaction("builddag")

        at = -1                 # rev of the last node committed
        atbranch = 'default'    # branch applied to subsequent nodes
        nodeids = []            # backref index -> node id
        id = 0
        ui.progress(_('building'), id, unit=_('revisions'), total=total)
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                # 'n': create one commit with the parsed parents
                ui.note(('node %s\n' % str(data)))
                id, ps = data

                files = []
                fctxs = {}

                p2 = None
                if mergeable_file:
                    # "mf": one shared file whose lines merge cleanly
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge rev: three-way merge the file contents
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    # tag this rev's own line so each rev changes the file
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, mergedtext)

                if overwritten_file:
                    # "of": same file fully rewritten by every rev
                    fn = "of"
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)

                if new_file:
                    # "nf<id>": a brand new file per rev; merges also carry
                    # over the second parent's nf* files
                    fn = "nf%i" % id
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                fctxs[fn] = p2[fn]

                def fctxfn(repo, cx, path):
                    return fctxs.get(path)

                # translate parsed parent refs into node ids for memctx
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                # 'l': record a local tag for the preceding node
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                # 'a': switch the branch used for subsequent nodes
                ui.note(('branch %s\n' % data))
                atbranch = data
            ui.progress(_('building'), id, unit=_('revisions'), total=total)
        tr.close()

        if tags:
            repo.vfs.write("localtags", "".join(tags))
    finally:
        ui.progress(_('building'), None)
        release(tr, lock, wlock)
257 257
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """print the contents of changegroup 'gen'; with 'all', dump every
    delta of every chunk group, otherwise just the changelog node ids.
    'indent' allows nesting the output under a bundle2 part listing."""
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            # dump one chunk group (changelog/manifest/one filelog)
            ui.write("\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write("%s%s %s %s %s %s %s\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))

        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        # filelog sections follow until an empty header terminates the stream
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        chunkdata = gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write("%s%s\n" % (indent_string, hex(node)))
286 286
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    pad = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # marker format newer than this client understands
        ui.write(("%sunsupported version: %s (%d bytes)\n")
                 % (pad, exc.version, len(data)))
    else:
        ui.write(("%sversion: %d (%d bytes)\n") % (pad, version, len(data)))
        fm = ui.formatter('debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(pad)
            cmdutil.showmarker(fm, m)
        fm.end()
309 309
def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    pad = ' ' * indent
    headsbyphase = phases.binarydecode(data)
    # print heads grouped by phase, one per line
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(pad)
            ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
318 318
def _quasirepr(thing):
    """repr-like rendering of 'thing' with deterministic dict key order"""
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
        return '{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
324 324
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
    wanted = opts.get(r'part_type', [])
    for part in gen.iterparts():
        # honor --part-type filtering when given
        if wanted and part.type not in wanted:
            continue
        ui.write('%s -- %s\n' % (part.type, _quasirepr(part.params)))
        # known part payloads get a detailed, indented dump
        if part.type == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == 'obsmarkers':
            _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == 'phase-heads':
            _debugphaseheads(ui, part, indent=4)
343 343
@command('debugbundle',
    [('a', 'all', None, _('show all details')),
    ('', 'part-type', [], _('show only the named part type')),
    ('', 'spec', None, _('print the bundlespec of the bundle'))],
    _('FILE'),
    norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as fh:
        if spec:
            # only print the bundlespec, don't unpack the contents
            spec = exchange.getbundlespec(ui, fh)
            ui.write('%s\n' % spec)
            return

        bundle = exchange.readbundle(ui, fh, bundlepath)
        if isinstance(bundle, bundle2.unbundle20):
            return _debugbundle2(ui, bundle, all=all, **opts)
        _debugchangegroup(ui, bundle, all=all, **opts)
362 362
@command('debugcapabilities',
    [], _('PATH'),
    norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    peer = hg.peer(ui, opts, path)
    ui.write(('Main capabilities:\n'))
    for cap in sorted(peer.capabilities()):
        ui.write((' %s\n') % cap)
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.write(('Bundle2 capabilities:\n'))
        for key, values in sorted(b2caps.iteritems()):
            ui.write((' %s\n') % key)
            for v in values:
                ui.write((' %s\n') % v)
380 380
@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate

    Cross-checks every dirstate entry against the manifests of both
    dirstate parents and warns about each inconsistency found; aborts
    if any were detected.
    """
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        # 'n'ormal/'r'emoved entries must exist in the first manifest
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        # 'a'dded entries must NOT already be tracked
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        # 'm'erged entries must come from at least one parent
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        # use a name that does not shadow the 'error' module: the original
        # assigned to a local named 'error', making the following
        # error.Abort(...) an AttributeError on a string instead of an abort
        errstr = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
408 408
@command('debugcolor',
    [('', 'style', None, _('show all configured styles'))],
    'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % ui._colormode)
    # --style lists configured styles; the default lists raw colors
    if opts.get(r'style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
419 419
def _debugdisplaycolor(ui):
    """print every available color name, rendered in that color"""
    # work on a copy so we can replace the style table without side effects
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for key, value in ui.configitems('color'):
            if key.startswith('color.'):
                ui._styles[key] = key[6:]
            elif key.startswith('terminfo.'):
                ui._styles[key] = key[9:]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    entries = sorted(ui._styles.items(),
                     key=lambda item: ('_' in item[0], item[0], item[1]))
    for colorname, label in entries:
        ui.write(('%s\n') % colorname, label=label)
437 437
def _debugdisplaystyle(ui):
    """print each configured style label with its effects, aligned"""
    ui.write(_('available style:\n'))
    longest = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            ui.write(': ')
            # pad so the effect lists line up in one column
            ui.write(' ' * (max(0, longest - len(label))))
            ui.write(', '.join(ui.label(e, e) for e in effects.split()))
        ui.write('\n')
449 449
@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))

    reqs, chunks = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, chunks, fname)

    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(reqs)))
467 467
@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get(r'spaces')
    dots = opts.get(r'dots')
    if file_:
        # explicit revlog index file: emit its DAG, labeling listed revs
        rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            # yield 'n' (node) events and 'l' (label) events for
            # dagparser.dagtextlines to serialize
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        # no index file: emit the repository changelog's DAG
        cl = repo.changelog
        tags = opts.get(r'tags')
        branches = opts.get(r'branches')
        if tags:
            # map rev -> list of tag names, for 'l' events below
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    # emit an 'a' (annotation) event on each branch change
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
    ui.write("\n")
530 530
@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        # with -c/-m/--dir the sole positional argument is the revision
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    rl = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        ui.write(rl.revision(rl.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)
546 546
@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    formats = util.extendeddateformats if opts[r"extended"] else None
    if formats:
        parsed = util.parsedate(date, formats)
    else:
        parsed = util.parsedate(date)
    ui.write(("internal: %s %s\n") % parsed)
    ui.write(("standard: %s\n") % util.datestr(parsed))
    if range:
        # also report whether the parsed date matches the given range
        matchfn = util.matchdate(range)
        ui.write(("match: %s\n") % matchfn(parsed[0]))
562 562
@command('debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
    (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
    base of delta chain to end of this revision; a measurement
    of how much extra data we need to read/seek across to read
    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
    how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
    (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # gather per-revision chain statistics from the index entry
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # e[3] is the delta base; classify it against the parents
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            # without generaldelta, deltas are either full text or vs. prev
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain(' rev chain# chainlen prev delta '
             'size rawsize chainsize ratio lindist extradist '
             'extraratio')
    if withsparseread:
        fm.plain(' readsize largestblk rddensity')
    fm.plain('\n')

    # chain base rev -> small sequential chain id, assigned on first sight
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        start = r.start
        length = r.length
        basestart = start(chainbase)
        revstart = start(rev)
        # bytes spanned on disk from the chain base through this rev
        lineardist = revstart + comp - basestart
        # bytes in that span that belong to other chains
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # chain of length one: no previous rev
            prevrev = -1

        chainratio = float(chainsize) / float(uncomp)
        extraratio = float(extradist) / float(chainsize)

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)
        if withsparseread:
            # recompute how the sparse reader would slice this chain
            readsize = 0
            largestblock = 0
            for revschunk in revlog._slicechunk(r, chain):
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            readdensity = float(chainsize) / float(readsize)

            fm.write('readsize largestblock readdensity',
                     ' %10d %10d %9.5f',
                     readsize, largestblock, readdensity,
                     readsize=readsize, largestblock=largestblock,
                     readdensity=readdensity)

        fm.plain('\n')

    fm.end()
700 700
@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime')),
     ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    hidedates = opts.get(r'nodates')
    bydate = opts.get(r'datesort')

    # Sort key: (mtime, filename) under --datesort, plain filename otherwise.
    sortkey = (lambda item: (item[1][3], item[0])) if bydate else None
    for path, entry in sorted(repo.dirstate._map.iteritems(), key=sortkey):
        # dirstate entry layout: (state char, mode, size, mtime)
        state, fmode, size, mtime = entry[0], entry[1], entry[2], entry[3]
        if mtime == -1:
            when = 'unset '
        elif hidedates:
            when = 'set '
        else:
            when = encoding.strtolocal(
                time.strftime(r"%Y-%m-%d %H:%M:%S ", time.localtime(mtime)))
        if fmode & 0o20000:
            # symlink bit set in the recorded mode
            perms = 'lnk'
        else:
            perms = '%3o' % (fmode & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (state, perms, size, when, path))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
732 732
@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
     ('', 'nonheads', None,
      _('use old-style discovery with non-heads included')),
     ('', 'rev', [], 'restrict discovery to this set of revs'),
    ] + cmdutil.remoteopts,
    _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
                                      opts.get('branch'))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(pushedrevs, remoteheads, remote=remote):
        # Run one discovery round; pushedrevs restricts the local graph
        # considered (ancestors of those revs), remoteheads is unused by the
        # new-style path but kept for the serverlog replay call sites below.
        if opts.get('old'):
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))
                dag = dagutil.revlogdag(repo.changelog)
                all = dag.ancestorset(dag.internalizeall(common))
                common = dag.externalizeall(dag.headsetofconnecteds(all))
        else:
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            # 'anyinc' (renamed from 'any', which shadowed the builtin) is
            # the "any incoming" flag returned by findcommonheads; unused.
            common, anyinc, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    serverlogs = opts.get('serverlog')
    if serverlogs:
        # Replay discovery from recorded server logs instead of talking to
        # the peer directly.
        for filename in serverlogs:
            with open(filename, 'r') as logfile:
                line = logfile.readline()
                while line:
                    parts = line.strip().split(';')
                    op = parts[1]
                    if op == 'cg':
                        pass
                    elif op == 'cgss':
                        doit(parts[2].split(' '), parts[3].split(' '))
                    elif op == 'unb':
                        doit(parts[3].split(' '), parts[2].split(' '))
                    line = logfile.readline()
    else:
        remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
                                                 opts.get('remote_head'))
        localrevs = opts.get('rev')
        doit(localrevs, remoterevs)
799 802
@command('debugextensions', cmdutil.formatteropts, [], norepo=True)
def debugextensions(ui, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    # one formatter item per extension, sorted by extension name
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = [] # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write('name', '%s\n', extname)
        else:
            # default verbosity: annotate the name with a staleness marker
            # when this hg version is not in the extension's tested list
            fm.write('name', '%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                 _(' location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
        # always recorded for machine-readable output, printed only verbose
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _(' tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _(' bug reporting: %s\n'), extbuglink or "")

    fm.end()
845 848
@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
    _('[-r REV] FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    # resolve --rev (defaults to None, i.e. the working directory context)
    ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
    if ui.verbose:
        # verbose mode additionally dumps the parsed fileset tree
        ui.note(fileset.prettyformat(fileset.parse(expr)), "\n")

    for matched in ctx.getfileset(expr):
        ui.write("%s\n" % matched)
858 861
@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    def yesno(flag):
        # render a boolean probe result exactly as before: 'yes' / 'no'
        return flag and 'yes' or 'no'

    ui.write(('exec: %s\n') % yesno(util.checkexec(path)))
    ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
    ui.write(('symlink: %s\n') % yesno(util.checklink(path)))
    ui.write(('hardlink: %s\n') % yesno(util.checknlink(path)))
    casesensitive = '(unknown)'
    try:
        # probe case sensitivity with a throwaway file; the filesystem may
        # refuse the write, in which case we keep '(unknown)'
        with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.write(('case-sensitive: %s\n') % casesensitive)
873 876
@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
     ('C', 'common', [], _('id of common node'), _('ID')),
     ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")

    # build the getbundle() argument dict from --head/--common hex ids
    args = {}
    if common:
        args[r'common'] = [bin(s) for s in common]
    if head:
        args[r'heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args[r'bundlecaps'] = None
    bundle = peer.getbundle('debug', **args)

    # map the user-facing compression name to the on-disk bundle type
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(opts.get('type', 'bzip2').lower())
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
908 911
@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # no arguments: just dump the combined ignore matcher
        ui.write("%s\n" % repr(ignore))
        return

    m = scmutil.match(repo[None], pats=files)
    for f in m.files():
        nf = util.normpath(f)
        ignored = None
        ignoredata = None
        if nf != '.':
            if ignore(nf):
                ignored = nf
                ignoredata = repo.dirstate._ignorefileandline(nf)
            else:
                # the file itself does not match; a containing directory
                # matching also makes it ignored
                for p in util.finddirs(nf):
                    if ignore(p):
                        ignored = p
                        ignoredata = repo.dirstate._ignorefileandline(p)
                        break
        if not ignored:
            ui.write(_("%s is not ignored\n") % m.uipath(f))
            continue
        if ignored == nf:
            ui.write(_("%s is ignored\n") % m.uipath(f))
        else:
            ui.write(_("%s is ignored because of "
                       "containing folder %s\n")
                     % (m.uipath(f), ignored))
        ignorefile, lineno, line = ignoredata
        ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                 % (ignorefile, lineno, line))
950 953
@command('debugindex', cmdutil.debugrevlogopts +
    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugindex(ui, repo, file_=None, **opts):
    """dump the contents of an index file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    # with generaldelta the base column shows the delta parent, otherwise
    # the chain base
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    if generaldelta:
        basehdr = ' delta'
    else:
        basehdr = ' base'

    # full hashes in --debug mode, short ones otherwise
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    if format == 0:
        ui.write((" rev offset length " + basehdr + " linkrev"
                 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        ui.write((" rev flag offset length"
                 " size " + basehdr + " link p1 p2"
                 " %s\n") % "nodeid".rjust(idlen))

    # one row per revision; the format strings must stay in sync with the
    # headers written above
    for i in r:
        node = r.node(i)
        if generaldelta:
            base = r.deltaparent(i)
        else:
            base = r.chainbase(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # fall back to null parents if lookup fails
                pp = [nullid, nullid]
            ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                i, r.start(i), r.length(i), base, r.linkrev(i),
                shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
                i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
1007 1010
@command('debugindexdot', cmdutil.debugrevlogopts,
    _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    rlog = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    # emit one edge per parent; skip the second parent when it is null
    for rev in rlog:
        p1, p2 = rlog.parents(rlog.node(rev))
        ui.write("\t%d -> %d\n" % (rlog.rev(p1), rev))
        if p2 != nullid:
            ui.write("\t%d -> %d\n" % (rlog.rev(p2), rev))
    ui.write("}\n")
1022 1025
@command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''
    opts = pycompat.byteskwargs(opts)

    def writetemp(contents):
        # write contents to a fresh temp file and return its path; the
        # caller is responsible for deleting it
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, pycompat.sysstr("wb"))
        f.write(contents)
        f.close()
        return name

    # running tally of detected problems; returned at the end
    problems = 0

    fm = ui.formatter('debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = util.forcebytestr(inst)
        problems += 1
    fm.condwrite(err, 'encodingerror', _(" %s\n"
                 " (check that your locale is properly set)\n"), err)

    # Python
    fm.write('pythonexe', _("checking Python executable (%s)\n"),
             pycompat.sysexecutable)
    fm.write('pythonver', _("checking Python version (%s)\n"),
             ("%d.%d.%d" % sys.version_info[:3]))
    fm.write('pythonlib', _("checking Python lib (%s)...\n"),
             os.path.dirname(pycompat.fsencode(os.__file__)))

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add('sni')

    fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
             fm.formatlist(sorted(security), name='protocol',
                           fmt='%s', sep=','))

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if 'tls1.2' not in security:
        fm.plain(_(' TLS 1.2 not supported by Python install; '
                   'network connections lack modern security\n'))
    if 'sni' not in security:
        fm.plain(_(' SNI not supported by Python install; may have '
                   'connectivity issues with some servers\n'))

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write('hgver', _("checking Mercurial version (%s)\n"),
             hgver.split('+')[0])
    fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
             '+'.join(hgver.split('+')[1:]))

    # compiled modules
    fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
             policy.policy)
    fm.write('hgmodules', _("checking installed modules (%s)...\n"),
             os.path.dirname(pycompat.fsencode(__file__)))

    if policy.policy in ('c', 'allow'):
        # C extensions may be used: verify they actually import
        err = None
        try:
            from .cext import (
                base85,
                bdiff,
                mpatch,
                osutil,
            )
            dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
        except Exception as inst:
            err = util.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, 'extensionserror', " %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write('compengines', _('checking registered compression engines (%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines),
                           name='compengine', fmt='%s', sep=', '))
    fm.write('compenginesavail', _('checking available compression engines '
                                   '(%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines
                                  if e.available()),
                           name='compengine', fmt='%s', sep=', '))
    wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
    fm.write('compenginesserver', _('checking available compression engines '
                                    'for wire protocol (%s)\n'),
             fm.formatlist([e.name() for e in wirecompengines
                            if e.wireprotosupport()],
                           name='compengine', fmt='%s', sep=', '))

    # templates
    p = templater.templatepaths()
    fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
    fm.condwrite(not p, '', _(" no template directories found\n"))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = util.forcebytestr(inst)
                # signal the failure to the summary block below
                p = None
            fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
        else:
            p = None
        fm.condwrite(p, 'defaulttemplate',
                     _("checking default template (%s)\n"), m)
        fm.condwrite(not m, 'defaulttemplatenotfound',
                     _(" template '%s' not found\n"), "default")
    if not p:
        problems += 1
    fm.condwrite(not p, '',
                 _(" (templates seem to have been installed incorrectly)\n"))

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    fm.write('editor', _("checking commit editor... (%s)\n"), editor)
    cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
    fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
                 _(" No commit editor set and can't find %s in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor == 'vi' and editor)
    fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
                 _(" Can't find editor '%s' in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor)
    if not cmdpath and editor != 'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = util.forcebytestr(e)
        problems += 1

    fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
    fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
        " (specify a username in your configuration file)\n"), err)

    fm.condwrite(not problems, '',
                 _("no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(problems, 'problems',
                 _("%d problems detected,"
                   " please check your install!\n"), problems)
    fm.end()

    return problems
1190 1193
@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('known'):
        raise error.Abort("known() not supported by target repository")
    # one boolean per requested id, rendered as a "0"/"1" string
    flags = peer.known([bin(s) for s in ids])
    ui.write("%s\n" % "".join('1' if known else '0' for known in flags))
1204 1207
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # kept only so old completion scripts keep working; the real
    # implementation lives in debugnamecomplete
    debugnamecomplete(ui, repo, *args)
1209 1212
@command('debuglocks',
         [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
          ('W', 'force-wlock', None,
           _('free the working state lock (DANGEROUS)'))],
         _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Returns 0 if no locks are held.

    """

    if opts.get(r'force_lock'):
        repo.svfs.unlink('lock')
    if opts.get(r'force_wlock'):
        repo.vfs.unlink('wlock')
    # Fix: the second operand previously re-tested 'force_lock', so
    # --force-wlock alone fell through to the reporting code below instead
    # of returning here.
    if opts.get(r'force_lock') or opts.get(r'force_wlock'):
        return 0

    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # we acquired it ourselves, so it was free; release immediately
            l.release()
        else:
            # someone else holds it: describe the holder from the lock file
            try:
                stat = vfs.lstat(name)
                age = now - stat.st_mtime
                user = util.username(stat.st_uid)
                locker = vfs.readlock(name)
                if ":" in locker:
                    host, pid = locker.split(':')
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user, pid)
                    else:
                        locker = 'user %s, process %s, host %s' \
                                 % (user, pid, host)
                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise

        ui.write(("%-6s free\n") % (name + ":"))
        return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    return held
1281 1284
@command('debugmergestate', [], '')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""
    def _hashornull(h):
        # render the null hash as the literal string 'null'
        if h == nullhex:
            return 'null'
        else:
            return h

    def printrecords(version):
        # dump the v1 or v2 record list (closed over below), pretty-printing
        # the record types we understand
        ui.write(('* version %s records\n') % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == 'L':
                ui.write(('local: %s\n') % record)
            elif rtype == 'O':
                ui.write(('other: %s\n') % record)
            elif rtype == 'm':
                driver, mdstate = record.split('\0', 1)
                ui.write(('merge driver: %s (state "%s")\n')
                         % (driver, mdstate))
            elif rtype in 'FDC':
                # per-file record: NUL-separated fields; v2 adds onode+flags
                r = record.split('\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    onode = 'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
                         % (f, rtype, state, _hashornull(hash)))
                ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
                ui.write((' ancestor path: %s (node %s)\n')
                         % (afile, _hashornull(anode)))
                ui.write((' other path: %s (node %s)\n')
                         % (ofile, _hashornull(onode)))
            elif rtype == 'f':
                # file extras: alternating key/value pairs after the filename
                filename, rawextras = record.split('\0', 1)
                extras = rawextras.split('\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.write(('file extras: %s (%s)\n')
                         % (filename, ', '.join(extrastrings)))
            elif rtype == 'l':
                labels = record.split('\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.write(('labels:\n'))
                ui.write((' local: %s\n' % labels[0]))
                ui.write((' other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write((' base: %s\n' % labels[2]))
            else:
                ui.write(('unrecognized entry: %s\t%s\n')
                         % (rtype, record.replace('\0', '\t')))

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = 'LOml'
    def key(r):
        # known record types come first, in 'LOml' order; unknown ones after,
        # sorted by their payload
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)
    v1records.sort(key=key)
    v2records.sort(key=key)

    if not v1records and not v2records:
        ui.write(('no merge state found\n'))
    elif not v2records:
        ui.note(('no version 2 merge state\n'))
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.note(('v1 and v2 states match: using v2\n'))
        printrecords(2)
    else:
        ui.note(('v1 and v2 states mismatch: using v1\n'))
        printrecords(1)
        if ui.verbose:
            printrecords(2)
1380 1383
@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # since we previously only listed open branches, we will handle that
    # specially (after this for loop)
    for nsname, ns in repo.names.iteritems():
        if nsname != 'branches':
            candidates.update(ns.listnames(repo))
    candidates.update(tag for (tag, heads, tip, closed)
                      in repo.branchmap().iterbranches() if not closed)

    # with no arguments, complete against the empty prefix (i.e. everything)
    prefixes = args or ['']
    completions = {name for prefix in prefixes
                   for name in candidates if name.startswith(prefix)}
    ui.write('\n'.join(sorted(completions)))
    ui.write('\n')
1400 1403
@command('debugobsolete',
        [('', 'flags', 0, _('markers flag')),
         ('', 'record-parents', False,
          _('record parent information for the precursor')),
         ('r', 'rev', [], _('display markers relevant to REV')),
         ('', 'exclusive', False, _('restrict display to markers only '
                                    'relevant to REV')),
         ('', 'index', False, _('display index of the marker')),
         ('', 'delete', [], _('delete markers specified by indices')),
        ] + cmdutil.commitopts2 + cmdutil.formatteropts,
         _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # parse a full hex node id into binary, aborting on anything else
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

    # --delete mode: remove markers by index and return early
    if opts.get('delete'):
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(_('invalid index value: %r') % v,
                                  hint=_('use integers for indices'))

        if repo.currenttransaction():
            raise error.Abort(_('cannot delete obsmarkers in the middle '
                                'of transaction.'))

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_('deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # creation mode: record one marker precursor -> successors
        if opts['rev']:
            raise error.Abort('cannot select revision when creating marker')
        metadata = {}
        metadata['user'] = opts['user'] or ui.username()
        succs = tuple(parsenodeid(succ) for succ in successors)
        # lock, then open a transaction; both are released in reverse order
        # by the nested finally blocks below
        l = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = util.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort('cannot used --record-parents on '
                                          'unknown changesets')
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(tr, prec, succs, opts['flags'],
                                     parents=parents, date=date,
                                     metadata=metadata, ui=ui)
                tr.close()
            except ValueError as exc:
                raise error.Abort(_('bad obsmarker input: %s') % exc)
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # display mode: list markers, optionally restricted to --rev
        if opts['rev']:
            revs = scmutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(obsutil.getmarkers(repo, nodes=nodes,
                                              exclusive=opts['exclusive']))
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            # --index needs global indices, so iterate everything but only
            # display the markers selected by --rev
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter('debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
1516 1519
1517 1520 @command('debugpathcomplete',
1518 1521 [('f', 'full', None, _('complete an entire path')),
1519 1522 ('n', 'normal', None, _('show only normal files')),
1520 1523 ('a', 'added', None, _('show only added files')),
1521 1524 ('r', 'removed', None, _('show only removed files'))],
1522 1525 _('FILESPEC...'))
1523 1526 def debugpathcomplete(ui, repo, *specs, **opts):
1524 1527 '''complete part or all of a tracked path
1525 1528
1526 1529 This command supports shells that offer path name completion. It
1527 1530 currently completes only files already known to the dirstate.
1528 1531
1529 1532 Completion extends only to the next path segment unless
1530 1533 --full is specified, in which case entire paths are used.'''
1531 1534
1532 1535 def complete(path, acceptable):
1533 1536 dirstate = repo.dirstate
1534 1537 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1535 1538 rootdir = repo.root + pycompat.ossep
1536 1539 if spec != repo.root and not spec.startswith(rootdir):
1537 1540 return [], []
1538 1541 if os.path.isdir(spec):
1539 1542 spec += '/'
1540 1543 spec = spec[len(rootdir):]
1541 1544 fixpaths = pycompat.ossep != '/'
1542 1545 if fixpaths:
1543 1546 spec = spec.replace(pycompat.ossep, '/')
1544 1547 speclen = len(spec)
1545 1548 fullpaths = opts[r'full']
1546 1549 files, dirs = set(), set()
1547 1550 adddir, addfile = dirs.add, files.add
1548 1551 for f, st in dirstate.iteritems():
1549 1552 if f.startswith(spec) and st[0] in acceptable:
1550 1553 if fixpaths:
1551 1554 f = f.replace('/', pycompat.ossep)
1552 1555 if fullpaths:
1553 1556 addfile(f)
1554 1557 continue
1555 1558 s = f.find(pycompat.ossep, speclen)
1556 1559 if s >= 0:
1557 1560 adddir(f[:s])
1558 1561 else:
1559 1562 addfile(f)
1560 1563 return files, dirs
1561 1564
1562 1565 acceptable = ''
1563 1566 if opts[r'normal']:
1564 1567 acceptable += 'nm'
1565 1568 if opts[r'added']:
1566 1569 acceptable += 'a'
1567 1570 if opts[r'removed']:
1568 1571 acceptable += 'r'
1569 1572 cwd = repo.getcwd()
1570 1573 if not specs:
1571 1574 specs = ['.']
1572 1575
1573 1576 files, dirs = set(), set()
1574 1577 for spec in specs:
1575 1578 f, d = complete(spec, acceptable or 'nmar')
1576 1579 files.update(f)
1577 1580 dirs.update(d)
1578 1581 files.update(dirs)
1579 1582 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1580 1583 ui.write('\n')
1581 1584
1582 1585 @command('debugpickmergetool',
1583 1586 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1584 1587 ('', 'changedelete', None, _('emulate merging change and delete')),
1585 1588 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1586 1589 _('[PATTERN]...'),
1587 1590 inferrepo=True)
1588 1591 def debugpickmergetool(ui, repo, *pats, **opts):
1589 1592 """examine which merge tool is chosen for specified file
1590 1593
1591 1594 As described in :hg:`help merge-tools`, Mercurial examines
1592 1595 configurations below in this order to decide which merge tool is
1593 1596 chosen for specified file.
1594 1597
1595 1598 1. ``--tool`` option
1596 1599 2. ``HGMERGE`` environment variable
1597 1600 3. configurations in ``merge-patterns`` section
1598 1601 4. configuration of ``ui.merge``
1599 1602 5. configurations in ``merge-tools`` section
1600 1603 6. ``hgmerge`` tool (for historical reason only)
1601 1604 7. default tool for fallback (``:merge`` or ``:prompt``)
1602 1605
1603 1606 This command writes out examination result in the style below::
1604 1607
1605 1608 FILE = MERGETOOL
1606 1609
1607 1610 By default, all files known in the first parent context of the
1608 1611 working directory are examined. Use file patterns and/or -I/-X
1609 1612 options to limit target files. -r/--rev is also useful to examine
1610 1613 files in another context without actual updating to it.
1611 1614
1612 1615 With --debug, this command shows warning messages while matching
1613 1616 against ``merge-patterns`` and so on, too. It is recommended to
1614 1617 use this option with explicit file patterns and/or -I/-X options,
1615 1618 because this option increases amount of output per file according
1616 1619 to configurations in hgrc.
1617 1620
1618 1621 With -v/--verbose, this command shows configurations below at
1619 1622 first (only if specified).
1620 1623
1621 1624 - ``--tool`` option
1622 1625 - ``HGMERGE`` environment variable
1623 1626 - configuration of ``ui.merge``
1624 1627
1625 1628 If merge tool is chosen before matching against
1626 1629 ``merge-patterns``, this command can't show any helpful
1627 1630 information, even with --debug. In such case, information above is
1628 1631 useful to know why a merge tool is chosen.
1629 1632 """
1630 1633 opts = pycompat.byteskwargs(opts)
1631 1634 overrides = {}
1632 1635 if opts['tool']:
1633 1636 overrides[('ui', 'forcemerge')] = opts['tool']
1634 1637 ui.note(('with --tool %r\n') % (opts['tool']))
1635 1638
1636 1639 with ui.configoverride(overrides, 'debugmergepatterns'):
1637 1640 hgmerge = encoding.environ.get("HGMERGE")
1638 1641 if hgmerge is not None:
1639 1642 ui.note(('with HGMERGE=%r\n') % (hgmerge))
1640 1643 uimerge = ui.config("ui", "merge")
1641 1644 if uimerge:
1642 1645 ui.note(('with ui.merge=%r\n') % (uimerge))
1643 1646
1644 1647 ctx = scmutil.revsingle(repo, opts.get('rev'))
1645 1648 m = scmutil.match(ctx, pats, opts)
1646 1649 changedelete = opts['changedelete']
1647 1650 for path in ctx.walk(m):
1648 1651 fctx = ctx[path]
1649 1652 try:
1650 1653 if not ui.debugflag:
1651 1654 ui.pushbuffer(error=True)
1652 1655 tool, toolpath = filemerge._picktool(repo, ui, path,
1653 1656 fctx.isbinary(),
1654 1657 'l' in fctx.flags(),
1655 1658 changedelete)
1656 1659 finally:
1657 1660 if not ui.debugflag:
1658 1661 ui.popbuffer()
1659 1662 ui.write(('%s = %s\n') % (path, tool))
1660 1663
1661 1664 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1662 1665 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1663 1666 '''access the pushkey key/value protocol
1664 1667
1665 1668 With two args, list the keys in the given namespace.
1666 1669
1667 1670 With five args, set a key to new if it currently is set to old.
1668 1671 Reports success or failure.
1669 1672 '''
1670 1673
1671 1674 target = hg.peer(ui, {}, repopath)
1672 1675 if keyinfo:
1673 1676 key, old, new = keyinfo
1674 1677 r = target.pushkey(namespace, key, old, new)
1675 1678 ui.status(str(r) + '\n')
1676 1679 return not r
1677 1680 else:
1678 1681 for k, v in sorted(target.listkeys(namespace).iteritems()):
1679 1682 ui.write("%s\t%s\n" % (util.escapestr(k),
1680 1683 util.escapestr(v)))
1681 1684
1682 1685 @command('debugpvec', [], _('A B'))
1683 1686 def debugpvec(ui, repo, a, b=None):
1684 1687 ca = scmutil.revsingle(repo, a)
1685 1688 cb = scmutil.revsingle(repo, b)
1686 1689 pa = pvec.ctxpvec(ca)
1687 1690 pb = pvec.ctxpvec(cb)
1688 1691 if pa == pb:
1689 1692 rel = "="
1690 1693 elif pa > pb:
1691 1694 rel = ">"
1692 1695 elif pa < pb:
1693 1696 rel = "<"
1694 1697 elif pa | pb:
1695 1698 rel = "|"
1696 1699 ui.write(_("a: %s\n") % pa)
1697 1700 ui.write(_("b: %s\n") % pb)
1698 1701 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1699 1702 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1700 1703 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1701 1704 pa.distance(pb), rel))
1702 1705
1703 1706 @command('debugrebuilddirstate|debugrebuildstate',
1704 1707 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1705 1708 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1706 1709 'the working copy parent')),
1707 1710 ],
1708 1711 _('[-r REV]'))
1709 1712 def debugrebuilddirstate(ui, repo, rev, **opts):
1710 1713 """rebuild the dirstate as it would look like for the given revision
1711 1714
1712 1715 If no revision is specified the first current parent will be used.
1713 1716
1714 1717 The dirstate will be set to the files of the given revision.
1715 1718 The actual working directory content or existing dirstate
1716 1719 information such as adds or removes is not considered.
1717 1720
1718 1721 ``minimal`` will only rebuild the dirstate status for files that claim to be
1719 1722 tracked but are not in the parent manifest, or that exist in the parent
1720 1723 manifest but are not in the dirstate. It will not change adds, removes, or
1721 1724 modified files that are in the working copy parent.
1722 1725
1723 1726 One use of this command is to make the next :hg:`status` invocation
1724 1727 check the actual file content.
1725 1728 """
1726 1729 ctx = scmutil.revsingle(repo, rev)
1727 1730 with repo.wlock():
1728 1731 dirstate = repo.dirstate
1729 1732 changedfiles = None
1730 1733 # See command doc for what minimal does.
1731 1734 if opts.get(r'minimal'):
1732 1735 manifestfiles = set(ctx.manifest().keys())
1733 1736 dirstatefiles = set(dirstate)
1734 1737 manifestonly = manifestfiles - dirstatefiles
1735 1738 dsonly = dirstatefiles - manifestfiles
1736 1739 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1737 1740 changedfiles = manifestonly | dsnotadded
1738 1741
1739 1742 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1740 1743
1741 1744 @command('debugrebuildfncache', [], '')
1742 1745 def debugrebuildfncache(ui, repo):
1743 1746 """rebuild the fncache file"""
1744 1747 repair.rebuildfncache(ui, repo)
1745 1748
1746 1749 @command('debugrename',
1747 1750 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1748 1751 _('[-r REV] FILE'))
1749 1752 def debugrename(ui, repo, file1, *pats, **opts):
1750 1753 """dump rename information"""
1751 1754
1752 1755 opts = pycompat.byteskwargs(opts)
1753 1756 ctx = scmutil.revsingle(repo, opts.get('rev'))
1754 1757 m = scmutil.match(ctx, (file1,) + pats, opts)
1755 1758 for abs in ctx.walk(m):
1756 1759 fctx = ctx[abs]
1757 1760 o = fctx.filelog().renamed(fctx.filenode())
1758 1761 rel = m.rel(abs)
1759 1762 if o:
1760 1763 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1761 1764 else:
1762 1765 ui.write(_("%s not renamed\n") % rel)
1763 1766
1764 1767 @command('debugrevlog', cmdutil.debugrevlogopts +
1765 1768 [('d', 'dump', False, _('dump index data'))],
1766 1769 _('-c|-m|FILE'),
1767 1770 optionalrepo=True)
1768 1771 def debugrevlog(ui, repo, file_=None, **opts):
1769 1772 """show data and statistics about a revlog"""
1770 1773 opts = pycompat.byteskwargs(opts)
1771 1774 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1772 1775
1773 1776 if opts.get("dump"):
1774 1777 numrevs = len(r)
1775 1778 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1776 1779 " rawsize totalsize compression heads chainlen\n"))
1777 1780 ts = 0
1778 1781 heads = set()
1779 1782
1780 1783 for rev in xrange(numrevs):
1781 1784 dbase = r.deltaparent(rev)
1782 1785 if dbase == -1:
1783 1786 dbase = rev
1784 1787 cbase = r.chainbase(rev)
1785 1788 clen = r.chainlen(rev)
1786 1789 p1, p2 = r.parentrevs(rev)
1787 1790 rs = r.rawsize(rev)
1788 1791 ts = ts + rs
1789 1792 heads -= set(r.parentrevs(rev))
1790 1793 heads.add(rev)
1791 1794 try:
1792 1795 compression = ts / r.end(rev)
1793 1796 except ZeroDivisionError:
1794 1797 compression = 0
1795 1798 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1796 1799 "%11d %5d %8d\n" %
1797 1800 (rev, p1, p2, r.start(rev), r.end(rev),
1798 1801 r.start(dbase), r.start(cbase),
1799 1802 r.start(p1), r.start(p2),
1800 1803 rs, ts, compression, len(heads), clen))
1801 1804 return 0
1802 1805
1803 1806 v = r.version
1804 1807 format = v & 0xFFFF
1805 1808 flags = []
1806 1809 gdelta = False
1807 1810 if v & revlog.FLAG_INLINE_DATA:
1808 1811 flags.append('inline')
1809 1812 if v & revlog.FLAG_GENERALDELTA:
1810 1813 gdelta = True
1811 1814 flags.append('generaldelta')
1812 1815 if not flags:
1813 1816 flags = ['(none)']
1814 1817
1815 1818 nummerges = 0
1816 1819 numfull = 0
1817 1820 numprev = 0
1818 1821 nump1 = 0
1819 1822 nump2 = 0
1820 1823 numother = 0
1821 1824 nump1prev = 0
1822 1825 nump2prev = 0
1823 1826 chainlengths = []
1824 1827 chainbases = []
1825 1828 chainspans = []
1826 1829
1827 1830 datasize = [None, 0, 0]
1828 1831 fullsize = [None, 0, 0]
1829 1832 deltasize = [None, 0, 0]
1830 1833 chunktypecounts = {}
1831 1834 chunktypesizes = {}
1832 1835
1833 1836 def addsize(size, l):
1834 1837 if l[0] is None or size < l[0]:
1835 1838 l[0] = size
1836 1839 if size > l[1]:
1837 1840 l[1] = size
1838 1841 l[2] += size
1839 1842
1840 1843 numrevs = len(r)
1841 1844 for rev in xrange(numrevs):
1842 1845 p1, p2 = r.parentrevs(rev)
1843 1846 delta = r.deltaparent(rev)
1844 1847 if format > 0:
1845 1848 addsize(r.rawsize(rev), datasize)
1846 1849 if p2 != nullrev:
1847 1850 nummerges += 1
1848 1851 size = r.length(rev)
1849 1852 if delta == nullrev:
1850 1853 chainlengths.append(0)
1851 1854 chainbases.append(r.start(rev))
1852 1855 chainspans.append(size)
1853 1856 numfull += 1
1854 1857 addsize(size, fullsize)
1855 1858 else:
1856 1859 chainlengths.append(chainlengths[delta] + 1)
1857 1860 baseaddr = chainbases[delta]
1858 1861 revaddr = r.start(rev)
1859 1862 chainbases.append(baseaddr)
1860 1863 chainspans.append((revaddr - baseaddr) + size)
1861 1864 addsize(size, deltasize)
1862 1865 if delta == rev - 1:
1863 1866 numprev += 1
1864 1867 if delta == p1:
1865 1868 nump1prev += 1
1866 1869 elif delta == p2:
1867 1870 nump2prev += 1
1868 1871 elif delta == p1:
1869 1872 nump1 += 1
1870 1873 elif delta == p2:
1871 1874 nump2 += 1
1872 1875 elif delta != nullrev:
1873 1876 numother += 1
1874 1877
1875 1878 # Obtain data on the raw chunks in the revlog.
1876 1879 segment = r._getsegmentforrevs(rev, rev)[1]
1877 1880 if segment:
1878 1881 chunktype = bytes(segment[0:1])
1879 1882 else:
1880 1883 chunktype = 'empty'
1881 1884
1882 1885 if chunktype not in chunktypecounts:
1883 1886 chunktypecounts[chunktype] = 0
1884 1887 chunktypesizes[chunktype] = 0
1885 1888
1886 1889 chunktypecounts[chunktype] += 1
1887 1890 chunktypesizes[chunktype] += size
1888 1891
1889 1892 # Adjust size min value for empty cases
1890 1893 for size in (datasize, fullsize, deltasize):
1891 1894 if size[0] is None:
1892 1895 size[0] = 0
1893 1896
1894 1897 numdeltas = numrevs - numfull
1895 1898 numoprev = numprev - nump1prev - nump2prev
1896 1899 totalrawsize = datasize[2]
1897 1900 datasize[2] /= numrevs
1898 1901 fulltotal = fullsize[2]
1899 1902 fullsize[2] /= numfull
1900 1903 deltatotal = deltasize[2]
1901 1904 if numrevs - numfull > 0:
1902 1905 deltasize[2] /= numrevs - numfull
1903 1906 totalsize = fulltotal + deltatotal
1904 1907 avgchainlen = sum(chainlengths) / numrevs
1905 1908 maxchainlen = max(chainlengths)
1906 1909 maxchainspan = max(chainspans)
1907 1910 compratio = 1
1908 1911 if totalsize:
1909 1912 compratio = totalrawsize / totalsize
1910 1913
1911 1914 basedfmtstr = '%%%dd\n'
1912 1915 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
1913 1916
1914 1917 def dfmtstr(max):
1915 1918 return basedfmtstr % len(str(max))
1916 1919 def pcfmtstr(max, padding=0):
1917 1920 return basepcfmtstr % (len(str(max)), ' ' * padding)
1918 1921
1919 1922 def pcfmt(value, total):
1920 1923 if total:
1921 1924 return (value, 100 * float(value) / total)
1922 1925 else:
1923 1926 return value, 100.0
1924 1927
1925 1928 ui.write(('format : %d\n') % format)
1926 1929 ui.write(('flags : %s\n') % ', '.join(flags))
1927 1930
1928 1931 ui.write('\n')
1929 1932 fmt = pcfmtstr(totalsize)
1930 1933 fmt2 = dfmtstr(totalsize)
1931 1934 ui.write(('revisions : ') + fmt2 % numrevs)
1932 1935 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
1933 1936 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
1934 1937 ui.write(('revisions : ') + fmt2 % numrevs)
1935 1938 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
1936 1939 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
1937 1940 ui.write(('revision size : ') + fmt2 % totalsize)
1938 1941 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
1939 1942 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
1940 1943
1941 1944 def fmtchunktype(chunktype):
1942 1945 if chunktype == 'empty':
1943 1946 return ' %s : ' % chunktype
1944 1947 elif chunktype in pycompat.bytestr(string.ascii_letters):
1945 1948 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
1946 1949 else:
1947 1950 return ' 0x%s : ' % hex(chunktype)
1948 1951
1949 1952 ui.write('\n')
1950 1953 ui.write(('chunks : ') + fmt2 % numrevs)
1951 1954 for chunktype in sorted(chunktypecounts):
1952 1955 ui.write(fmtchunktype(chunktype))
1953 1956 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
1954 1957 ui.write(('chunks size : ') + fmt2 % totalsize)
1955 1958 for chunktype in sorted(chunktypecounts):
1956 1959 ui.write(fmtchunktype(chunktype))
1957 1960 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
1958 1961
1959 1962 ui.write('\n')
1960 1963 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
1961 1964 ui.write(('avg chain length : ') + fmt % avgchainlen)
1962 1965 ui.write(('max chain length : ') + fmt % maxchainlen)
1963 1966 ui.write(('max chain reach : ') + fmt % maxchainspan)
1964 1967 ui.write(('compression ratio : ') + fmt % compratio)
1965 1968
1966 1969 if format > 0:
1967 1970 ui.write('\n')
1968 1971 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
1969 1972 % tuple(datasize))
1970 1973 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
1971 1974 % tuple(fullsize))
1972 1975 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
1973 1976 % tuple(deltasize))
1974 1977
1975 1978 if numdeltas > 0:
1976 1979 ui.write('\n')
1977 1980 fmt = pcfmtstr(numdeltas)
1978 1981 fmt2 = pcfmtstr(numdeltas, 4)
1979 1982 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
1980 1983 if numprev > 0:
1981 1984 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
1982 1985 numprev))
1983 1986 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
1984 1987 numprev))
1985 1988 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
1986 1989 numprev))
1987 1990 if gdelta:
1988 1991 ui.write(('deltas against p1 : ')
1989 1992 + fmt % pcfmt(nump1, numdeltas))
1990 1993 ui.write(('deltas against p2 : ')
1991 1994 + fmt % pcfmt(nump2, numdeltas))
1992 1995 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
1993 1996 numdeltas))
1994 1997
1995 1998 @command('debugrevspec',
1996 1999 [('', 'optimize', None,
1997 2000 _('print parsed tree after optimizing (DEPRECATED)')),
1998 2001 ('', 'show-revs', True, _('print list of result revisions (default)')),
1999 2002 ('s', 'show-set', None, _('print internal representation of result set')),
2000 2003 ('p', 'show-stage', [],
2001 2004 _('print parsed tree at the given stage'), _('NAME')),
2002 2005 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2003 2006 ('', 'verify-optimized', False, _('verify optimized result')),
2004 2007 ],
2005 2008 ('REVSPEC'))
2006 2009 def debugrevspec(ui, repo, expr, **opts):
2007 2010 """parse and apply a revision specification
2008 2011
2009 2012 Use -p/--show-stage option to print the parsed tree at the given stages.
2010 2013 Use -p all to print tree at every stage.
2011 2014
2012 2015 Use --no-show-revs option with -s or -p to print only the set
2013 2016 representation or the parsed tree respectively.
2014 2017
2015 2018 Use --verify-optimized to compare the optimized result with the unoptimized
2016 2019 one. Returns 1 if the optimized result differs.
2017 2020 """
2018 2021 opts = pycompat.byteskwargs(opts)
2019 2022 aliases = ui.configitems('revsetalias')
2020 2023 stages = [
2021 2024 ('parsed', lambda tree: tree),
2022 2025 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2023 2026 ui.warn)),
2024 2027 ('concatenated', revsetlang.foldconcat),
2025 2028 ('analyzed', revsetlang.analyze),
2026 2029 ('optimized', revsetlang.optimize),
2027 2030 ]
2028 2031 if opts['no_optimized']:
2029 2032 stages = stages[:-1]
2030 2033 if opts['verify_optimized'] and opts['no_optimized']:
2031 2034 raise error.Abort(_('cannot use --verify-optimized with '
2032 2035 '--no-optimized'))
2033 2036 stagenames = set(n for n, f in stages)
2034 2037
2035 2038 showalways = set()
2036 2039 showchanged = set()
2037 2040 if ui.verbose and not opts['show_stage']:
2038 2041 # show parsed tree by --verbose (deprecated)
2039 2042 showalways.add('parsed')
2040 2043 showchanged.update(['expanded', 'concatenated'])
2041 2044 if opts['optimize']:
2042 2045 showalways.add('optimized')
2043 2046 if opts['show_stage'] and opts['optimize']:
2044 2047 raise error.Abort(_('cannot use --optimize with --show-stage'))
2045 2048 if opts['show_stage'] == ['all']:
2046 2049 showalways.update(stagenames)
2047 2050 else:
2048 2051 for n in opts['show_stage']:
2049 2052 if n not in stagenames:
2050 2053 raise error.Abort(_('invalid stage name: %s') % n)
2051 2054 showalways.update(opts['show_stage'])
2052 2055
2053 2056 treebystage = {}
2054 2057 printedtree = None
2055 2058 tree = revsetlang.parse(expr, lookup=repo.__contains__)
2056 2059 for n, f in stages:
2057 2060 treebystage[n] = tree = f(tree)
2058 2061 if n in showalways or (n in showchanged and tree != printedtree):
2059 2062 if opts['show_stage'] or n != 'parsed':
2060 2063 ui.write(("* %s:\n") % n)
2061 2064 ui.write(revsetlang.prettyformat(tree), "\n")
2062 2065 printedtree = tree
2063 2066
2064 2067 if opts['verify_optimized']:
2065 2068 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2066 2069 brevs = revset.makematcher(treebystage['optimized'])(repo)
2067 2070 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2068 2071 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
2069 2072 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
2070 2073 arevs = list(arevs)
2071 2074 brevs = list(brevs)
2072 2075 if arevs == brevs:
2073 2076 return 0
2074 2077 ui.write(('--- analyzed\n'), label='diff.file_a')
2075 2078 ui.write(('+++ optimized\n'), label='diff.file_b')
2076 2079 sm = difflib.SequenceMatcher(None, arevs, brevs)
2077 2080 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2078 2081 if tag in ('delete', 'replace'):
2079 2082 for c in arevs[alo:ahi]:
2080 2083 ui.write('-%s\n' % c, label='diff.deleted')
2081 2084 if tag in ('insert', 'replace'):
2082 2085 for c in brevs[blo:bhi]:
2083 2086 ui.write('+%s\n' % c, label='diff.inserted')
2084 2087 if tag == 'equal':
2085 2088 for c in arevs[alo:ahi]:
2086 2089 ui.write(' %s\n' % c)
2087 2090 return 1
2088 2091
2089 2092 func = revset.makematcher(tree)
2090 2093 revs = func(repo)
2091 2094 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2092 2095 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
2093 2096 if not opts['show_revs']:
2094 2097 return
2095 2098 for c in revs:
2096 2099 ui.write("%s\n" % c)
2097 2100
2098 2101 @command('debugsetparents', [], _('REV1 [REV2]'))
2099 2102 def debugsetparents(ui, repo, rev1, rev2=None):
2100 2103 """manually set the parents of the current working directory
2101 2104
2102 2105 This is useful for writing repository conversion tools, but should
2103 2106 be used with care. For example, neither the working directory nor the
2104 2107 dirstate is updated, so file status may be incorrect after running this
2105 2108 command.
2106 2109
2107 2110 Returns 0 on success.
2108 2111 """
2109 2112
2110 2113 r1 = scmutil.revsingle(repo, rev1).node()
2111 2114 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2112 2115
2113 2116 with repo.wlock():
2114 2117 repo.setparents(r1, r2)
2115 2118
2116 2119 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2117 2120 def debugssl(ui, repo, source=None, **opts):
2118 2121 '''test a secure connection to a server
2119 2122
2120 2123 This builds the certificate chain for the server on Windows, installing the
2121 2124 missing intermediates and trusted root via Windows Update if necessary. It
2122 2125 does nothing on other platforms.
2123 2126
2124 2127 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2125 2128 that server is used. See :hg:`help urls` for more information.
2126 2129
2127 2130 If the update succeeds, retry the original operation. Otherwise, the cause
2128 2131 of the SSL error is likely another issue.
2129 2132 '''
2130 2133 if not pycompat.iswindows:
2131 2134 raise error.Abort(_('certificate chain building is only possible on '
2132 2135 'Windows'))
2133 2136
2134 2137 if not source:
2135 2138 if not repo:
2136 2139 raise error.Abort(_("there is no Mercurial repository here, and no "
2137 2140 "server specified"))
2138 2141 source = "default"
2139 2142
2140 2143 source, branches = hg.parseurl(ui.expandpath(source))
2141 2144 url = util.url(source)
2142 2145 addr = None
2143 2146
2144 2147 if url.scheme == 'https':
2145 2148 addr = (url.host, url.port or 443)
2146 2149 elif url.scheme == 'ssh':
2147 2150 addr = (url.host, url.port or 22)
2148 2151 else:
2149 2152 raise error.Abort(_("only https and ssh connections are supported"))
2150 2153
2151 2154 from . import win32
2152 2155
2153 2156 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2154 2157 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2155 2158
2156 2159 try:
2157 2160 s.connect(addr)
2158 2161 cert = s.getpeercert(True)
2159 2162
2160 2163 ui.status(_('checking the certificate chain for %s\n') % url.host)
2161 2164
2162 2165 complete = win32.checkcertificatechain(cert, build=False)
2163 2166
2164 2167 if not complete:
2165 2168 ui.status(_('certificate chain is incomplete, updating... '))
2166 2169
2167 2170 if not win32.checkcertificatechain(cert):
2168 2171 ui.status(_('failed.\n'))
2169 2172 else:
2170 2173 ui.status(_('done.\n'))
2171 2174 else:
2172 2175 ui.status(_('full certificate chain is available\n'))
2173 2176 finally:
2174 2177 s.close()
2175 2178
2176 2179 @command('debugsub',
2177 2180 [('r', 'rev', '',
2178 2181 _('revision to check'), _('REV'))],
2179 2182 _('[-r REV] [REV]'))
2180 2183 def debugsub(ui, repo, rev=None):
2181 2184 ctx = scmutil.revsingle(repo, rev, None)
2182 2185 for k, v in sorted(ctx.substate.items()):
2183 2186 ui.write(('path %s\n') % k)
2184 2187 ui.write((' source %s\n') % v[0])
2185 2188 ui.write((' revision %s\n') % v[1])
2186 2189
2187 2190 @command('debugsuccessorssets',
2188 2191 [('', 'closest', False, _('return closest successors sets only'))],
2189 2192 _('[REV]'))
2190 2193 def debugsuccessorssets(ui, repo, *revs, **opts):
2191 2194 """show set of successors for revision
2192 2195
2193 2196 A successors set of changeset A is a consistent group of revisions that
2194 2197 succeed A. It contains non-obsolete changesets only unless closests
2195 2198 successors set is set.
2196 2199
2197 2200 In most cases a changeset A has a single successors set containing a single
2198 2201 successor (changeset A replaced by A').
2199 2202
2200 2203 A changeset that is made obsolete with no successors are called "pruned".
2201 2204 Such changesets have no successors sets at all.
2202 2205
2203 2206 A changeset that has been "split" will have a successors set containing
2204 2207 more than one successor.
2205 2208
2206 2209 A changeset that has been rewritten in multiple different ways is called
2207 2210 "divergent". Such changesets have multiple successor sets (each of which
2208 2211 may also be split, i.e. have multiple successors).
2209 2212
2210 2213 Results are displayed as follows::
2211 2214
2212 2215 <rev1>
2213 2216 <successors-1A>
2214 2217 <rev2>
2215 2218 <successors-2A>
2216 2219 <successors-2B1> <successors-2B2> <successors-2B3>
2217 2220
2218 2221 Here rev2 has two possible (i.e. divergent) successors sets. The first
2219 2222 holds one element, whereas the second holds three (i.e. the changeset has
2220 2223 been split).
2221 2224 """
2222 2225 # passed to successorssets caching computation from one call to another
2223 2226 cache = {}
2224 2227 ctx2str = str
2225 2228 node2str = short
2226 2229 if ui.debug():
2227 2230 def ctx2str(ctx):
2228 2231 return ctx.hex()
2229 2232 node2str = hex
2230 2233 for rev in scmutil.revrange(repo, revs):
2231 2234 ctx = repo[rev]
2232 2235 ui.write('%s\n'% ctx2str(ctx))
2233 2236 for succsset in obsutil.successorssets(repo, ctx.node(),
2234 2237 closest=opts['closest'],
2235 2238 cache=cache):
2236 2239 if succsset:
2237 2240 ui.write(' ')
2238 2241 ui.write(node2str(succsset[0]))
2239 2242 for node in succsset[1:]:
2240 2243 ui.write(' ')
2241 2244 ui.write(node2str(node))
2242 2245 ui.write('\n')
2243 2246
2244 2247 @command('debugtemplate',
2245 2248 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2246 2249 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2247 2250 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2248 2251 optionalrepo=True)
2249 2252 def debugtemplate(ui, repo, tmpl, **opts):
2250 2253 """parse and apply a template
2251 2254
2252 2255 If -r/--rev is given, the template is processed as a log template and
2253 2256 applied to the given changesets. Otherwise, it is processed as a generic
2254 2257 template.
2255 2258
2256 2259 Use --verbose to print the parsed tree.
2257 2260 """
2258 2261 revs = None
2259 2262 if opts[r'rev']:
2260 2263 if repo is None:
2261 2264 raise error.RepoError(_('there is no Mercurial repository here '
2262 2265 '(.hg not found)'))
2263 2266 revs = scmutil.revrange(repo, opts[r'rev'])
2264 2267
2265 2268 props = {}
2266 2269 for d in opts[r'define']:
2267 2270 try:
2268 2271 k, v = (e.strip() for e in d.split('=', 1))
2269 2272 if not k or k == 'ui':
2270 2273 raise ValueError
2271 2274 props[k] = v
2272 2275 except ValueError:
2273 2276 raise error.Abort(_('malformed keyword definition: %s') % d)
2274 2277
2275 2278 if ui.verbose:
2276 2279 aliases = ui.configitems('templatealias')
2277 2280 tree = templater.parse(tmpl)
2278 2281 ui.note(templater.prettyformat(tree), '\n')
2279 2282 newtree = templater.expandaliases(tree, aliases)
2280 2283 if newtree != tree:
2281 2284 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2282 2285
2283 2286 if revs is None:
2284 2287 t = formatter.maketemplater(ui, tmpl)
2285 2288 props['ui'] = ui
2286 2289 ui.write(t.render(props))
2287 2290 else:
2288 2291 displayer = cmdutil.makelogtemplater(ui, repo, tmpl)
2289 2292 for r in revs:
2290 2293 displayer.show(repo[r], **pycompat.strkwargs(props))
2291 2294 displayer.close()
2292 2295
2293 2296 @command('debugupdatecaches', [])
2294 2297 def debugupdatecaches(ui, repo, *pats, **opts):
2295 2298 """warm all known caches in the repository"""
2296 2299 with repo.wlock(), repo.lock():
2297 2300 repo.updatecaches()
2298 2301
2299 2302 @command('debugupgraderepo', [
2300 2303 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2301 2304 ('', 'run', False, _('performs an upgrade')),
2302 2305 ])
2303 2306 def debugupgraderepo(ui, repo, run=False, optimize=None):
2304 2307 """upgrade a repository to use different features
2305 2308
2306 2309 If no arguments are specified, the repository is evaluated for upgrade
2307 2310 and a list of problems and potential optimizations is printed.
2308 2311
2309 2312 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2310 2313 can be influenced via additional arguments. More details will be provided
2311 2314 by the command output when run without ``--run``.
2312 2315
2313 2316 During the upgrade, the repository will be locked and no writes will be
2314 2317 allowed.
2315 2318
2316 2319 At the end of the upgrade, the repository may not be readable while new
2317 2320 repository data is swapped in. This window will be as long as it takes to
2318 2321 rename some directories inside the ``.hg`` directory. On most machines, this
2319 2322 should complete almost instantaneously and the chances of a consumer being
2320 2323 unable to access the repository should be low.
2321 2324 """
2322 2325 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2323 2326
2324 2327 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2325 2328 inferrepo=True)
2326 2329 def debugwalk(ui, repo, *pats, **opts):
2327 2330 """show how files match on given patterns"""
2328 2331 opts = pycompat.byteskwargs(opts)
2329 2332 m = scmutil.match(repo[None], pats, opts)
2330 2333 ui.write(('matcher: %r\n' % m))
2331 2334 items = list(repo[None].walk(m))
2332 2335 if not items:
2333 2336 return
2334 2337 f = lambda fn: fn
2335 2338 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2336 2339 f = lambda fn: util.normpath(fn)
2337 2340 fmt = 'f %%-%ds %%-%ds %%s' % (
2338 2341 max([len(abs) for abs in items]),
2339 2342 max([len(m.rel(abs)) for abs in items]))
2340 2343 for abs in items:
2341 2344 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2342 2345 ui.write("%s\n" % line.rstrip())
2343 2346
2344 2347 @command('debugwireargs',
2345 2348 [('', 'three', '', 'three'),
2346 2349 ('', 'four', '', 'four'),
2347 2350 ('', 'five', '', 'five'),
2348 2351 ] + cmdutil.remoteopts,
2349 2352 _('REPO [OPTIONS]... [ONE [TWO]]'),
2350 2353 norepo=True)
2351 2354 def debugwireargs(ui, repopath, *vals, **opts):
2352 2355 opts = pycompat.byteskwargs(opts)
2353 2356 repo = hg.peer(ui, opts, repopath)
2354 2357 for opt in cmdutil.remoteopts:
2355 2358 del opts[opt[1]]
2356 2359 args = {}
2357 2360 for k, v in opts.iteritems():
2358 2361 if v:
2359 2362 args[k] = v
2360 2363 # run twice to check that we don't mess up the stream for the next command
2361 2364 res1 = repo.debugwireargs(*vals, **args)
2362 2365 res2 = repo.debugwireargs(*vals, **args)
2363 2366 ui.write("%s\n" % res1)
2364 2367 if res1 != res2:
2365 2368 ui.warn("%s\n" % res2)
@@ -1,258 +1,263 b''
1 1 # setdiscovery.py - improved discovery of common nodeset for mercurial
2 2 #
3 3 # Copyright 2010 Benoit Boissinot <bboissin@gmail.com>
4 4 # and Peter Arrenbrecht <peter@arrenbrecht.ch>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8 """
9 9 Algorithm works in the following way. You have two repository: local and
10 10 remote. They both contains a DAG of changelists.
11 11
12 12 The goal of the discovery protocol is to find one set of node *common*,
13 13 the set of nodes shared by local and remote.
14 14
15 15 One of the issue with the original protocol was latency, it could
16 16 potentially require lots of roundtrips to discover that the local repo was a
17 17 subset of remote (which is a very common case, you usually have few changes
18 18 compared to upstream, while upstream probably had lots of development).
19 19
20 20 The new protocol only requires one interface for the remote repo: `known()`,
21 21 which given a set of changelists tells you if they are present in the DAG.
22 22
23 23 The algorithm then works as follow:
24 24
25 25 - We will be using three sets, `common`, `missing`, `unknown`. Originally
26 26 all nodes are in `unknown`.
27 27 - Take a sample from `unknown`, call `remote.known(sample)`
28 28 - For each node that remote knows, move it and all its ancestors to `common`
29 29 - For each node that remote doesn't know, move it and all its descendants
30 30 to `missing`
31 31 - Iterate until `unknown` is empty
32 32
33 33 There are a couple optimizations, first is instead of starting with a random
34 34 sample of missing, start by sending all heads, in the case where the local
35 35 repo is a subset, you computed the answer in one round trip.
36 36
37 37 Then you can do something similar to the bisecting strategy used when
38 38 finding faulty changesets. Instead of random samples, you can try picking
39 39 nodes that will maximize the number of nodes that will be
40 40 classified with it (since all ancestors or descendants will be marked as well).
41 41 """
42 42
43 43 from __future__ import absolute_import
44 44
45 45 import collections
46 46 import random
47 47
48 48 from .i18n import _
49 49 from .node import (
50 50 nullid,
51 51 nullrev,
52 52 )
53 53 from . import (
54 54 dagutil,
55 55 error,
56 56 util,
57 57 )
58 58
59 59 def _updatesample(dag, nodes, sample, quicksamplesize=0):
60 60 """update an existing sample to match the expected size
61 61
62 62 The sample is updated with nodes exponentially distant from each head of the
63 63 <nodes> set. (H~1, H~2, H~4, H~8, etc).
64 64
65 65 If a target size is specified, the sampling will stop once this size is
66 66 reached. Otherwise sampling will happen until roots of the <nodes> set are
67 67 reached.
68 68
69 69 :dag: a dag object from dagutil
70 70 :nodes: set of nodes we want to discover (if None, assume the whole dag)
71 71 :sample: a sample to update
72 72 :quicksamplesize: optional target size of the sample"""
73 73 # if nodes is empty we scan the entire graph
74 74 if nodes:
75 75 heads = dag.headsetofconnecteds(nodes)
76 76 else:
77 77 heads = dag.heads()
78 78 dist = {}
79 79 visit = collections.deque(heads)
80 80 seen = set()
81 81 factor = 1
82 82 while visit:
83 83 curr = visit.popleft()
84 84 if curr in seen:
85 85 continue
86 86 d = dist.setdefault(curr, 1)
87 87 if d > factor:
88 88 factor *= 2
89 89 if d == factor:
90 90 sample.add(curr)
91 91 if quicksamplesize and (len(sample) >= quicksamplesize):
92 92 return
93 93 seen.add(curr)
94 94 for p in dag.parents(curr):
95 95 if not nodes or p in nodes:
96 96 dist.setdefault(p, d + 1)
97 97 visit.append(p)
98 98
99 99 def _takequicksample(dag, nodes, size):
100 100 """takes a quick sample of size <size>
101 101
102 102 It is meant for initial sampling and focuses on querying heads and close
103 103 ancestors of heads.
104 104
105 105 :dag: a dag object
106 106 :nodes: set of nodes to discover
107 107 :size: the maximum size of the sample"""
108 108 sample = dag.headsetofconnecteds(nodes)
109 109 if size <= len(sample):
110 110 return _limitsample(sample, size)
111 111 _updatesample(dag, None, sample, quicksamplesize=size)
112 112 return sample
113 113
114 114 def _takefullsample(dag, nodes, size):
115 115 sample = dag.headsetofconnecteds(nodes)
116 116 # update from heads
117 117 _updatesample(dag, nodes, sample)
118 118 # update from roots
119 119 _updatesample(dag.inverse(), nodes, sample)
120 120 assert sample
121 121 sample = _limitsample(sample, size)
122 122 if len(sample) < size:
123 123 more = size - len(sample)
124 124 sample.update(random.sample(list(nodes - sample), more))
125 125 return sample
126 126
127 127 def _limitsample(sample, desiredlen):
128 128 """return a random subset of sample of at most desiredlen item"""
129 129 if len(sample) > desiredlen:
130 130 sample = set(random.sample(sample, desiredlen))
131 131 return sample
132 132
133 133 def findcommonheads(ui, local, remote,
134 134 initialsamplesize=100,
135 135 fullsamplesize=200,
136 abortwhenunrelated=True):
136 abortwhenunrelated=True,
137 ancestorsof=None):
137 138 '''Return a tuple (common, anyincoming, remoteheads) used to identify
138 139 missing nodes from or in remote.
139 140 '''
140 141 start = util.timer()
141 142
142 143 roundtrips = 0
143 144 cl = local.changelog
144 dag = dagutil.revlogdag(cl)
145 localsubset = None
146 if ancestorsof is not None:
147 rev = local.changelog.rev
148 localsubset = [rev(n) for n in ancestorsof]
149 dag = dagutil.revlogdag(cl, localsubset=localsubset)
145 150
146 151 # early exit if we know all the specified remote heads already
147 152 ui.debug("query 1; heads\n")
148 153 roundtrips += 1
149 154 ownheads = dag.heads()
150 155 sample = _limitsample(ownheads, initialsamplesize)
151 156 # indices between sample and externalized version must match
152 157 sample = list(sample)
153 158 batch = remote.iterbatch()
154 159 batch.heads()
155 160 batch.known(dag.externalizeall(sample))
156 161 batch.submit()
157 162 srvheadhashes, yesno = batch.results()
158 163
159 164 if cl.tip() == nullid:
160 165 if srvheadhashes != [nullid]:
161 166 return [nullid], True, srvheadhashes
162 167 return [nullid], False, []
163 168
164 169 # start actual discovery (we note this before the next "if" for
165 170 # compatibility reasons)
166 171 ui.status(_("searching for changes\n"))
167 172
168 173 srvheads = dag.internalizeall(srvheadhashes, filterunknown=True)
169 174 if len(srvheads) == len(srvheadhashes):
170 175 ui.debug("all remote heads known locally\n")
171 176 return (srvheadhashes, False, srvheadhashes,)
172 177
173 178 if sample and len(ownheads) <= initialsamplesize and all(yesno):
174 179 ui.note(_("all local heads known remotely\n"))
175 180 ownheadhashes = dag.externalizeall(ownheads)
176 181 return (ownheadhashes, True, srvheadhashes,)
177 182
178 183 # full blown discovery
179 184
180 185 # own nodes I know we both know
181 186 # treat remote heads (and maybe own heads) as a first implicit sample
182 187 # response
183 188 common = cl.incrementalmissingrevs(srvheads)
184 189 commoninsample = set(n for i, n in enumerate(sample) if yesno[i])
185 190 common.addbases(commoninsample)
186 191 # own nodes where I don't know if remote knows them
187 192 undecided = set(common.missingancestors(ownheads))
188 193 # own nodes I know remote lacks
189 194 missing = set()
190 195
191 196 full = False
192 197 while undecided:
193 198
194 199 if sample:
195 200 missinginsample = [n for i, n in enumerate(sample) if not yesno[i]]
196 201 missing.update(dag.descendantset(missinginsample, missing))
197 202
198 203 undecided.difference_update(missing)
199 204
200 205 if not undecided:
201 206 break
202 207
203 208 if full or common.hasbases():
204 209 if full:
205 210 ui.note(_("sampling from both directions\n"))
206 211 else:
207 212 ui.debug("taking initial sample\n")
208 213 samplefunc = _takefullsample
209 214 targetsize = fullsamplesize
210 215 else:
211 216 # use even cheaper initial sample
212 217 ui.debug("taking quick initial sample\n")
213 218 samplefunc = _takequicksample
214 219 targetsize = initialsamplesize
215 220 if len(undecided) < targetsize:
216 221 sample = list(undecided)
217 222 else:
218 223 sample = samplefunc(dag, undecided, targetsize)
219 224 sample = _limitsample(sample, targetsize)
220 225
221 226 roundtrips += 1
222 227 ui.progress(_('searching'), roundtrips, unit=_('queries'))
223 228 ui.debug("query %i; still undecided: %i, sample size is: %i\n"
224 229 % (roundtrips, len(undecided), len(sample)))
225 230 # indices between sample and externalized version must match
226 231 sample = list(sample)
227 232 yesno = remote.known(dag.externalizeall(sample))
228 233 full = True
229 234
230 235 if sample:
231 236 commoninsample = set(n for i, n in enumerate(sample) if yesno[i])
232 237 common.addbases(commoninsample)
233 238 common.removeancestorsfrom(undecided)
234 239
235 240 # heads(common) == heads(common.bases) since common represents common.bases
236 241 # and all its ancestors
237 242 result = dag.headsetofconnecteds(common.bases)
238 243 # common.bases can include nullrev, but our contract requires us to not
239 244 # return any heads in that case, so discard that
240 245 result.discard(nullrev)
241 246 elapsed = util.timer() - start
242 247 ui.progress(_('searching'), None)
243 248 ui.debug("%d total queries in %.4fs\n" % (roundtrips, elapsed))
244 249 msg = ('found %d common and %d unknown server heads,'
245 250 ' %d roundtrips in %.4fs\n')
246 251 missing = set(result) - set(srvheads)
247 252 ui.log('discovery', msg, len(result), len(missing), roundtrips,
248 253 elapsed)
249 254
250 255 if not result and srvheadhashes != [nullid]:
251 256 if abortwhenunrelated:
252 257 raise error.Abort(_("repository is unrelated"))
253 258 else:
254 259 ui.warn(_("warning: repository is unrelated\n"))
255 260 return ({nullid}, True, srvheadhashes,)
256 261
257 262 anyincoming = (srvheadhashes != [nullid])
258 263 return dag.externalizeall(result), anyincoming, srvheadhashes
@@ -1,385 +1,385 b''
1 1 Show all commands except debug commands
2 2 $ hg debugcomplete
3 3 add
4 4 addremove
5 5 annotate
6 6 archive
7 7 backout
8 8 bisect
9 9 bookmarks
10 10 branch
11 11 branches
12 12 bundle
13 13 cat
14 14 clone
15 15 commit
16 16 config
17 17 copy
18 18 diff
19 19 export
20 20 files
21 21 forget
22 22 graft
23 23 grep
24 24 heads
25 25 help
26 26 identify
27 27 import
28 28 incoming
29 29 init
30 30 locate
31 31 log
32 32 manifest
33 33 merge
34 34 outgoing
35 35 parents
36 36 paths
37 37 phase
38 38 pull
39 39 push
40 40 recover
41 41 remove
42 42 rename
43 43 resolve
44 44 revert
45 45 rollback
46 46 root
47 47 serve
48 48 status
49 49 summary
50 50 tag
51 51 tags
52 52 tip
53 53 unbundle
54 54 update
55 55 verify
56 56 version
57 57
58 58 Show all commands that start with "a"
59 59 $ hg debugcomplete a
60 60 add
61 61 addremove
62 62 annotate
63 63 archive
64 64
65 65 Do not show debug commands if there are other candidates
66 66 $ hg debugcomplete d
67 67 diff
68 68
69 69 Show debug commands if there are no other candidates
70 70 $ hg debugcomplete debug
71 71 debugancestor
72 72 debugapplystreamclonebundle
73 73 debugbuilddag
74 74 debugbundle
75 75 debugcapabilities
76 76 debugcheckstate
77 77 debugcolor
78 78 debugcommands
79 79 debugcomplete
80 80 debugconfig
81 81 debugcreatestreamclonebundle
82 82 debugdag
83 83 debugdata
84 84 debugdate
85 85 debugdeltachain
86 86 debugdirstate
87 87 debugdiscovery
88 88 debugextensions
89 89 debugfileset
90 90 debugfsinfo
91 91 debuggetbundle
92 92 debugignore
93 93 debugindex
94 94 debugindexdot
95 95 debuginstall
96 96 debugknown
97 97 debuglabelcomplete
98 98 debuglocks
99 99 debugmergestate
100 100 debugnamecomplete
101 101 debugobsolete
102 102 debugpathcomplete
103 103 debugpickmergetool
104 104 debugpushkey
105 105 debugpvec
106 106 debugrebuilddirstate
107 107 debugrebuildfncache
108 108 debugrename
109 109 debugrevlog
110 110 debugrevspec
111 111 debugsetparents
112 112 debugssl
113 113 debugsub
114 114 debugsuccessorssets
115 115 debugtemplate
116 116 debugupdatecaches
117 117 debugupgraderepo
118 118 debugwalk
119 119 debugwireargs
120 120
121 121 Do not show the alias of a debug command if there are other candidates
122 122 (this should hide rawcommit)
123 123 $ hg debugcomplete r
124 124 recover
125 125 remove
126 126 rename
127 127 resolve
128 128 revert
129 129 rollback
130 130 root
131 131 Show the alias of a debug command if there are no other candidates
132 132 $ hg debugcomplete rawc
133 133
134 134
135 135 Show the global options
136 136 $ hg debugcomplete --options | sort
137 137 --color
138 138 --config
139 139 --cwd
140 140 --debug
141 141 --debugger
142 142 --encoding
143 143 --encodingmode
144 144 --help
145 145 --hidden
146 146 --noninteractive
147 147 --pager
148 148 --profile
149 149 --quiet
150 150 --repository
151 151 --time
152 152 --traceback
153 153 --verbose
154 154 --version
155 155 -R
156 156 -h
157 157 -q
158 158 -v
159 159 -y
160 160
161 161 Show the options for the "serve" command
162 162 $ hg debugcomplete --options serve | sort
163 163 --accesslog
164 164 --address
165 165 --certificate
166 166 --cmdserver
167 167 --color
168 168 --config
169 169 --cwd
170 170 --daemon
171 171 --daemon-postexec
172 172 --debug
173 173 --debugger
174 174 --encoding
175 175 --encodingmode
176 176 --errorlog
177 177 --help
178 178 --hidden
179 179 --ipv6
180 180 --name
181 181 --noninteractive
182 182 --pager
183 183 --pid-file
184 184 --port
185 185 --prefix
186 186 --profile
187 187 --quiet
188 188 --repository
189 189 --stdio
190 190 --style
191 191 --subrepos
192 192 --templates
193 193 --time
194 194 --traceback
195 195 --verbose
196 196 --version
197 197 --web-conf
198 198 -6
199 199 -A
200 200 -E
201 201 -R
202 202 -S
203 203 -a
204 204 -d
205 205 -h
206 206 -n
207 207 -p
208 208 -q
209 209 -t
210 210 -v
211 211 -y
212 212
213 213 Show an error if we use --options with an ambiguous abbreviation
214 214 $ hg debugcomplete --options s
215 215 hg: command 's' is ambiguous:
216 216 serve showconfig status summary
217 217 [255]
218 218
219 219 Show all commands + options
220 220 $ hg debugcommands
221 221 add: include, exclude, subrepos, dry-run
222 222 annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, skip, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, include, exclude, template
223 223 clone: noupdate, updaterev, rev, branch, pull, uncompressed, stream, ssh, remotecmd, insecure
224 224 commit: addremove, close-branch, amend, secret, edit, interactive, include, exclude, message, logfile, date, user, subrepos
225 225 diff: rev, change, text, git, binary, nodates, noprefix, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, unified, stat, root, include, exclude, subrepos
226 226 export: output, switch-parent, rev, text, git, binary, nodates
227 227 forget: include, exclude
228 228 init: ssh, remotecmd, insecure
229 229 log: follow, follow-first, date, copies, keyword, rev, line-range, removed, only-merges, user, only-branch, branch, prune, patch, git, limit, no-merges, stat, graph, style, template, include, exclude
230 230 merge: force, rev, preview, tool
231 231 pull: update, force, rev, bookmark, branch, ssh, remotecmd, insecure
232 232 push: force, rev, bookmark, branch, new-branch, pushvars, ssh, remotecmd, insecure
233 233 remove: after, force, subrepos, include, exclude
234 234 serve: accesslog, daemon, daemon-postexec, errorlog, port, address, prefix, name, web-conf, webdir-conf, pid-file, stdio, cmdserver, templates, style, ipv6, certificate, subrepos
235 235 status: all, modified, added, removed, deleted, clean, unknown, ignored, no-status, terse, copies, print0, rev, change, include, exclude, subrepos, template
236 236 summary: remote
237 237 update: clean, check, merge, date, rev, tool
238 238 addremove: similarity, subrepos, include, exclude, dry-run
239 239 archive: no-decode, prefix, rev, type, subrepos, include, exclude
240 240 backout: merge, commit, no-commit, parent, rev, edit, tool, include, exclude, message, logfile, date, user
241 241 bisect: reset, good, bad, skip, extend, command, noupdate
242 242 bookmarks: force, rev, delete, rename, inactive, template
243 243 branch: force, clean
244 244 branches: active, closed, template
245 245 bundle: force, rev, branch, base, all, type, ssh, remotecmd, insecure
246 246 cat: output, rev, decode, include, exclude, template
247 247 config: untrusted, edit, local, global, template
248 248 copy: after, force, include, exclude, dry-run
249 249 debugancestor:
250 250 debugapplystreamclonebundle:
251 251 debugbuilddag: mergeable-file, overwritten-file, new-file
252 252 debugbundle: all, part-type, spec
253 253 debugcapabilities:
254 254 debugcheckstate:
255 255 debugcolor: style
256 256 debugcommands:
257 257 debugcomplete: options
258 258 debugcreatestreamclonebundle:
259 259 debugdag: tags, branches, dots, spaces
260 260 debugdata: changelog, manifest, dir
261 261 debugdate: extended
262 262 debugdeltachain: changelog, manifest, dir, template
263 263 debugdirstate: nodates, datesort
264 debugdiscovery: old, nonheads, ssh, remotecmd, insecure
264 debugdiscovery: old, nonheads, rev, ssh, remotecmd, insecure
265 265 debugextensions: template
266 266 debugfileset: rev
267 267 debugfsinfo:
268 268 debuggetbundle: head, common, type
269 269 debugignore:
270 270 debugindex: changelog, manifest, dir, format
271 271 debugindexdot: changelog, manifest, dir
272 272 debuginstall: template
273 273 debugknown:
274 274 debuglabelcomplete:
275 275 debuglocks: force-lock, force-wlock
276 276 debugmergestate:
277 277 debugnamecomplete:
278 278 debugobsolete: flags, record-parents, rev, exclusive, index, delete, date, user, template
279 279 debugpathcomplete: full, normal, added, removed
280 280 debugpickmergetool: rev, changedelete, include, exclude, tool
281 281 debugpushkey:
282 282 debugpvec:
283 283 debugrebuilddirstate: rev, minimal
284 284 debugrebuildfncache:
285 285 debugrename: rev
286 286 debugrevlog: changelog, manifest, dir, dump
287 287 debugrevspec: optimize, show-revs, show-set, show-stage, no-optimized, verify-optimized
288 288 debugsetparents:
289 289 debugssl:
290 290 debugsub: rev
291 291 debugsuccessorssets: closest
292 292 debugtemplate: rev, define
293 293 debugupdatecaches:
294 294 debugupgraderepo: optimize, run
295 295 debugwalk: include, exclude
296 296 debugwireargs: three, four, five, ssh, remotecmd, insecure
297 297 files: rev, print0, include, exclude, template, subrepos
298 298 graft: rev, continue, edit, log, force, currentdate, currentuser, date, user, tool, dry-run
299 299 grep: print0, all, text, follow, ignore-case, files-with-matches, line-number, rev, user, date, template, include, exclude
300 300 heads: rev, topo, active, closed, style, template
301 301 help: extension, command, keyword, system
302 302 identify: rev, num, id, branch, tags, bookmarks, ssh, remotecmd, insecure, template
303 303 import: strip, base, edit, force, no-commit, bypass, partial, exact, prefix, import-branch, message, logfile, date, user, similarity
304 304 incoming: force, newest-first, bundle, rev, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
305 305 locate: rev, print0, fullpath, include, exclude
306 306 manifest: rev, all, template
307 307 outgoing: force, rev, newest-first, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
308 308 parents: rev, style, template
309 309 paths: template
310 310 phase: public, draft, secret, force, rev
311 311 recover:
312 312 rename: after, force, include, exclude, dry-run
313 313 resolve: all, list, mark, unmark, no-status, tool, include, exclude, template
314 314 revert: all, date, rev, no-backup, interactive, include, exclude, dry-run
315 315 rollback: dry-run, force
316 316 root:
317 317 tag: force, local, rev, remove, edit, message, date, user
318 318 tags: template
319 319 tip: patch, git, style, template
320 320 unbundle: update
321 321 verify:
322 322 version: template
323 323
324 324 $ hg init a
325 325 $ cd a
326 326 $ echo fee > fee
327 327 $ hg ci -q -Amfee
328 328 $ hg tag fee
329 329 $ mkdir fie
330 330 $ echo dead > fie/dead
331 331 $ echo live > fie/live
332 332 $ hg bookmark fo
333 333 $ hg branch -q fie
334 334 $ hg ci -q -Amfie
335 335 $ echo fo > fo
336 336 $ hg branch -qf default
337 337 $ hg ci -q -Amfo
338 338 $ echo Fum > Fum
339 339 $ hg ci -q -AmFum
340 340 $ hg bookmark Fum
341 341
342 342 Test debugpathcomplete
343 343
344 344 $ hg debugpathcomplete f
345 345 fee
346 346 fie
347 347 fo
348 348 $ hg debugpathcomplete -f f
349 349 fee
350 350 fie/dead
351 351 fie/live
352 352 fo
353 353
354 354 $ hg rm Fum
355 355 $ hg debugpathcomplete -r F
356 356 Fum
357 357
358 358 Test debugnamecomplete
359 359
360 360 $ hg debugnamecomplete
361 361 Fum
362 362 default
363 363 fee
364 364 fie
365 365 fo
366 366 tip
367 367 $ hg debugnamecomplete f
368 368 fee
369 369 fie
370 370 fo
371 371
372 372 Test debuglabelcomplete, a deprecated name for debugnamecomplete that is still
373 373 used for completions in some shells.
374 374
375 375 $ hg debuglabelcomplete
376 376 Fum
377 377 default
378 378 fee
379 379 fie
380 380 fo
381 381 tip
382 382 $ hg debuglabelcomplete f
383 383 fee
384 384 fie
385 385 fo
@@ -1,413 +1,552 b''
1 1
2 2 Function to test discovery between two repos in both directions, using both the local shortcut
3 3 (which is currently not activated by default) and the full remotable protocol:
4 4
5 5 $ testdesc() { # revs_a, revs_b, dagdesc
6 6 > if [ -d foo ]; then rm -rf foo; fi
7 7 > hg init foo
8 8 > cd foo
9 9 > hg debugbuilddag "$3"
10 10 > hg clone . a $1 --quiet
11 11 > hg clone . b $2 --quiet
12 12 > echo
13 13 > echo "% -- a -> b tree"
14 14 > hg -R a debugdiscovery b --verbose --old
15 15 > echo
16 16 > echo "% -- a -> b set"
17 17 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true
18 18 > echo
19 > echo "% -- a -> b set (tip only)"
20 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true --rev tip
21 > echo
19 22 > echo "% -- b -> a tree"
20 23 > hg -R b debugdiscovery a --verbose --old
21 24 > echo
22 25 > echo "% -- b -> a set"
23 26 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true
27 > echo
28 > echo "% -- b -> a set (tip only)"
29 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true --rev tip
24 30 > cd ..
25 31 > }
26 32
27 33
28 34 Small superset:
29 35
30 36 $ testdesc '-ra1 -ra2' '-rb1 -rb2 -rb3' '
31 37 > +2:f +1:a1:b1
32 38 > <f +4 :a2
33 39 > +5 :b2
34 40 > <f +3 :b3'
35 41
36 42 % -- a -> b tree
37 43 comparing with b
38 44 searching for changes
39 45 unpruned common: 01241442b3c2 66f7d451a68b b5714e113bc0
40 46 common heads: 01241442b3c2 b5714e113bc0
41 47 local is subset
42 48
43 49 % -- a -> b set
44 50 comparing with b
45 51 query 1; heads
46 52 searching for changes
47 53 all local heads known remotely
48 54 common heads: 01241442b3c2 b5714e113bc0
49 55 local is subset
50 56
57 % -- a -> b set (tip only)
58 comparing with b
59 query 1; heads
60 searching for changes
61 all local heads known remotely
62 common heads: b5714e113bc0
63
51 64 % -- b -> a tree
52 65 comparing with a
53 66 searching for changes
54 67 unpruned common: 01241442b3c2 b5714e113bc0
55 68 common heads: 01241442b3c2 b5714e113bc0
56 69 remote is subset
57 70
58 71 % -- b -> a set
59 72 comparing with a
60 73 query 1; heads
61 74 searching for changes
62 75 all remote heads known locally
63 76 common heads: 01241442b3c2 b5714e113bc0
64 77 remote is subset
78
79 % -- b -> a set (tip only)
80 comparing with a
81 query 1; heads
82 searching for changes
83 all remote heads known locally
84 common heads: 01241442b3c2 b5714e113bc0
85 remote is subset
65 86
66 87
67 88 Many new:
68 89
69 90 $ testdesc '-ra1 -ra2' '-rb' '
70 91 > +2:f +3:a1 +3:b
71 92 > <f +30 :a2'
72 93
73 94 % -- a -> b tree
74 95 comparing with b
75 96 searching for changes
76 97 unpruned common: bebd167eb94d
77 98 common heads: bebd167eb94d
78 99
79 100 % -- a -> b set
80 101 comparing with b
81 102 query 1; heads
82 103 searching for changes
83 104 taking initial sample
84 105 searching: 2 queries
85 106 query 2; still undecided: 29, sample size is: 29
86 107 2 total queries in *.????s (glob)
87 108 common heads: bebd167eb94d
88 109
110 % -- a -> b set (tip only)
111 comparing with b
112 query 1; heads
113 searching for changes
114 taking quick initial sample
115 searching: 2 queries
116 query 2; still undecided: 31, sample size is: 31
117 2 total queries in *.????s (glob)
118 common heads: 66f7d451a68b
119
89 120 % -- b -> a tree
90 121 comparing with a
91 122 searching for changes
92 123 unpruned common: 66f7d451a68b bebd167eb94d
93 124 common heads: bebd167eb94d
94 125
95 126 % -- b -> a set
96 127 comparing with a
97 128 query 1; heads
98 129 searching for changes
99 130 taking initial sample
100 131 searching: 2 queries
101 132 query 2; still undecided: 2, sample size is: 2
102 133 2 total queries in *.????s (glob)
103 134 common heads: bebd167eb94d
135
136 % -- b -> a set (tip only)
137 comparing with a
138 query 1; heads
139 searching for changes
140 taking initial sample
141 searching: 2 queries
142 query 2; still undecided: 2, sample size is: 2
143 2 total queries in *.????s (glob)
144 common heads: bebd167eb94d
104 145
105 146 Both sides many new with stub:
106 147
107 148 $ testdesc '-ra1 -ra2' '-rb' '
108 149 > +2:f +2:a1 +30 :b
109 150 > <f +30 :a2'
110 151
111 152 % -- a -> b tree
112 153 comparing with b
113 154 searching for changes
114 155 unpruned common: 2dc09a01254d
115 156 common heads: 2dc09a01254d
116 157
117 158 % -- a -> b set
118 159 comparing with b
119 160 query 1; heads
120 161 searching for changes
121 162 taking initial sample
122 163 searching: 2 queries
123 164 query 2; still undecided: 29, sample size is: 29
124 165 2 total queries in *.????s (glob)
125 166 common heads: 2dc09a01254d
126 167
168 % -- a -> b set (tip only)
169 comparing with b
170 query 1; heads
171 searching for changes
172 taking quick initial sample
173 searching: 2 queries
174 query 2; still undecided: 31, sample size is: 31
175 2 total queries in *.????s (glob)
176 common heads: 66f7d451a68b
177
127 178 % -- b -> a tree
128 179 comparing with a
129 180 searching for changes
130 181 unpruned common: 2dc09a01254d 66f7d451a68b
131 182 common heads: 2dc09a01254d
132 183
133 184 % -- b -> a set
134 185 comparing with a
135 186 query 1; heads
136 187 searching for changes
137 188 taking initial sample
138 189 searching: 2 queries
139 190 query 2; still undecided: 29, sample size is: 29
140 191 2 total queries in *.????s (glob)
141 192 common heads: 2dc09a01254d
193
194 % -- b -> a set (tip only)
195 comparing with a
196 query 1; heads
197 searching for changes
198 taking initial sample
199 searching: 2 queries
200 query 2; still undecided: 29, sample size is: 29
201 2 total queries in *.????s (glob)
202 common heads: 2dc09a01254d
142 203
143 204
144 205 Both many new:
145 206
146 207 $ testdesc '-ra' '-rb' '
147 208 > +2:f +30 :b
148 209 > <f +30 :a'
149 210
150 211 % -- a -> b tree
151 212 comparing with b
152 213 searching for changes
153 214 unpruned common: 66f7d451a68b
154 215 common heads: 66f7d451a68b
155 216
156 217 % -- a -> b set
157 218 comparing with b
158 219 query 1; heads
159 220 searching for changes
160 221 taking quick initial sample
161 222 searching: 2 queries
162 223 query 2; still undecided: 31, sample size is: 31
163 224 2 total queries in *.????s (glob)
164 225 common heads: 66f7d451a68b
165 226
227 % -- a -> b set (tip only)
228 comparing with b
229 query 1; heads
230 searching for changes
231 taking quick initial sample
232 searching: 2 queries
233 query 2; still undecided: 31, sample size is: 31
234 2 total queries in *.????s (glob)
235 common heads: 66f7d451a68b
236
166 237 % -- b -> a tree
167 238 comparing with a
168 239 searching for changes
169 240 unpruned common: 66f7d451a68b
170 241 common heads: 66f7d451a68b
171 242
172 243 % -- b -> a set
173 244 comparing with a
174 245 query 1; heads
175 246 searching for changes
176 247 taking quick initial sample
177 248 searching: 2 queries
178 249 query 2; still undecided: 31, sample size is: 31
179 250 2 total queries in *.????s (glob)
180 251 common heads: 66f7d451a68b
252
253 % -- b -> a set (tip only)
254 comparing with a
255 query 1; heads
256 searching for changes
257 taking quick initial sample
258 searching: 2 queries
259 query 2; still undecided: 31, sample size is: 31
260 2 total queries in *.????s (glob)
261 common heads: 66f7d451a68b
181 262
182 263
183 264 Both many new skewed:
184 265
185 266 $ testdesc '-ra' '-rb' '
186 267 > +2:f +30 :b
187 268 > <f +50 :a'
188 269
189 270 % -- a -> b tree
190 271 comparing with b
191 272 searching for changes
192 273 unpruned common: 66f7d451a68b
193 274 common heads: 66f7d451a68b
194 275
195 276 % -- a -> b set
196 277 comparing with b
197 278 query 1; heads
198 279 searching for changes
199 280 taking quick initial sample
200 281 searching: 2 queries
201 282 query 2; still undecided: 51, sample size is: 51
202 283 2 total queries in *.????s (glob)
203 284 common heads: 66f7d451a68b
204 285
286 % -- a -> b set (tip only)
287 comparing with b
288 query 1; heads
289 searching for changes
290 taking quick initial sample
291 searching: 2 queries
292 query 2; still undecided: 51, sample size is: 51
293 2 total queries in *.????s (glob)
294 common heads: 66f7d451a68b
295
205 296 % -- b -> a tree
206 297 comparing with a
207 298 searching for changes
208 299 unpruned common: 66f7d451a68b
209 300 common heads: 66f7d451a68b
210 301
211 302 % -- b -> a set
212 303 comparing with a
213 304 query 1; heads
214 305 searching for changes
215 306 taking quick initial sample
216 307 searching: 2 queries
217 308 query 2; still undecided: 31, sample size is: 31
218 309 2 total queries in *.????s (glob)
219 310 common heads: 66f7d451a68b
311
312 % -- b -> a set (tip only)
313 comparing with a
314 query 1; heads
315 searching for changes
316 taking quick initial sample
317 searching: 2 queries
318 query 2; still undecided: 31, sample size is: 31
319 2 total queries in *.????s (glob)
320 common heads: 66f7d451a68b
220 321
221 322
222 323 Both many new on top of long history:
223 324
224 325 $ testdesc '-ra' '-rb' '
225 326 > +1000:f +30 :b
226 327 > <f +50 :a'
227 328
228 329 % -- a -> b tree
229 330 comparing with b
230 331 searching for changes
231 332 unpruned common: 7ead0cba2838
232 333 common heads: 7ead0cba2838
233 334
234 335 % -- a -> b set
235 336 comparing with b
236 337 query 1; heads
237 338 searching for changes
238 339 taking quick initial sample
239 340 searching: 2 queries
240 341 query 2; still undecided: 1049, sample size is: 11
241 342 sampling from both directions
242 343 searching: 3 queries
243 344 query 3; still undecided: 31, sample size is: 31
244 345 3 total queries in *.????s (glob)
245 346 common heads: 7ead0cba2838
246 347
348 % -- a -> b set (tip only)
349 comparing with b
350 query 1; heads
351 searching for changes
352 taking quick initial sample
353 searching: 2 queries
354 query 2; still undecided: 1049, sample size is: 11
355 sampling from both directions
356 searching: 3 queries
357 query 3; still undecided: 31, sample size is: 31
358 3 total queries in *.????s (glob)
359 common heads: 7ead0cba2838
360
247 361 % -- b -> a tree
248 362 comparing with a
249 363 searching for changes
250 364 unpruned common: 7ead0cba2838
251 365 common heads: 7ead0cba2838
252 366
253 367 % -- b -> a set
254 368 comparing with a
255 369 query 1; heads
256 370 searching for changes
257 371 taking quick initial sample
258 372 searching: 2 queries
259 373 query 2; still undecided: 1029, sample size is: 11
260 374 sampling from both directions
261 375 searching: 3 queries
262 376 query 3; still undecided: 15, sample size is: 15
263 377 3 total queries in *.????s (glob)
264 378 common heads: 7ead0cba2838
379
380 % -- b -> a set (tip only)
381 comparing with a
382 query 1; heads
383 searching for changes
384 taking quick initial sample
385 searching: 2 queries
386 query 2; still undecided: 1029, sample size is: 11
387 sampling from both directions
388 searching: 3 queries
389 query 3; still undecided: 15, sample size is: 15
390 3 total queries in *.????s (glob)
391 common heads: 7ead0cba2838
265 392
266 393
267 394 One with >200 heads, which used to use up all of the sample:
268 395
269 396 $ hg init manyheads
270 397 $ cd manyheads
271 398 $ echo "+300:r @a" >dagdesc
272 399 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
273 400 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
274 401 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
275 402 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
276 403 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
277 404 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
278 405 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
279 406 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
280 407 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
281 408 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
282 409 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
283 410 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
284 411 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
285 412 $ echo "@b *r+3" >>dagdesc # one more head
286 413 $ hg debugbuilddag <dagdesc
287 414 reading DAG from stdin
288 415
289 416 $ hg heads -t --template . | wc -c
290 417 \s*261 (re)
291 418
292 419 $ hg clone -b a . a
293 420 adding changesets
294 421 adding manifests
295 422 adding file changes
296 423 added 1340 changesets with 0 changes to 0 files (+259 heads)
297 424 new changesets 1ea73414a91b:1c51e2c80832
298 425 updating to branch a
299 426 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
300 427 $ hg clone -b b . b
301 428 adding changesets
302 429 adding manifests
303 430 adding file changes
304 431 added 304 changesets with 0 changes to 0 files
305 432 new changesets 1ea73414a91b:513314ca8b3a
306 433 updating to branch b
307 434 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
308 435
309 436 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true
310 437 comparing with b
311 438 query 1; heads
312 439 searching for changes
313 440 taking quick initial sample
314 441 searching: 2 queries
315 442 query 2; still undecided: 1240, sample size is: 100
316 443 sampling from both directions
317 444 searching: 3 queries
318 445 query 3; still undecided: 1140, sample size is: 200
319 446 sampling from both directions
320 447 searching: 4 queries
321 448 query 4; still undecided: \d+, sample size is: 200 (re)
322 449 sampling from both directions
323 450 searching: 5 queries
324 451 query 5; still undecided: \d+, sample size is: 200 (re)
325 452 sampling from both directions
326 453 searching: 6 queries
327 454 query 6; still undecided: \d+, sample size is: \d+ (re)
328 455 6 total queries in *.????s (glob)
329 456 common heads: 3ee37d65064a
457 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --rev tip
458 comparing with b
459 query 1; heads
460 searching for changes
461 taking quick initial sample
462 searching: 2 queries
463 query 2; still undecided: 303, sample size is: 9
464 sampling from both directions
465 searching: 3 queries
466 query 3; still undecided: 3, sample size is: 3
467 3 total queries in *.????s (glob)
468 common heads: 3ee37d65064a
330 469
331 470 Test actual protocol when pulling one new head in addition to common heads
332 471
333 472 $ hg clone -U b c
334 473 $ hg -R c id -ir tip
335 474 513314ca8b3a
336 475 $ hg -R c up -qr default
337 476 $ touch c/f
338 477 $ hg -R c ci -Aqm "extra head"
339 478 $ hg -R c id -i
340 479 e64a39e7da8b
341 480
342 481 $ hg serve -R c -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
343 482 $ cat hg.pid >> $DAEMON_PIDS
344 483
345 484 $ hg -R b incoming http://localhost:$HGPORT/ -T '{node|short}\n'
346 485 comparing with http://localhost:$HGPORT/
347 486 searching for changes
348 487 e64a39e7da8b
349 488
350 489 $ killdaemons.py
351 490 $ cut -d' ' -f6- access.log | grep -v cmd=known # cmd=known uses random sampling
352 491 "GET /?cmd=capabilities HTTP/1.1" 200 -
353 492 "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D513314ca8b3ae4dac8eec56966265b00fcf866db x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
354 493 "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:$USUAL_BUNDLE_CAPS$&cg=1&common=513314ca8b3ae4dac8eec56966265b00fcf866db&heads=e64a39e7da8b0d54bc63e81169aff001c13b3477 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
355 494 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$
356 495 $ cat errors.log
357 496
358 497 $ cd ..
359 498
360 499
361 500 Issue 4438 - test coverage for 3ef893520a85 issues.
362 501
363 502 $ mkdir issue4438
364 503 $ cd issue4438
365 504 #if false
366 505 generate new bundles:
367 506 $ hg init r1
368 507 $ for i in `$PYTHON $TESTDIR/seq.py 101`; do hg -R r1 up -qr null && hg -R r1 branch -q b$i && hg -R r1 ci -qmb$i; done
369 508 $ hg clone -q r1 r2
370 509 $ for i in `$PYTHON $TESTDIR/seq.py 10`; do hg -R r1 up -qr null && hg -R r1 branch -q c$i && hg -R r1 ci -qmc$i; done
371 510 $ hg -R r2 branch -q r2change && hg -R r2 ci -qmr2change
372 511 $ hg -R r1 bundle -qa $TESTDIR/bundles/issue4438-r1.hg
373 512 $ hg -R r2 bundle -qa $TESTDIR/bundles/issue4438-r2.hg
374 513 #else
375 514 use existing bundles:
376 515 $ hg clone -q $TESTDIR/bundles/issue4438-r1.hg r1
377 516 $ hg clone -q $TESTDIR/bundles/issue4438-r2.hg r2
378 517 #endif
379 518
380 519 Set iteration order could cause wrong and unstable results - fixed in 73cfaa348650:
381 520
382 521 $ hg -R r1 outgoing r2 -T'{rev} '
383 522 comparing with r2
384 523 searching for changes
385 524 101 102 103 104 105 106 107 108 109 110 (no-eol)
386 525
387 526 The case where all the 'initialsamplesize' samples already were common would
388 527 give 'all remote heads known locally' without checking the remaining heads -
389 528 fixed in 86c35b7ae300:
390 529
391 530 $ cat >> $TESTTMP/unrandomsample.py << EOF
392 531 > import random
393 532 > def sample(population, k):
394 533 > return sorted(population)[:k]
395 534 > random.sample = sample
396 535 > EOF
397 536
398 537 $ cat >> r1/.hg/hgrc << EOF
399 538 > [extensions]
400 539 > unrandomsample = $TESTTMP/unrandomsample.py
401 540 > EOF
402 541
403 542 $ hg -R r1 outgoing r2 -T'{rev} ' --config extensions.blackbox=
404 543 comparing with r2
405 544 searching for changes
406 545 101 102 103 104 105 106 107 108 109 110 (no-eol)
407 546 $ hg -R r1 --config extensions.blackbox= blackbox
408 547 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> serve --cmdserver chgunix * (glob) (chg !)
409 548 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* (glob)
410 549 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> found 101 common and 1 unknown server heads, 2 roundtrips in *.????s (glob)
411 550 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* exited 0 after *.?? seconds (glob)
412 551 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 --config *extensions.blackbox=* blackbox (glob)
413 552 $ cd ..
General Comments 0
You need to be logged in to leave comments. Login now