##// END OF EJS Templates
discovery: also audit the number of queries done...
marmoute -
r49881:f054a557 default
parent child Browse files
Show More
@@ -1,4883 +1,4884 b''
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import binascii
9 import binascii
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import contextlib
12 import contextlib
13 import difflib
13 import difflib
14 import errno
14 import errno
15 import glob
15 import glob
16 import operator
16 import operator
17 import os
17 import os
18 import platform
18 import platform
19 import random
19 import random
20 import re
20 import re
21 import socket
21 import socket
22 import ssl
22 import ssl
23 import stat
23 import stat
24 import string
24 import string
25 import subprocess
25 import subprocess
26 import sys
26 import sys
27 import time
27 import time
28
28
29 from .i18n import _
29 from .i18n import _
30 from .node import (
30 from .node import (
31 bin,
31 bin,
32 hex,
32 hex,
33 nullrev,
33 nullrev,
34 short,
34 short,
35 )
35 )
36 from .pycompat import (
36 from .pycompat import (
37 getattr,
37 getattr,
38 open,
38 open,
39 )
39 )
40 from . import (
40 from . import (
41 bundle2,
41 bundle2,
42 bundlerepo,
42 bundlerepo,
43 changegroup,
43 changegroup,
44 cmdutil,
44 cmdutil,
45 color,
45 color,
46 context,
46 context,
47 copies,
47 copies,
48 dagparser,
48 dagparser,
49 encoding,
49 encoding,
50 error,
50 error,
51 exchange,
51 exchange,
52 extensions,
52 extensions,
53 filemerge,
53 filemerge,
54 filesetlang,
54 filesetlang,
55 formatter,
55 formatter,
56 hg,
56 hg,
57 httppeer,
57 httppeer,
58 localrepo,
58 localrepo,
59 lock as lockmod,
59 lock as lockmod,
60 logcmdutil,
60 logcmdutil,
61 mergestate as mergestatemod,
61 mergestate as mergestatemod,
62 metadata,
62 metadata,
63 obsolete,
63 obsolete,
64 obsutil,
64 obsutil,
65 pathutil,
65 pathutil,
66 phases,
66 phases,
67 policy,
67 policy,
68 pvec,
68 pvec,
69 pycompat,
69 pycompat,
70 registrar,
70 registrar,
71 repair,
71 repair,
72 repoview,
72 repoview,
73 requirements,
73 requirements,
74 revlog,
74 revlog,
75 revset,
75 revset,
76 revsetlang,
76 revsetlang,
77 scmutil,
77 scmutil,
78 setdiscovery,
78 setdiscovery,
79 simplemerge,
79 simplemerge,
80 sshpeer,
80 sshpeer,
81 sslutil,
81 sslutil,
82 streamclone,
82 streamclone,
83 strip,
83 strip,
84 tags as tagsmod,
84 tags as tagsmod,
85 templater,
85 templater,
86 treediscovery,
86 treediscovery,
87 upgrade,
87 upgrade,
88 url as urlmod,
88 url as urlmod,
89 util,
89 util,
90 vfs as vfsmod,
90 vfs as vfsmod,
91 wireprotoframing,
91 wireprotoframing,
92 wireprotoserver,
92 wireprotoserver,
93 )
93 )
94 from .interfaces import repository
94 from .interfaces import repository
95 from .utils import (
95 from .utils import (
96 cborutil,
96 cborutil,
97 compression,
97 compression,
98 dateutil,
98 dateutil,
99 procutil,
99 procutil,
100 stringutil,
100 stringutil,
101 urlutil,
101 urlutil,
102 )
102 )
103
103
104 from .revlogutils import (
104 from .revlogutils import (
105 deltas as deltautil,
105 deltas as deltautil,
106 nodemap,
106 nodemap,
107 rewrite,
107 rewrite,
108 sidedata,
108 sidedata,
109 )
109 )
110
110
# Convenience alias: debug commands release locks directly without going
# through the lockmod namespace each time.
release = lockmod.release

# Registration table for all debug* commands.  It is seeded with the
# commands registered by the strip extension so both share one table.
table = {}
table.update(strip.command._table)
command = registrar.command(table)
116
116
117
117
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs not in (2, 3):
        raise error.Abort(_(b'either two or three arguments required'))
    if nargs == 3:
        # An explicit index file was given: open it directly, no
        # repository required.
        index, rev1, rev2 = args
        opener = vfsmod.vfs(encoding.getcwd(), audit=False)
        r = revlog.revlog(opener, index)
        lookup = r.lookup
    else:
        # Two arguments: resolve the revisions against the changelog of
        # the local repository, which therefore must exist.
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    ancestor_node = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (r.rev(ancestor_node), hex(ancestor_node)))
137
137
138
138
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # vfs paths are bytes throughout Mercurial; a native str here fails to
    # join with the bytes base path on Python 3, so the filename must be a
    # bytes literal (matching every other vfs path in this file).
    with repo.cachevfs.open(b'eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join(b'eicar-test-file.com'))
154
154
155
155
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the bundle (path or URL), sniff its header, and let the
    # resulting unbundler apply itself onto the local repository.
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
162
162
163
163
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
        (
            b'',
            b'from-existing',
            None,
            _(b'continue from a non-empty repository'),
        ),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
    from_existing=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

     - "+n" is a linear run of n nodes based on the current default parent
     - "." is a single node based on the current default parent
     - "$" resets the default parent to null (implied at the start);
       otherwise the default parent is always the last node created
     - "<p" sets the default parent to the backref p
     - "*p" is a fork at parent p, which is a backref
     - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
     - "/p2" is a merge of the preceding node and p2
     - ":tag" defines a local tag for the preceding node
     - "@branch" sets the named branch for subsequent nodes
     - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

     - a number n, which references the node curr-n, where curr is the current
       node, or
     - the name of a local tag you placed earlier using ":tag", or
     - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    # Refuse to run on a non-empty repo unless --from-existing was given.
    cl = repo.changelog
    if len(cl) > 0 and not from_existing:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG
    # (first parse pass: only counts 'n' node events so the progress bar
    # and the mergeable-file seed below can be sized)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # Second parse pass: actually create the commits under the usual
    # wlock/lock/transaction discipline so a failure rolls back cleanly.
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1
        atbranch = b'default'
        nodeids = []
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # Merge rev: three-way merge the file contents of
                        # both parents against their common ancestor.
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [
                            l.strip()
                            for l in simplemerge.render_minimized(m3)[0]
                        ]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        # Very first rev: start from the seed built above.
                        ml = initialmergedlines
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # Single file rewritten wholesale at every rev.
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    # Brand-new file per rev; merges also carry over the
                    # "nf*" files introduced on the second parent's side.
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: serve content for touched paths,
                    # None (i.e. removal/absence) for anything else.
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                # Map parsed backrefs to previously committed node ids;
                # a negative/absent parent means the null revision.
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # Local tag event: remember it; written out at the end.
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # Branch event: affects all subsequently created nodes.
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
349
349
350
350
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """Dump the contents of the changegroup unbundler ``gen``.

    With ``all`` set, every delta of the changelog, the manifest and each
    filelog section is printed; otherwise only changelog node ids are
    listed.  ``indent`` prefixes each output line (used when nested under
    a bundle2 part).
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # Print the header line for one section, then drain and print
            # every delta in it.
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        # Sections must be consumed strictly in stream order:
        # changelog, manifest, then one section per filelog.
        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        # filelogheader() returns {} at the end-of-stream sentinel.
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
390
390
391
391
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # Marker format newer than we understand: report and move on
        # rather than aborting the whole bundle dump.
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = b"%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        # Reuse the debugobsolete formatter so -T templates apply here too.
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
414
414
415
415
def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    prefix = b' ' * indent
    # Decode the binary phase-heads payload into a per-phase head list.
    heads_by_phase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in heads_by_phase[phase]:
            ui.write(prefix)
            ui.write(b'%s %s\n' % (hex(head), phasename))
424
424
425
425
def _quasirepr(thing):
    """Return a repr-like bytes rendering of ``thing``.

    Mapping types are rendered with their keys sorted so the output is
    deterministic regardless of insertion order.
    """
    mapping_types = (dict, util.sortdict, collections.OrderedDict)
    if isinstance(thing, mapping_types):
        pairs = [b'%s: %s' % (key, thing[key]) for key in sorted(thing)]
        return b'{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
432
432
433
433
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    # Optional filter from --part-type: empty list means "show all parts".
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        # For the part types we know how to decode, dump their payload
        # (indented under the part header) unless --quiet was given.
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
456
456
457
457
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as fh:
        if spec:
            # --spec: only report the bundlespec, do not dump contents.
            spec = exchange.getbundlespec(ui, fh)
            ui.write(b'%s\n' % spec)
            return

        # Dispatch on the bundle format detected from the stream header.
        bundle = exchange.readbundle(ui, fh, bundlepath)
        if isinstance(bundle, bundle2.unbundle20):
            return _debugbundle2(ui, bundle, all=all, **opts)
        _debugchangegroup(ui, bundle, all=all, **opts)
480
480
481
481
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        caps = peer.capabilities()
        ui.writenoi18n(b'Main capabilities:\n')
        for c in sorted(caps):
            ui.write(b' %s\n' % c)
        # Bundle2 capabilities are advertised inside the main capability
        # string; decode and list them separately when present.
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(b2caps.items()):
                ui.write(b' %s\n' % key)
                for v in values:
                    ui.write(b' %s\n' % v)
    finally:
        # Always close the peer connection, even if querying failed.
        peer.close()
501
501
502
502
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = logcmdutil.revsingle(repo, rev, None)
    files = None

    if opts['compute']:
        # Recompute the file-change metadata from the changeset itself.
        files = metadata.compute_all_files_changes(ctx)
    else:
        # Read the precomputed metadata from changelog sidedata storage;
        # `files` stays None (and nothing is printed) when absent.
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is not None:
        for f in sorted(files.touched):
            # Classification precedence: added/removed/merged/salvaged
            # are mutually exclusive; "touched" is the fallback.
            if f in files.added:
                action = b"added"
            elif f in files.removed:
                action = b"removed"
            elif f in files.merged:
                action = b"merged"
            elif f in files.salvaged:
                action = b"salvaged"
            else:
                action = b"touched"

            # Copy information, if any, names the parent side (p1/p2)
            # and the path the file was copied from.
            copy_parent = b""
            copy_source = b""
            if f in files.copied_from_p1:
                copy_parent = b"p1"
                copy_source = files.copied_from_p1[f]
            elif f in files.copied_from_p2:
                copy_parent = b"p2"
                copy_source = files.copied_from_p2[f]

            data = (action, copy_parent, f, copy_source)
            template = b"%-8s %2s: %s, %s;\n"
            ui.write(template % data)
552
552
553
553
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    p1, p2 = repo.dirstate.parents()
    manifest1 = repo[p1].manifest()
    manifest2 = repo[p2].manifest()
    error_count = 0
    for err in repo.dirstate.verify(manifest1, manifest2):
        # each entry is a (format, arg, ...) tuple ready for %-expansion
        ui.warn(err[0] % err[1:])
        error_count += 1
    if error_count:
        raise error.Abort(
            _(b".hg/dirstate inconsistent with current parent's manifest")
        )
567
567
568
568
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    # --style lists configured style labels; the default lists raw colors.
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
581
581
582
582
def _debugdisplaycolor(ui):
    """Print every color/effect name the current color mode supports."""
    # Work on a copy so the caller's ui styles are left untouched.
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # In terminfo mode, user-defined color./terminfo. config entries
        # contribute additional named effects.
        for k, v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[6:]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_(b'available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
    for colorname, label in items:
        # Render each name in its own color so the output demonstrates it.
        ui.write(b'%s\n' % colorname, label=label)
599
599
600
600
def _debugdisplaystyle(ui):
    """Print each configured style label alongside its rendered effects."""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # Pad so the effect lists line up in one column after the longest label.
    column = max(len(name) for name in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            ui.write(b': ')
            ui.write(b' ' * (max(0, column - len(label))))
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
614
614
615
615
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        # Stream bundles copy revlogs wholesale, so phases are not honored.
        warning = _(
            b'(warning: stream clone bundle will contain secret '
            b'revisions)\n'
        )
        ui.warn(warning)

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    req_list = b', '.join(sorted(requirements))
    ui.write(_(b'bundle requirements: %s\n') % req_list)
637
637
638
638
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # Standalone revlog index given on the command line: open it
        # directly from the cwd, without repository path auditing.
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # Emit 'n'(ode) events with the rev's real parents (nullrev
            # filtered out), plus an 'l'(abel) event for requested revs.
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # Map rev -> list of tag names pointing at it.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            # Walk the changelog; emit an 'a'(nnotation) event whenever the
            # branch changes, then the node event, then any tag labels.
            b = b"default"
            for r in cl:
                if branches:
                    # changelog entry field 5 is the extra dict; read branch
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    # dagtextlines consumes the event stream and renders the concise text.
    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
708
708
709
709
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    storage_selected = any(
        opts.get(flag) for flag in (b'changelog', b'manifest', b'dir')
    )
    if storage_selected:
        # -c/-m/--dir picked the storage, so the sole positional
        # argument is the revision, not a file path.
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    storage = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(storage.rawdata(storage.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
725
725
726
726
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    # parsed is a (unixtime, tzoffset) pair
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
745
745
746
746
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    # Bind frequently-used revlog accessors to locals for the per-rev loop.
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # Classify how this rev's delta was stored and sum its chain size.
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # Index entry fields: e[3] = delta base rev, e[5]/e[6] = parents.
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                # A rev that is its own base stores a full snapshot.
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            # Without generaldelta the base is always the previous rev.
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    # Plain-text header; the sparse-read columns are appended only when
    # the revlog actually uses sparse reading.
    fm.plain(
        b' rev chain# chainlen prev delta '
        b'size rawsize chainsize ratio lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b' readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # Map chain base rev -> 1-based chain id, assigned in discovery order.
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        # Bytes spanned on disk from the chain base to the end of this rev.
        lineardist = revstart + comp - basestart
        # Bytes in that span that belong to other chains (seek overhead).
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # Chain of length 1: no previous rev in the chain.
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            # slicechunk yields the groups of revs a sparse read would
            # fetch together; measure total/max read size per group.
            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
927
927
928
928
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = not opts['dates']
    # --nodates is deprecated but still honored when explicitly given.
    if opts.get('nodates') is not None:
        nodates = True

    if opts.get('datesort'):

        def keyfunc(entry):
            # entry is (filename, state, mode, size, mtime);
            # order by saved mtime, then filename.
            return (entry[4], entry[0])

    else:
        keyfunc = None  # plain tuple order == sort by filename

    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for filename, state, mode, size, mtime in entries:
        if mtime == -1:
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            localized = time.localtime(mtime)
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", localized)
            timestr = encoding.strtolocal(timestr)
        if mode & 0o20000:
            modestr = b'lnk'
        else:
            modestr = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, modestr, size, timestr, filename))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
983
983
984
984
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    # Only the dirstate-v2 docket records an ignore-patterns hash; with
    # dirstate-v1 this command prints nothing at all.
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        hash_len = 20  # 160 bits for SHA-1
        # The hash is stored as the trailing bytes of the tree metadata.
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
999
999
1000
1000
1001 @command(
1001 @command(
1002 b'debugdiscovery',
1002 b'debugdiscovery',
1003 [
1003 [
1004 (b'', b'old', None, _(b'use old-style discovery')),
1004 (b'', b'old', None, _(b'use old-style discovery')),
1005 (
1005 (
1006 b'',
1006 b'',
1007 b'nonheads',
1007 b'nonheads',
1008 None,
1008 None,
1009 _(b'use old-style discovery with non-heads included'),
1009 _(b'use old-style discovery with non-heads included'),
1010 ),
1010 ),
1011 (b'', b'rev', [], b'restrict discovery to this set of revs'),
1011 (b'', b'rev', [], b'restrict discovery to this set of revs'),
1012 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
1012 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
1013 (
1013 (
1014 b'',
1014 b'',
1015 b'local-as-revs',
1015 b'local-as-revs',
1016 b"",
1016 b"",
1017 b'treat local has having these revisions only',
1017 b'treat local has having these revisions only',
1018 ),
1018 ),
1019 (
1019 (
1020 b'',
1020 b'',
1021 b'remote-as-revs',
1021 b'remote-as-revs',
1022 b"",
1022 b"",
1023 b'use local as remote, with only these revisions',
1023 b'use local as remote, with only these revisions',
1024 ),
1024 ),
1025 ]
1025 ]
1026 + cmdutil.remoteopts
1026 + cmdutil.remoteopts
1027 + cmdutil.formatteropts,
1027 + cmdutil.formatteropts,
1028 _(b'[--rev REV] [OTHER]'),
1028 _(b'[--rev REV] [OTHER]'),
1029 )
1029 )
1030 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1030 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1031 """runs the changeset discovery protocol in isolation
1031 """runs the changeset discovery protocol in isolation
1032
1032
1033 The local peer can be "replaced" by a subset of the local repository by
1033 The local peer can be "replaced" by a subset of the local repository by
1034 using the `--local-as-revs` flag. Int he same way, usual `remote` peer can
1034 using the `--local-as-revs` flag. Int he same way, usual `remote` peer can
1035 be "replaced" by a subset of the local repository using the
1035 be "replaced" by a subset of the local repository using the
1036 `--local-as-revs` flag. This is useful to efficiently debug pathological
1036 `--local-as-revs` flag. This is useful to efficiently debug pathological
1037 discovery situation.
1037 discovery situation.
1038
1038
1039 The following developer oriented config are relevant for people playing with this command:
1039 The following developer oriented config are relevant for people playing with this command:
1040
1040
1041 * devel.discovery.exchange-heads=True
1041 * devel.discovery.exchange-heads=True
1042
1042
1043 If False, the discovery will not start with
1043 If False, the discovery will not start with
1044 remote head fetching and local head querying.
1044 remote head fetching and local head querying.
1045
1045
1046 * devel.discovery.grow-sample=True
1046 * devel.discovery.grow-sample=True
1047
1047
1048 If False, the sample size used in set discovery will not be increased
1048 If False, the sample size used in set discovery will not be increased
1049 through the process
1049 through the process
1050
1050
1051 * devel.discovery.grow-sample.dynamic=True
1051 * devel.discovery.grow-sample.dynamic=True
1052
1052
1053 When discovery.grow-sample.dynamic is True, the default, the sample size is
1053 When discovery.grow-sample.dynamic is True, the default, the sample size is
1054 adapted to the shape of the undecided set (it is set to the max of:
1054 adapted to the shape of the undecided set (it is set to the max of:
1055 <target-size>, len(roots(undecided)), len(heads(undecided)
1055 <target-size>, len(roots(undecided)), len(heads(undecided)
1056
1056
1057 * devel.discovery.grow-sample.rate=1.05
1057 * devel.discovery.grow-sample.rate=1.05
1058
1058
1059 the rate at which the sample grow
1059 the rate at which the sample grow
1060
1060
1061 * devel.discovery.randomize=True
1061 * devel.discovery.randomize=True
1062
1062
1063 If andom sampling during discovery are deterministic. It is meant for
1063 If andom sampling during discovery are deterministic. It is meant for
1064 integration tests.
1064 integration tests.
1065
1065
1066 * devel.discovery.sample-size=200
1066 * devel.discovery.sample-size=200
1067
1067
1068 Control the initial size of the discovery sample
1068 Control the initial size of the discovery sample
1069
1069
1070 * devel.discovery.sample-size.initial=100
1070 * devel.discovery.sample-size.initial=100
1071
1071
1072 Control the initial size of the discovery for initial change
1072 Control the initial size of the discovery for initial change
1073 """
1073 """
1074 opts = pycompat.byteskwargs(opts)
1074 opts = pycompat.byteskwargs(opts)
1075 unfi = repo.unfiltered()
1075 unfi = repo.unfiltered()
1076
1076
1077 # setup potential extra filtering
1077 # setup potential extra filtering
1078 local_revs = opts[b"local_as_revs"]
1078 local_revs = opts[b"local_as_revs"]
1079 remote_revs = opts[b"remote_as_revs"]
1079 remote_revs = opts[b"remote_as_revs"]
1080
1080
1081 # make sure tests are repeatable
1081 # make sure tests are repeatable
1082 random.seed(int(opts[b'seed']))
1082 random.seed(int(opts[b'seed']))
1083
1083
1084 if not remote_revs:
1084 if not remote_revs:
1085
1085
1086 remoteurl, branches = urlutil.get_unique_pull_path(
1086 remoteurl, branches = urlutil.get_unique_pull_path(
1087 b'debugdiscovery', repo, ui, remoteurl
1087 b'debugdiscovery', repo, ui, remoteurl
1088 )
1088 )
1089 remote = hg.peer(repo, opts, remoteurl)
1089 remote = hg.peer(repo, opts, remoteurl)
1090 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
1090 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
1091 else:
1091 else:
1092 branches = (None, [])
1092 branches = (None, [])
1093 remote_filtered_revs = logcmdutil.revrange(
1093 remote_filtered_revs = logcmdutil.revrange(
1094 unfi, [b"not (::(%s))" % remote_revs]
1094 unfi, [b"not (::(%s))" % remote_revs]
1095 )
1095 )
1096 remote_filtered_revs = frozenset(remote_filtered_revs)
1096 remote_filtered_revs = frozenset(remote_filtered_revs)
1097
1097
1098 def remote_func(x):
1098 def remote_func(x):
1099 return remote_filtered_revs
1099 return remote_filtered_revs
1100
1100
1101 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1101 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1102
1102
1103 remote = repo.peer()
1103 remote = repo.peer()
1104 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1104 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1105
1105
1106 if local_revs:
1106 if local_revs:
1107 local_filtered_revs = logcmdutil.revrange(
1107 local_filtered_revs = logcmdutil.revrange(
1108 unfi, [b"not (::(%s))" % local_revs]
1108 unfi, [b"not (::(%s))" % local_revs]
1109 )
1109 )
1110 local_filtered_revs = frozenset(local_filtered_revs)
1110 local_filtered_revs = frozenset(local_filtered_revs)
1111
1111
1112 def local_func(x):
1112 def local_func(x):
1113 return local_filtered_revs
1113 return local_filtered_revs
1114
1114
1115 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1115 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1116 repo = repo.filtered(b'debug-discovery-local-filter')
1116 repo = repo.filtered(b'debug-discovery-local-filter')
1117
1117
1118 data = {}
1118 data = {}
1119 if opts.get(b'old'):
1119 if opts.get(b'old'):
1120
1120
1121 def doit(pushedrevs, remoteheads, remote=remote):
1121 def doit(pushedrevs, remoteheads, remote=remote):
1122 if not util.safehasattr(remote, b'branches'):
1122 if not util.safehasattr(remote, b'branches'):
1123 # enable in-client legacy support
1123 # enable in-client legacy support
1124 remote = localrepo.locallegacypeer(remote.local())
1124 remote = localrepo.locallegacypeer(remote.local())
1125 common, _in, hds = treediscovery.findcommonincoming(
1125 common, _in, hds = treediscovery.findcommonincoming(
1126 repo, remote, force=True, audit=data
1126 repo, remote, force=True, audit=data
1127 )
1127 )
1128 common = set(common)
1128 common = set(common)
1129 if not opts.get(b'nonheads'):
1129 if not opts.get(b'nonheads'):
1130 ui.writenoi18n(
1130 ui.writenoi18n(
1131 b"unpruned common: %s\n"
1131 b"unpruned common: %s\n"
1132 % b" ".join(sorted(short(n) for n in common))
1132 % b" ".join(sorted(short(n) for n in common))
1133 )
1133 )
1134
1134
1135 clnode = repo.changelog.node
1135 clnode = repo.changelog.node
1136 common = repo.revs(b'heads(::%ln)', common)
1136 common = repo.revs(b'heads(::%ln)', common)
1137 common = {clnode(r) for r in common}
1137 common = {clnode(r) for r in common}
1138 return common, hds
1138 return common, hds
1139
1139
1140 else:
1140 else:
1141
1141
1142 def doit(pushedrevs, remoteheads, remote=remote):
1142 def doit(pushedrevs, remoteheads, remote=remote):
1143 nodes = None
1143 nodes = None
1144 if pushedrevs:
1144 if pushedrevs:
1145 revs = logcmdutil.revrange(repo, pushedrevs)
1145 revs = logcmdutil.revrange(repo, pushedrevs)
1146 nodes = [repo[r].node() for r in revs]
1146 nodes = [repo[r].node() for r in revs]
1147 common, any, hds = setdiscovery.findcommonheads(
1147 common, any, hds = setdiscovery.findcommonheads(
1148 ui, repo, remote, ancestorsof=nodes, audit=data
1148 ui, repo, remote, ancestorsof=nodes, audit=data
1149 )
1149 )
1150 return common, hds
1150 return common, hds
1151
1151
1152 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1152 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1153 localrevs = opts[b'rev']
1153 localrevs = opts[b'rev']
1154
1154
1155 fm = ui.formatter(b'debugdiscovery', opts)
1155 fm = ui.formatter(b'debugdiscovery', opts)
1156 if fm.strict_format:
1156 if fm.strict_format:
1157
1157
1158 @contextlib.contextmanager
1158 @contextlib.contextmanager
1159 def may_capture_output():
1159 def may_capture_output():
1160 ui.pushbuffer()
1160 ui.pushbuffer()
1161 yield
1161 yield
1162 data[b'output'] = ui.popbuffer()
1162 data[b'output'] = ui.popbuffer()
1163
1163
1164 else:
1164 else:
1165 may_capture_output = util.nullcontextmanager
1165 may_capture_output = util.nullcontextmanager
1166 with may_capture_output():
1166 with may_capture_output():
1167 with util.timedcm('debug-discovery') as t:
1167 with util.timedcm('debug-discovery') as t:
1168 common, hds = doit(localrevs, remoterevs)
1168 common, hds = doit(localrevs, remoterevs)
1169
1169
1170 # compute all statistics
1170 # compute all statistics
1171 heads_common = set(common)
1171 heads_common = set(common)
1172 heads_remote = set(hds)
1172 heads_remote = set(hds)
1173 heads_local = set(repo.heads())
1173 heads_local = set(repo.heads())
1174 # note: they cannot be a local or remote head that is in common and not
1174 # note: they cannot be a local or remote head that is in common and not
1175 # itself a head of common.
1175 # itself a head of common.
1176 heads_common_local = heads_common & heads_local
1176 heads_common_local = heads_common & heads_local
1177 heads_common_remote = heads_common & heads_remote
1177 heads_common_remote = heads_common & heads_remote
1178 heads_common_both = heads_common & heads_remote & heads_local
1178 heads_common_both = heads_common & heads_remote & heads_local
1179
1179
1180 all = repo.revs(b'all()')
1180 all = repo.revs(b'all()')
1181 common = repo.revs(b'::%ln', common)
1181 common = repo.revs(b'::%ln', common)
1182 roots_common = repo.revs(b'roots(::%ld)', common)
1182 roots_common = repo.revs(b'roots(::%ld)', common)
1183 missing = repo.revs(b'not ::%ld', common)
1183 missing = repo.revs(b'not ::%ld', common)
1184 heads_missing = repo.revs(b'heads(%ld)', missing)
1184 heads_missing = repo.revs(b'heads(%ld)', missing)
1185 roots_missing = repo.revs(b'roots(%ld)', missing)
1185 roots_missing = repo.revs(b'roots(%ld)', missing)
1186 assert len(common) + len(missing) == len(all)
1186 assert len(common) + len(missing) == len(all)
1187
1187
1188 initial_undecided = repo.revs(
1188 initial_undecided = repo.revs(
1189 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1189 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1190 )
1190 )
1191 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1191 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1192 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1192 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1193 common_initial_undecided = initial_undecided & common
1193 common_initial_undecided = initial_undecided & common
1194 missing_initial_undecided = initial_undecided & missing
1194 missing_initial_undecided = initial_undecided & missing
1195
1195
1196 data[b'elapsed'] = t.elapsed
1196 data[b'elapsed'] = t.elapsed
1197 data[b'nb-common-heads'] = len(heads_common)
1197 data[b'nb-common-heads'] = len(heads_common)
1198 data[b'nb-common-heads-local'] = len(heads_common_local)
1198 data[b'nb-common-heads-local'] = len(heads_common_local)
1199 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1199 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1200 data[b'nb-common-heads-both'] = len(heads_common_both)
1200 data[b'nb-common-heads-both'] = len(heads_common_both)
1201 data[b'nb-common-roots'] = len(roots_common)
1201 data[b'nb-common-roots'] = len(roots_common)
1202 data[b'nb-head-local'] = len(heads_local)
1202 data[b'nb-head-local'] = len(heads_local)
1203 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1203 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1204 data[b'nb-head-remote'] = len(heads_remote)
1204 data[b'nb-head-remote'] = len(heads_remote)
1205 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1205 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1206 heads_common_remote
1206 heads_common_remote
1207 )
1207 )
1208 data[b'nb-revs'] = len(all)
1208 data[b'nb-revs'] = len(all)
1209 data[b'nb-revs-common'] = len(common)
1209 data[b'nb-revs-common'] = len(common)
1210 data[b'nb-revs-missing'] = len(missing)
1210 data[b'nb-revs-missing'] = len(missing)
1211 data[b'nb-missing-heads'] = len(heads_missing)
1211 data[b'nb-missing-heads'] = len(heads_missing)
1212 data[b'nb-missing-roots'] = len(roots_missing)
1212 data[b'nb-missing-roots'] = len(roots_missing)
1213 data[b'nb-ini_und'] = len(initial_undecided)
1213 data[b'nb-ini_und'] = len(initial_undecided)
1214 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1214 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1215 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1215 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1216 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1216 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1217 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1217 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1218
1218
1219 fm.startitem()
1219 fm.startitem()
1220 fm.data(**pycompat.strkwargs(data))
1220 fm.data(**pycompat.strkwargs(data))
1221 # display discovery summary
1221 # display discovery summary
1222 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1222 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1223 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1223 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1224 fm.plain(b"queries: %(total-queries)9d\n" % data)
1224 fm.plain(b"heads summary:\n")
1225 fm.plain(b"heads summary:\n")
1225 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1226 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1226 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1227 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1227 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1228 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1228 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1229 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1229 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1230 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1230 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1231 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1231 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1232 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1232 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1233 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1233 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1234 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1234 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1235 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1235 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1236 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1236 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1237 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1237 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1238 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1238 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1239 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1239 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1240 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1240 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1241 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1241 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1242 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1242 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1243 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1243 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1244 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1244 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1245 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1245 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1246 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1246 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1247 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1247
1248
1248 if ui.verbose:
1249 if ui.verbose:
1249 fm.plain(
1250 fm.plain(
1250 b"common heads: %s\n"
1251 b"common heads: %s\n"
1251 % b" ".join(sorted(short(n) for n in heads_common))
1252 % b" ".join(sorted(short(n) for n in heads_common))
1252 )
1253 )
1253 fm.end()
1254 fm.end()
1254
1255
1255
1256
1256 _chunksize = 4 << 10
1257 _chunksize = 4 << 10
1257
1258
1258
1259
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The resource is opened through ``urlmod.open`` so it honors the user's
    Mercurial configuration (proxies, authentication, ...).  When ``output``
    is given the payload is written to that file, otherwise it is written to
    the ui.
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, b"wb", _chunksize)
    try:
        # Stream in fixed-size chunks so arbitrarily large downloads never
        # need to be held in memory at once.
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        # Close the response handle (previously leaked).  Only close `dest`
        # when we opened a file ourselves: `dest` may be the ui object,
        # which must stay usable.
        fh.close()
        if output:
            dest.close()
1281
1282
1282
1283
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    # Extensions are listed deterministically, sorted by name.
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        # Locate where the extension was loaded from.  Under an "oxidized"
        # (PyOxidizer) build modules may have no __file__; fall back to the
        # executable path in that case.
        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            # Annotate the name with the tested-with status: internal or
            # tested with this exact version -> nothing; no testedwith
            # declared -> "(untested!)"; otherwise show the most recent
            # version the extension claims to support.
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b' location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
        # The machine-readable "bundled" field is emitted regardless of
        # verbosity; only the human-readable line above is verbose-only.
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1344
1345
1345
1346
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # Pipeline of tree-transformation stages; each may be dumped via
    # --show-stage.  'parsed' is the identity since parsing happens below.
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    # Determine which stages to print: --verbose implies 'parsed' (a
    # deprecated shorthand), --show-stage all selects everything, otherwise
    # validate and honor the explicitly requested stage names.
    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # Run the expression through every stage, printing the intermediate
    # tree whenever that stage was requested.
    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # Collect the candidate file names the matcher will be tested against:
    # every file touched by any revision (--all-files), plus the working
    # directory contents when targeting it, or just the files of `ctx`.
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    # --show-matcher defaults to None, so verbose mode shows it unless the
    # user explicitly passed --no-show-matcher.
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1441
1442
1442
1443
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # --to-report (detection only) is mutually exclusive with actually
    # repairing: neither --from-report nor --dry-run may accompany it.
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    dry_run = opts.get('dry_run')
    to_report = opts.get('to_report')
    from_report = opts.get('from_report')
    paranoid = opts.get('paranoid')
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    # issue6528 only corrupts revlogv1 metadata; refuse to touch anything else.
    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    # All real work (detection, report I/O, rewriting) lives in the rewrite
    # module; this command is just the CLI front-end.
    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=dry_run,
        to_report=to_report,
        from_report=from_report,
        paranoid=paranoid,
    )
1515
1516
1516
1517
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # Column width: the longest variant name, but never narrower than the
    # "format-variant" header itself.
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # Build a "%s:" template padded so all value columns line up.
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # Plain output renders booleans as yes/no but passes strings
            # (anything with startswith) through untouched.
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        # Structured formatters (json, template...) get the raw values.
        formatvalue = pycompat.identity

    # Header row; the config/default columns only exist in verbose mode.
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # Pick highlight labels: repo disagreeing with config is the most
        # notable state, then repo differing from Mercurial's default.
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1587
1588
1588
1589
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # Render a boolean capability probe as b'yes'/b'no'.
        return b'yes' if flag else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    mountpoint = util.getfsmountpoint(path) or b'(unknown)'
    ui.writenoi18n(b'mounted on: %s\n' % mountpoint)
    ui.writenoi18n(b'exec: %s\n' % yesno(util.checkexec(path)))
    fstype = util.getfstype(path) or b'(unknown)'
    ui.writenoi18n(b'fstype: %s\n' % fstype)
    ui.writenoi18n(b'symlink: %s\n' % yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    # Probing case sensitivity needs a scratch file; on failure (e.g. a
    # read-only location) we simply report "(unknown)".
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1611
1612
1612
1613
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    # Open a peer for the target repo and make sure it speaks getbundle.
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = peer.getbundle(b'debug', **args)

    # Map the user-facing compression name onto the on-disk bundle header.
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    requested = opts.get(b'type', b'bzip2').lower()
    bundletype = btypes.get(requested)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1659
1660
1660
1661
1661 @command(b'debugignore', [], b'[FILE]')
1662 @command(b'debugignore', [], b'[FILE]')
1662 def debugignore(ui, repo, *files, **opts):
1663 def debugignore(ui, repo, *files, **opts):
1663 """display the combined ignore pattern and information about ignored files
1664 """display the combined ignore pattern and information about ignored files
1664
1665
1665 With no argument display the combined ignore pattern.
1666 With no argument display the combined ignore pattern.
1666
1667
1667 Given space separated file names, shows if the given file is ignored and
1668 Given space separated file names, shows if the given file is ignored and
1668 if so, show the ignore rule (file and line number) that matched it.
1669 if so, show the ignore rule (file and line number) that matched it.
1669 """
1670 """
1670 ignore = repo.dirstate._ignore
1671 ignore = repo.dirstate._ignore
1671 if not files:
1672 if not files:
1672 # Show all the patterns
1673 # Show all the patterns
1673 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1674 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1674 else:
1675 else:
1675 m = scmutil.match(repo[None], pats=files)
1676 m = scmutil.match(repo[None], pats=files)
1676 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1677 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1677 for f in m.files():
1678 for f in m.files():
1678 nf = util.normpath(f)
1679 nf = util.normpath(f)
1679 ignored = None
1680 ignored = None
1680 ignoredata = None
1681 ignoredata = None
1681 if nf != b'.':
1682 if nf != b'.':
1682 if ignore(nf):
1683 if ignore(nf):
1683 ignored = nf
1684 ignored = nf
1684 ignoredata = repo.dirstate._ignorefileandline(nf)
1685 ignoredata = repo.dirstate._ignorefileandline(nf)
1685 else:
1686 else:
1686 for p in pathutil.finddirs(nf):
1687 for p in pathutil.finddirs(nf):
1687 if ignore(p):
1688 if ignore(p):
1688 ignored = p
1689 ignored = p
1689 ignoredata = repo.dirstate._ignorefileandline(p)
1690 ignoredata = repo.dirstate._ignorefileandline(p)
1690 break
1691 break
1691 if ignored:
1692 if ignored:
1692 if ignored == nf:
1693 if ignored == nf:
1693 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1694 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1694 else:
1695 else:
1695 ui.write(
1696 ui.write(
1696 _(
1697 _(
1697 b"%s is ignored because of "
1698 b"%s is ignored because of "
1698 b"containing directory %s\n"
1699 b"containing directory %s\n"
1699 )
1700 )
1700 % (uipathfn(f), ignored)
1701 % (uipathfn(f), ignored)
1701 )
1702 )
1702 ignorefile, lineno, line = ignoredata
1703 ignorefile, lineno, line = ignoredata
1703 ui.write(
1704 ui.write(
1704 _(b"(ignore rule in %s, line %d: '%s')\n")
1705 _(b"(ignore rule in %s, line %d: '%s')\n")
1705 % (ignorefile, lineno, line)
1706 % (ignorefile, lineno, line)
1706 )
1707 )
1707 else:
1708 else:
1708 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1709 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1709
1710
1710
1711
1711 @command(
1712 @command(
1712 b'debugindex',
1713 b'debugindex',
1713 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1714 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1714 _(b'-c|-m|FILE'),
1715 _(b'-c|-m|FILE'),
1715 )
1716 )
1716 def debugindex(ui, repo, file_=None, **opts):
1717 def debugindex(ui, repo, file_=None, **opts):
1717 """dump index data for a storage primitive"""
1718 """dump index data for a storage primitive"""
1718 opts = pycompat.byteskwargs(opts)
1719 opts = pycompat.byteskwargs(opts)
1719 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1720 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1720
1721
1721 if ui.debugflag:
1722 if ui.debugflag:
1722 shortfn = hex
1723 shortfn = hex
1723 else:
1724 else:
1724 shortfn = short
1725 shortfn = short
1725
1726
1726 idlen = 12
1727 idlen = 12
1727 for i in store:
1728 for i in store:
1728 idlen = len(shortfn(store.node(i)))
1729 idlen = len(shortfn(store.node(i)))
1729 break
1730 break
1730
1731
1731 fm = ui.formatter(b'debugindex', opts)
1732 fm = ui.formatter(b'debugindex', opts)
1732 fm.plain(
1733 fm.plain(
1733 b' rev linkrev %s %s p2\n'
1734 b' rev linkrev %s %s p2\n'
1734 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
1735 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
1735 )
1736 )
1736
1737
1737 for rev in store:
1738 for rev in store:
1738 node = store.node(rev)
1739 node = store.node(rev)
1739 parents = store.parents(node)
1740 parents = store.parents(node)
1740
1741
1741 fm.startitem()
1742 fm.startitem()
1742 fm.write(b'rev', b'%6d ', rev)
1743 fm.write(b'rev', b'%6d ', rev)
1743 fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
1744 fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
1744 fm.write(b'node', b'%s ', shortfn(node))
1745 fm.write(b'node', b'%s ', shortfn(node))
1745 fm.write(b'p1', b'%s ', shortfn(parents[0]))
1746 fm.write(b'p1', b'%s ', shortfn(parents[0]))
1746 fm.write(b'p2', b'%s', shortfn(parents[1]))
1747 fm.write(b'p2', b'%s', shortfn(parents[1]))
1747 fm.plain(b'\n')
1748 fm.plain(b'\n')
1748
1749
1749 fm.end()
1750 fm.end()
1750
1751
1751
1752
1752 @command(
1753 @command(
1753 b'debugindexdot',
1754 b'debugindexdot',
1754 cmdutil.debugrevlogopts,
1755 cmdutil.debugrevlogopts,
1755 _(b'-c|-m|FILE'),
1756 _(b'-c|-m|FILE'),
1756 optionalrepo=True,
1757 optionalrepo=True,
1757 )
1758 )
1758 def debugindexdot(ui, repo, file_=None, **opts):
1759 def debugindexdot(ui, repo, file_=None, **opts):
1759 """dump an index DAG as a graphviz dot file"""
1760 """dump an index DAG as a graphviz dot file"""
1760 opts = pycompat.byteskwargs(opts)
1761 opts = pycompat.byteskwargs(opts)
1761 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1762 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1762 ui.writenoi18n(b"digraph G {\n")
1763 ui.writenoi18n(b"digraph G {\n")
1763 for i in r:
1764 for i in r:
1764 node = r.node(i)
1765 node = r.node(i)
1765 pp = r.parents(node)
1766 pp = r.parents(node)
1766 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1767 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1767 if pp[1] != repo.nullid:
1768 if pp[1] != repo.nullid:
1768 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1769 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1769 ui.write(b"}\n")
1770 ui.write(b"}\n")
1770
1771
1771
1772
1772 @command(b'debugindexstats', [])
1773 @command(b'debugindexstats', [])
1773 def debugindexstats(ui, repo):
1774 def debugindexstats(ui, repo):
1774 """show stats related to the changelog index"""
1775 """show stats related to the changelog index"""
1775 repo.changelog.shortest(repo.nullid, 1)
1776 repo.changelog.shortest(repo.nullid, 1)
1776 index = repo.changelog.index
1777 index = repo.changelog.index
1777 if not util.safehasattr(index, b'stats'):
1778 if not util.safehasattr(index, b'stats'):
1778 raise error.Abort(_(b'debugindexstats only works with native code'))
1779 raise error.Abort(_(b'debugindexstats only works with native code'))
1779 for k, v in sorted(index.stats().items()):
1780 for k, v in sorted(index.stats().items()):
1780 ui.write(b'%s: %d\n' % (k, v))
1781 ui.write(b'%s: %d\n' % (k, v))
1781
1782
1782
1783
1783 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1784 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1784 def debuginstall(ui, **opts):
1785 def debuginstall(ui, **opts):
1785 """test Mercurial installation
1786 """test Mercurial installation
1786
1787
1787 Returns 0 on success.
1788 Returns 0 on success.
1788 """
1789 """
1789 opts = pycompat.byteskwargs(opts)
1790 opts = pycompat.byteskwargs(opts)
1790
1791
1791 problems = 0
1792 problems = 0
1792
1793
1793 fm = ui.formatter(b'debuginstall', opts)
1794 fm = ui.formatter(b'debuginstall', opts)
1794 fm.startitem()
1795 fm.startitem()
1795
1796
1796 # encoding might be unknown or wrong. don't translate these messages.
1797 # encoding might be unknown or wrong. don't translate these messages.
1797 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1798 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1798 err = None
1799 err = None
1799 try:
1800 try:
1800 codecs.lookup(pycompat.sysstr(encoding.encoding))
1801 codecs.lookup(pycompat.sysstr(encoding.encoding))
1801 except LookupError as inst:
1802 except LookupError as inst:
1802 err = stringutil.forcebytestr(inst)
1803 err = stringutil.forcebytestr(inst)
1803 problems += 1
1804 problems += 1
1804 fm.condwrite(
1805 fm.condwrite(
1805 err,
1806 err,
1806 b'encodingerror',
1807 b'encodingerror',
1807 b" %s\n (check that your locale is properly set)\n",
1808 b" %s\n (check that your locale is properly set)\n",
1808 err,
1809 err,
1809 )
1810 )
1810
1811
1811 # Python
1812 # Python
1812 pythonlib = None
1813 pythonlib = None
1813 if util.safehasattr(os, '__file__'):
1814 if util.safehasattr(os, '__file__'):
1814 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1815 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1815 elif getattr(sys, 'oxidized', False):
1816 elif getattr(sys, 'oxidized', False):
1816 pythonlib = pycompat.sysexecutable
1817 pythonlib = pycompat.sysexecutable
1817
1818
1818 fm.write(
1819 fm.write(
1819 b'pythonexe',
1820 b'pythonexe',
1820 _(b"checking Python executable (%s)\n"),
1821 _(b"checking Python executable (%s)\n"),
1821 pycompat.sysexecutable or _(b"unknown"),
1822 pycompat.sysexecutable or _(b"unknown"),
1822 )
1823 )
1823 fm.write(
1824 fm.write(
1824 b'pythonimplementation',
1825 b'pythonimplementation',
1825 _(b"checking Python implementation (%s)\n"),
1826 _(b"checking Python implementation (%s)\n"),
1826 pycompat.sysbytes(platform.python_implementation()),
1827 pycompat.sysbytes(platform.python_implementation()),
1827 )
1828 )
1828 fm.write(
1829 fm.write(
1829 b'pythonver',
1830 b'pythonver',
1830 _(b"checking Python version (%s)\n"),
1831 _(b"checking Python version (%s)\n"),
1831 (b"%d.%d.%d" % sys.version_info[:3]),
1832 (b"%d.%d.%d" % sys.version_info[:3]),
1832 )
1833 )
1833 fm.write(
1834 fm.write(
1834 b'pythonlib',
1835 b'pythonlib',
1835 _(b"checking Python lib (%s)...\n"),
1836 _(b"checking Python lib (%s)...\n"),
1836 pythonlib or _(b"unknown"),
1837 pythonlib or _(b"unknown"),
1837 )
1838 )
1838
1839
1839 try:
1840 try:
1840 from . import rustext # pytype: disable=import-error
1841 from . import rustext # pytype: disable=import-error
1841
1842
1842 rustext.__doc__ # trigger lazy import
1843 rustext.__doc__ # trigger lazy import
1843 except ImportError:
1844 except ImportError:
1844 rustext = None
1845 rustext = None
1845
1846
1846 security = set(sslutil.supportedprotocols)
1847 security = set(sslutil.supportedprotocols)
1847 if sslutil.hassni:
1848 if sslutil.hassni:
1848 security.add(b'sni')
1849 security.add(b'sni')
1849
1850
1850 fm.write(
1851 fm.write(
1851 b'pythonsecurity',
1852 b'pythonsecurity',
1852 _(b"checking Python security support (%s)\n"),
1853 _(b"checking Python security support (%s)\n"),
1853 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1854 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1854 )
1855 )
1855
1856
1856 # These are warnings, not errors. So don't increment problem count. This
1857 # These are warnings, not errors. So don't increment problem count. This
1857 # may change in the future.
1858 # may change in the future.
1858 if b'tls1.2' not in security:
1859 if b'tls1.2' not in security:
1859 fm.plain(
1860 fm.plain(
1860 _(
1861 _(
1861 b' TLS 1.2 not supported by Python install; '
1862 b' TLS 1.2 not supported by Python install; '
1862 b'network connections lack modern security\n'
1863 b'network connections lack modern security\n'
1863 )
1864 )
1864 )
1865 )
1865 if b'sni' not in security:
1866 if b'sni' not in security:
1866 fm.plain(
1867 fm.plain(
1867 _(
1868 _(
1868 b' SNI not supported by Python install; may have '
1869 b' SNI not supported by Python install; may have '
1869 b'connectivity issues with some servers\n'
1870 b'connectivity issues with some servers\n'
1870 )
1871 )
1871 )
1872 )
1872
1873
1873 fm.plain(
1874 fm.plain(
1874 _(
1875 _(
1875 b"checking Rust extensions (%s)\n"
1876 b"checking Rust extensions (%s)\n"
1876 % (b'missing' if rustext is None else b'installed')
1877 % (b'missing' if rustext is None else b'installed')
1877 ),
1878 ),
1878 )
1879 )
1879
1880
1880 # TODO print CA cert info
1881 # TODO print CA cert info
1881
1882
1882 # hg version
1883 # hg version
1883 hgver = util.version()
1884 hgver = util.version()
1884 fm.write(
1885 fm.write(
1885 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1886 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1886 )
1887 )
1887 fm.write(
1888 fm.write(
1888 b'hgverextra',
1889 b'hgverextra',
1889 _(b"checking Mercurial custom build (%s)\n"),
1890 _(b"checking Mercurial custom build (%s)\n"),
1890 b'+'.join(hgver.split(b'+')[1:]),
1891 b'+'.join(hgver.split(b'+')[1:]),
1891 )
1892 )
1892
1893
1893 # compiled modules
1894 # compiled modules
1894 hgmodules = None
1895 hgmodules = None
1895 if util.safehasattr(sys.modules[__name__], '__file__'):
1896 if util.safehasattr(sys.modules[__name__], '__file__'):
1896 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1897 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1897 elif getattr(sys, 'oxidized', False):
1898 elif getattr(sys, 'oxidized', False):
1898 hgmodules = pycompat.sysexecutable
1899 hgmodules = pycompat.sysexecutable
1899
1900
1900 fm.write(
1901 fm.write(
1901 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1902 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1902 )
1903 )
1903 fm.write(
1904 fm.write(
1904 b'hgmodules',
1905 b'hgmodules',
1905 _(b"checking installed modules (%s)...\n"),
1906 _(b"checking installed modules (%s)...\n"),
1906 hgmodules or _(b"unknown"),
1907 hgmodules or _(b"unknown"),
1907 )
1908 )
1908
1909
1909 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1910 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1910 rustext = rustandc # for now, that's the only case
1911 rustext = rustandc # for now, that's the only case
1911 cext = policy.policy in (b'c', b'allow') or rustandc
1912 cext = policy.policy in (b'c', b'allow') or rustandc
1912 nopure = cext or rustext
1913 nopure = cext or rustext
1913 if nopure:
1914 if nopure:
1914 err = None
1915 err = None
1915 try:
1916 try:
1916 if cext:
1917 if cext:
1917 from .cext import ( # pytype: disable=import-error
1918 from .cext import ( # pytype: disable=import-error
1918 base85,
1919 base85,
1919 bdiff,
1920 bdiff,
1920 mpatch,
1921 mpatch,
1921 osutil,
1922 osutil,
1922 )
1923 )
1923
1924
1924 # quiet pyflakes
1925 # quiet pyflakes
1925 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1926 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1926 if rustext:
1927 if rustext:
1927 from .rustext import ( # pytype: disable=import-error
1928 from .rustext import ( # pytype: disable=import-error
1928 ancestor,
1929 ancestor,
1929 dirstate,
1930 dirstate,
1930 )
1931 )
1931
1932
1932 dir(ancestor), dir(dirstate) # quiet pyflakes
1933 dir(ancestor), dir(dirstate) # quiet pyflakes
1933 except Exception as inst:
1934 except Exception as inst:
1934 err = stringutil.forcebytestr(inst)
1935 err = stringutil.forcebytestr(inst)
1935 problems += 1
1936 problems += 1
1936 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1937 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1937
1938
1938 compengines = util.compengines._engines.values()
1939 compengines = util.compengines._engines.values()
1939 fm.write(
1940 fm.write(
1940 b'compengines',
1941 b'compengines',
1941 _(b'checking registered compression engines (%s)\n'),
1942 _(b'checking registered compression engines (%s)\n'),
1942 fm.formatlist(
1943 fm.formatlist(
1943 sorted(e.name() for e in compengines),
1944 sorted(e.name() for e in compengines),
1944 name=b'compengine',
1945 name=b'compengine',
1945 fmt=b'%s',
1946 fmt=b'%s',
1946 sep=b', ',
1947 sep=b', ',
1947 ),
1948 ),
1948 )
1949 )
1949 fm.write(
1950 fm.write(
1950 b'compenginesavail',
1951 b'compenginesavail',
1951 _(b'checking available compression engines (%s)\n'),
1952 _(b'checking available compression engines (%s)\n'),
1952 fm.formatlist(
1953 fm.formatlist(
1953 sorted(e.name() for e in compengines if e.available()),
1954 sorted(e.name() for e in compengines if e.available()),
1954 name=b'compengine',
1955 name=b'compengine',
1955 fmt=b'%s',
1956 fmt=b'%s',
1956 sep=b', ',
1957 sep=b', ',
1957 ),
1958 ),
1958 )
1959 )
1959 wirecompengines = compression.compengines.supportedwireengines(
1960 wirecompengines = compression.compengines.supportedwireengines(
1960 compression.SERVERROLE
1961 compression.SERVERROLE
1961 )
1962 )
1962 fm.write(
1963 fm.write(
1963 b'compenginesserver',
1964 b'compenginesserver',
1964 _(
1965 _(
1965 b'checking available compression engines '
1966 b'checking available compression engines '
1966 b'for wire protocol (%s)\n'
1967 b'for wire protocol (%s)\n'
1967 ),
1968 ),
1968 fm.formatlist(
1969 fm.formatlist(
1969 [e.name() for e in wirecompengines if e.wireprotosupport()],
1970 [e.name() for e in wirecompengines if e.wireprotosupport()],
1970 name=b'compengine',
1971 name=b'compengine',
1971 fmt=b'%s',
1972 fmt=b'%s',
1972 sep=b', ',
1973 sep=b', ',
1973 ),
1974 ),
1974 )
1975 )
1975 re2 = b'missing'
1976 re2 = b'missing'
1976 if util._re2:
1977 if util._re2:
1977 re2 = b'available'
1978 re2 = b'available'
1978 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1979 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1979 fm.data(re2=bool(util._re2))
1980 fm.data(re2=bool(util._re2))
1980
1981
1981 # templates
1982 # templates
1982 p = templater.templatedir()
1983 p = templater.templatedir()
1983 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
1984 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
1984 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1985 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1985 if p:
1986 if p:
1986 (m, fp) = templater.try_open_template(b"map-cmdline.default")
1987 (m, fp) = templater.try_open_template(b"map-cmdline.default")
1987 if m:
1988 if m:
1988 # template found, check if it is working
1989 # template found, check if it is working
1989 err = None
1990 err = None
1990 try:
1991 try:
1991 templater.templater.frommapfile(m)
1992 templater.templater.frommapfile(m)
1992 except Exception as inst:
1993 except Exception as inst:
1993 err = stringutil.forcebytestr(inst)
1994 err = stringutil.forcebytestr(inst)
1994 p = None
1995 p = None
1995 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1996 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1996 else:
1997 else:
1997 p = None
1998 p = None
1998 fm.condwrite(
1999 fm.condwrite(
1999 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
2000 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
2000 )
2001 )
2001 fm.condwrite(
2002 fm.condwrite(
2002 not m,
2003 not m,
2003 b'defaulttemplatenotfound',
2004 b'defaulttemplatenotfound',
2004 _(b" template '%s' not found\n"),
2005 _(b" template '%s' not found\n"),
2005 b"default",
2006 b"default",
2006 )
2007 )
2007 if not p:
2008 if not p:
2008 problems += 1
2009 problems += 1
2009 fm.condwrite(
2010 fm.condwrite(
2010 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2011 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2011 )
2012 )
2012
2013
2013 # editor
2014 # editor
2014 editor = ui.geteditor()
2015 editor = ui.geteditor()
2015 editor = util.expandpath(editor)
2016 editor = util.expandpath(editor)
2016 editorbin = procutil.shellsplit(editor)[0]
2017 editorbin = procutil.shellsplit(editor)[0]
2017 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2018 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2018 cmdpath = procutil.findexe(editorbin)
2019 cmdpath = procutil.findexe(editorbin)
2019 fm.condwrite(
2020 fm.condwrite(
2020 not cmdpath and editor == b'vi',
2021 not cmdpath and editor == b'vi',
2021 b'vinotfound',
2022 b'vinotfound',
2022 _(
2023 _(
2023 b" No commit editor set and can't find %s in PATH\n"
2024 b" No commit editor set and can't find %s in PATH\n"
2024 b" (specify a commit editor in your configuration"
2025 b" (specify a commit editor in your configuration"
2025 b" file)\n"
2026 b" file)\n"
2026 ),
2027 ),
2027 not cmdpath and editor == b'vi' and editorbin,
2028 not cmdpath and editor == b'vi' and editorbin,
2028 )
2029 )
2029 fm.condwrite(
2030 fm.condwrite(
2030 not cmdpath and editor != b'vi',
2031 not cmdpath and editor != b'vi',
2031 b'editornotfound',
2032 b'editornotfound',
2032 _(
2033 _(
2033 b" Can't find editor '%s' in PATH\n"
2034 b" Can't find editor '%s' in PATH\n"
2034 b" (specify a commit editor in your configuration"
2035 b" (specify a commit editor in your configuration"
2035 b" file)\n"
2036 b" file)\n"
2036 ),
2037 ),
2037 not cmdpath and editorbin,
2038 not cmdpath and editorbin,
2038 )
2039 )
2039 if not cmdpath and editor != b'vi':
2040 if not cmdpath and editor != b'vi':
2040 problems += 1
2041 problems += 1
2041
2042
2042 # check username
2043 # check username
2043 username = None
2044 username = None
2044 err = None
2045 err = None
2045 try:
2046 try:
2046 username = ui.username()
2047 username = ui.username()
2047 except error.Abort as e:
2048 except error.Abort as e:
2048 err = e.message
2049 err = e.message
2049 problems += 1
2050 problems += 1
2050
2051
2051 fm.condwrite(
2052 fm.condwrite(
2052 username, b'username', _(b"checking username (%s)\n"), username
2053 username, b'username', _(b"checking username (%s)\n"), username
2053 )
2054 )
2054 fm.condwrite(
2055 fm.condwrite(
2055 err,
2056 err,
2056 b'usernameerror',
2057 b'usernameerror',
2057 _(
2058 _(
2058 b"checking username...\n %s\n"
2059 b"checking username...\n %s\n"
2059 b" (specify a username in your configuration file)\n"
2060 b" (specify a username in your configuration file)\n"
2060 ),
2061 ),
2061 err,
2062 err,
2062 )
2063 )
2063
2064
2064 for name, mod in extensions.extensions():
2065 for name, mod in extensions.extensions():
2065 handler = getattr(mod, 'debuginstall', None)
2066 handler = getattr(mod, 'debuginstall', None)
2066 if handler is not None:
2067 if handler is not None:
2067 problems += handler(ui, fm)
2068 problems += handler(ui, fm)
2068
2069
2069 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2070 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2070 if not problems:
2071 if not problems:
2071 fm.data(problems=problems)
2072 fm.data(problems=problems)
2072 fm.condwrite(
2073 fm.condwrite(
2073 problems,
2074 problems,
2074 b'problems',
2075 b'problems',
2075 _(b"%d problems detected, please check your install!\n"),
2076 _(b"%d problems detected, please check your install!\n"),
2076 problems,
2077 problems,
2077 )
2078 )
2078 fm.end()
2079 fm.end()
2079
2080
2080 return problems
2081 return problems
2081
2082
2082
2083
2083 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
2084 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
2084 def debugknown(ui, repopath, *ids, **opts):
2085 def debugknown(ui, repopath, *ids, **opts):
2085 """test whether node ids are known to a repo
2086 """test whether node ids are known to a repo
2086
2087
2087 Every ID must be a full-length hex node id string. Returns a list of 0s
2088 Every ID must be a full-length hex node id string. Returns a list of 0s
2088 and 1s indicating unknown/known.
2089 and 1s indicating unknown/known.
2089 """
2090 """
2090 opts = pycompat.byteskwargs(opts)
2091 opts = pycompat.byteskwargs(opts)
2091 repo = hg.peer(ui, opts, repopath)
2092 repo = hg.peer(ui, opts, repopath)
2092 if not repo.capable(b'known'):
2093 if not repo.capable(b'known'):
2093 raise error.Abort(b"known() not supported by target repository")
2094 raise error.Abort(b"known() not supported by target repository")
2094 flags = repo.known([bin(s) for s in ids])
2095 flags = repo.known([bin(s) for s in ids])
2095 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
2096 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
2096
2097
2097
2098
2098 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
2099 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
2099 def debuglabelcomplete(ui, repo, *args):
2100 def debuglabelcomplete(ui, repo, *args):
2100 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
2101 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
2101 debugnamecomplete(ui, repo, *args)
2102 debugnamecomplete(ui, repo, *args)
2102
2103
2103
2104
2104 @command(
2105 @command(
2105 b'debuglocks',
2106 b'debuglocks',
2106 [
2107 [
2107 (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
2108 (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
2108 (
2109 (
2109 b'W',
2110 b'W',
2110 b'force-free-wlock',
2111 b'force-free-wlock',
2111 None,
2112 None,
2112 _(b'free the working state lock (DANGEROUS)'),
2113 _(b'free the working state lock (DANGEROUS)'),
2113 ),
2114 ),
2114 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
2115 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
2115 (
2116 (
2116 b'S',
2117 b'S',
2117 b'set-wlock',
2118 b'set-wlock',
2118 None,
2119 None,
2119 _(b'set the working state lock until stopped'),
2120 _(b'set the working state lock until stopped'),
2120 ),
2121 ),
2121 ],
2122 ],
2122 _(b'[OPTION]...'),
2123 _(b'[OPTION]...'),
2123 )
2124 )
2124 def debuglocks(ui, repo, **opts):
2125 def debuglocks(ui, repo, **opts):
2125 """show or modify state of locks
2126 """show or modify state of locks
2126
2127
2127 By default, this command will show which locks are held. This
2128 By default, this command will show which locks are held. This
2128 includes the user and process holding the lock, the amount of time
2129 includes the user and process holding the lock, the amount of time
2129 the lock has been held, and the machine name where the process is
2130 the lock has been held, and the machine name where the process is
2130 running if it's not local.
2131 running if it's not local.
2131
2132
2132 Locks protect the integrity of Mercurial's data, so should be
2133 Locks protect the integrity of Mercurial's data, so should be
2133 treated with care. System crashes or other interruptions may cause
2134 treated with care. System crashes or other interruptions may cause
2134 locks to not be properly released, though Mercurial will usually
2135 locks to not be properly released, though Mercurial will usually
2135 detect and remove such stale locks automatically.
2136 detect and remove such stale locks automatically.
2136
2137
2137 However, detecting stale locks may not always be possible (for
2138 However, detecting stale locks may not always be possible (for
2138 instance, on a shared filesystem). Removing locks may also be
2139 instance, on a shared filesystem). Removing locks may also be
2139 blocked by filesystem permissions.
2140 blocked by filesystem permissions.
2140
2141
2141 Setting a lock will prevent other commands from changing the data.
2142 Setting a lock will prevent other commands from changing the data.
2142 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
2143 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
2143 The set locks are removed when the command exits.
2144 The set locks are removed when the command exits.
2144
2145
2145 Returns 0 if no locks are held.
2146 Returns 0 if no locks are held.
2146
2147
2147 """
2148 """
2148
2149
2149 if opts.get('force_free_lock'):
2150 if opts.get('force_free_lock'):
2150 repo.svfs.unlink(b'lock')
2151 repo.svfs.unlink(b'lock')
2151 if opts.get('force_free_wlock'):
2152 if opts.get('force_free_wlock'):
2152 repo.vfs.unlink(b'wlock')
2153 repo.vfs.unlink(b'wlock')
2153 if opts.get('force_free_lock') or opts.get('force_free_wlock'):
2154 if opts.get('force_free_lock') or opts.get('force_free_wlock'):
2154 return 0
2155 return 0
2155
2156
2156 locks = []
2157 locks = []
2157 try:
2158 try:
2158 if opts.get('set_wlock'):
2159 if opts.get('set_wlock'):
2159 try:
2160 try:
2160 locks.append(repo.wlock(False))
2161 locks.append(repo.wlock(False))
2161 except error.LockHeld:
2162 except error.LockHeld:
2162 raise error.Abort(_(b'wlock is already held'))
2163 raise error.Abort(_(b'wlock is already held'))
2163 if opts.get('set_lock'):
2164 if opts.get('set_lock'):
2164 try:
2165 try:
2165 locks.append(repo.lock(False))
2166 locks.append(repo.lock(False))
2166 except error.LockHeld:
2167 except error.LockHeld:
2167 raise error.Abort(_(b'lock is already held'))
2168 raise error.Abort(_(b'lock is already held'))
2168 if len(locks):
2169 if len(locks):
2169 ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
2170 ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
2170 return 0
2171 return 0
2171 finally:
2172 finally:
2172 release(*locks)
2173 release(*locks)
2173
2174
2174 now = time.time()
2175 now = time.time()
2175 held = 0
2176 held = 0
2176
2177
2177 def report(vfs, name, method):
2178 def report(vfs, name, method):
2178 # this causes stale locks to get reaped for more accurate reporting
2179 # this causes stale locks to get reaped for more accurate reporting
2179 try:
2180 try:
2180 l = method(False)
2181 l = method(False)
2181 except error.LockHeld:
2182 except error.LockHeld:
2182 l = None
2183 l = None
2183
2184
2184 if l:
2185 if l:
2185 l.release()
2186 l.release()
2186 else:
2187 else:
2187 try:
2188 try:
2188 st = vfs.lstat(name)
2189 st = vfs.lstat(name)
2189 age = now - st[stat.ST_MTIME]
2190 age = now - st[stat.ST_MTIME]
2190 user = util.username(st.st_uid)
2191 user = util.username(st.st_uid)
2191 locker = vfs.readlock(name)
2192 locker = vfs.readlock(name)
2192 if b":" in locker:
2193 if b":" in locker:
2193 host, pid = locker.split(b':')
2194 host, pid = locker.split(b':')
2194 if host == socket.gethostname():
2195 if host == socket.gethostname():
2195 locker = b'user %s, process %s' % (user or b'None', pid)
2196 locker = b'user %s, process %s' % (user or b'None', pid)
2196 else:
2197 else:
2197 locker = b'user %s, process %s, host %s' % (
2198 locker = b'user %s, process %s, host %s' % (
2198 user or b'None',
2199 user or b'None',
2199 pid,
2200 pid,
2200 host,
2201 host,
2201 )
2202 )
2202 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
2203 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
2203 return 1
2204 return 1
2204 except OSError as e:
2205 except OSError as e:
2205 if e.errno != errno.ENOENT:
2206 if e.errno != errno.ENOENT:
2206 raise
2207 raise
2207
2208
2208 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
2209 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
2209 return 0
2210 return 0
2210
2211
2211 held += report(repo.svfs, b"lock", repo.lock)
2212 held += report(repo.svfs, b"lock", repo.lock)
2212 held += report(repo.vfs, b"wlock", repo.wlock)
2213 held += report(repo.vfs, b"wlock", repo.wlock)
2213
2214
2214 return held
2215 return held
2215
2216
2216
2217
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # Fetch the fulltext cache off the root manifest storage. Not every
        # revlog implementation carries one, hence the AttributeError guard.
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        # Clearing also drops the persisted on-disk data, so take the wlock.
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
            return

    if add:
        # Populate the cache with the requested manifest nodes; reading a
        # manifest has the side effect of storing its fulltext in the cache.
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revisision in cache too
            return

    # No action requested: report the cache contents, most recent first.
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.get to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2290
2291
2291
2292
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        # Report which on-disk merge-state format is in effect by reading
        # both record formats and comparing them.
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # Default template: one line per commit, then a block per file with
        # merge records and per-file extras, then repository-level extras.
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # Emit the two commits being merged (local / other) with optional labels.
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # Emit per-file merge records. The layout of the positional state tuple
    # depends on the record type (content merge vs. path conflict).
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                # Content-merge record: hash, paths and nodes of the three
                # sides plus the local flags.
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                # Path-conflict record: renamed destination and which side
                # performed the rename.
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # Emit extras for files that carry metadata but have no merge record.
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(ms.allextras().items()):
        if f in ms:
            # If file is in mergestate, we have already processed it's extras
            continue
        for k, v in d.items():
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2399
2400
2400
2401
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    # Gather candidates from every namespace except branches; branches are
    # handled separately below so that only *open* branches are offered.
    candidates = set()
    for nsname, ns in repo.names.items():
        if nsname == b'branches':
            continue
        candidates.update(ns.listnames(repo))
    for branch, _heads, _tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(branch)

    # An empty argument list means "complete everything".
    prefixes = args or [b'']
    matches = {
        candidate
        for prefix in prefixes
        for candidate in candidates
        if candidate.startswith(prefix)
    }
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2423
2424
2424
2425
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    if opts['dump_new']:
        # Serialize a fresh nodemap from the (unfiltered) changelog index.
        changelog = repo.unfiltered().changelog
        index = changelog.index
        if util.safehasattr(index, "nodemap_data_all"):
            blob = index.nodemap_data_all()
        else:
            blob = nodemap.persistent_data(index)
        ui.write(blob)
    elif opts['dump_disk']:
        # Dump the raw bytes of the nodemap as currently persisted on disk.
        changelog = repo.unfiltered().changelog
        nm_data = nodemap.persisted_data(changelog)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        # Validate the persisted nodemap against the live changelog index.
        changelog = repo.unfiltered().changelog
        nm_data = nodemap.persisted_data(changelog)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, changelog.index, data)
    elif opts['metadata']:
        # Print the docket (metadata) describing the persisted nodemap.
        changelog = repo.unfiltered().changelog
        nm_data = nodemap.persisted_data(changelog)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2486
2487
2487
2488
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Parse a full hexadecimal node id without requiring it to exist
        # locally (unlike revsingle/revrange).
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise TypeError()
            return n
        except TypeError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    if opts.get(b'delete'):
        # --delete mode: remove markers by their integer indices.
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # Creation mode: record a marker from precursor to successors.
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        # Manual lock/transaction nesting: the transaction must be released
        # before the lock, and released even on failure.
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # Display mode: list markers, optionally restricted to --rev.
        if opts[b'rev']:
            revs = logcmdutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2637
2638
2638
2639
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    # Resolve the requested revision (working context when --rev is absent)
    # and print each recorded copy as "source -> destination".
    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'), default=None)
    copymap = ctx.p1copies()
    for dest in copymap:
        ui.write(b'%s -> %s\n' % (copymap[dest], dest))
2651
2652
2652
2653
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    # Bug fix: this function was previously named ``debugp1copies`` (a
    # copy-paste slip), which rebound and shadowed the module-level
    # ``debugp1copies`` defined just above. The command registration (done
    # via the byte-string name in @command) was unaffected, but the module
    # namespace was wrong; the function now carries its proper name.
    opts = pycompat.byteskwargs(opts)
    # Resolve the requested revision (working context when --rev is absent)
    # and print each copy recorded against the second parent.
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2665
2666
2666
2667
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Return (files, dirs) of dirstate entries matching ``path`` whose
        # state letter is in ``acceptable``.
        dirstate = repo.dirstate
        prefix = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if prefix != repo.root and not prefix.startswith(rootdir):
            # Outside the repository: nothing to complete.
            return [], []
        if os.path.isdir(prefix):
            prefix += b'/'
        # Make the prefix repo-relative, using '/' separators to match the
        # dirstate's path encoding on platforms with a different os.sep.
        prefix = prefix[len(rootdir) :]
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            prefix = prefix.replace(pycompat.ossep, b'/')
        prefixlen = len(prefix)
        wantfull = opts['full']
        files = set()
        dirs = set()
        for fname, entry in dirstate.items():
            if not fname.startswith(prefix) or entry.state not in acceptable:
                continue
            if fixpaths:
                fname = fname.replace(b'/', pycompat.ossep)
            if wantfull:
                files.add(fname)
                continue
            # Without --full, stop at the next path separator: report the
            # directory prefix instead of every file underneath it.
            sep = fname.find(pycompat.ossep, prefixlen)
            if sep >= 0:
                dirs.add(fname[:sep])
            else:
                files.add(fname)
        return files, dirs

    # Build the set of acceptable dirstate state letters from the flags;
    # when no flag is given, everything ('nmar') is acceptable.
    states = b''
    if opts['normal']:
        states += b'nm'
    if opts['added']:
        states += b'a'
    if opts['removed']:
        states += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        matchedfiles, matcheddirs = complete(spec, states or b'nmar')
        files.update(matchedfiles)
        dirs.update(matcheddirs)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2735
2736
2736
2737
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # Resolve both endpoints, build a matcher from the patterns against the
    # first revision, and print every detected copy sorted by destination.
    fromctx = scmutil.revsingle(repo, rev1)
    toctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(fromctx, pats, opts)
    copymap = copies.pathcopies(fromctx, toctx, matcher)
    for dest in sorted(copymap):
        ui.write(b'%s -> %s\n' % (copymap[dest], dest))
2750
2751
2751
2752
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Force peer-request logging on; the output is only shown under --debug.
    logging_override = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(logging_override):
        peer = hg.peer(ui, {}, path)

    try:
        islocal = peer.local() is not None
        pushable = peer.canpush()

        def yesno(flag):
            return _(b'yes') if flag else _(b'no')

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % yesno(islocal))
        ui.write(_(b'pushable: %s\n') % yesno(pushable))
    finally:
        # Always close the peer connection, even if probing it failed.
        peer.close()
2775
2776
2776
2777
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)

    # --tool wins over every other configuration source; surface it in
    # verbose mode so the user can see why it was picked.
    config_overrides = {}
    if opts[b'tool']:
        config_overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(config_overrides, b'debugmergepatterns'):
        # Report the remaining high-priority sources (HGMERGE, ui.merge)
        # when they are set; shown only with -v/--verbose.
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        matcher = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(matcher):
            fctx = ctx[path]
            # Suppress the tool-probing chatter unless --debug was given.
            if ui.debugflag:
                quiet = util.nullcontextmanager()
            else:
                quiet = ui.silent(error=True)
            with quiet:
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
2861
2862
2862
2863
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    peer = hg.peer(ui, {}, repopath)
    try:
        if not keyinfo:
            # Listing mode: dump every key/value pair in the namespace,
            # escaped and tab-separated, in sorted order.
            for key, value in sorted(peer.listkeys(namespace).items()):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(key), stringutil.escapestr(value))
                )
        else:
            # Update mode: attempt the old -> new transition for one key
            # and report the remote's verdict.
            key, old, new = keyinfo
            with peer.commandexecutor() as executor:
                outcome = executor.callcommand(
                    b'pushkey',
                    {
                        b'namespace': namespace,
                        b'key': key,
                        b'old': old,
                        b'new': new,
                    },
                ).result()

            ui.status(pycompat.bytestr(outcome) + b'\n')
            # Exit status 0 on success (truthy outcome), 1 otherwise.
            return not outcome
    finally:
        peer.close()
2898
2899
2899
2900
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    # Compare the parent vectors ("pvec") of two revisions and print
    # their relationship plus depth/distance metrics.
    ctx_a = scmutil.revsingle(repo, a)
    ctx_b = scmutil.revsingle(repo, b)
    vec_a = pvec.ctxpvec(ctx_a)
    vec_b = pvec.ctxpvec(ctx_b)
    # NOTE(review): if none of these four comparisons holds, ``rel``
    # stays unbound and the final write raises NameError — presumably
    # the cases (=, >, <, |) are exhaustive for pvecs; confirm against
    # the pvec module before relying on that.
    if vec_a == vec_b:
        rel = b"="
    elif vec_a > vec_b:
        rel = b">"
    elif vec_a < vec_b:
        rel = b"<"
    elif vec_a | vec_b:
        rel = b"|"
    ui.write(_(b"a: %s\n") % vec_a)
    ui.write(_(b"b: %s\n") % vec_b)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (vec_a._depth, vec_b._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(vec_a._depth - vec_b._depth),
            pvec._hamming(vec_a._vec, vec_b._vec),
            vec_a.distance(vec_b),
            rel,
        )
    )
2926
2927
2927
2928
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # None tells rebuild() to reset every file; --minimal narrows
        # this to just the inconsistent ones below.
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            in_manifest = set(ctx.manifest().keys())
            in_dirstate = set(dirstate)
            only_manifest = in_manifest - in_dirstate
            only_dirstate = in_dirstate - in_manifest
            # Files known only to the dirstate that are not pending adds
            # are inconsistent and need rebuilding.
            not_added = {
                f for f in only_dirstate if not dirstate.get_entry(f).added
            }
            changedfiles = only_manifest | not_added

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2975
2976
2976
2977
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    # Normalize the keyword options to bytes keys, then delegate the
    # actual work to the repair module.
    byte_opts = pycompat.byteskwargs(opts)
    only_data = byte_opts.get(b"only_data")
    repair.rebuildfncache(ui, repo, only_data)
2993
2994
2994
2995
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for abspath in ctx.walk(matcher):
        fctx = ctx[abspath]
        # renamed() returns (source path, source filenode) for a copy
        # source, or a falsy value when the file was not renamed.
        renamed_from = fctx.filelog().renamed(fctx.filenode())
        relpath = repo.pathto(abspath)
        if renamed_from:
            ui.write(
                _(b"%s renamed from %s:%s\n")
                % (relpath, renamed_from[0], hex(renamed_from[1]))
            )
        else:
            ui.write(_(b"%s not renamed\n") % relpath)
3014
3015
3015
3016
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # One requirement per line, in deterministic (sorted) order.
    for requirement in sorted(repo.requirements):
        ui.write(b"%s\n" % requirement)
3021
3022
3022
3023
3023 @command(
3024 @command(
3024 b'debugrevlog',
3025 b'debugrevlog',
3025 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3026 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3026 _(b'-c|-m|FILE'),
3027 _(b'-c|-m|FILE'),
3027 optionalrepo=True,
3028 optionalrepo=True,
3028 )
3029 )
3029 def debugrevlog(ui, repo, file_=None, **opts):
3030 def debugrevlog(ui, repo, file_=None, **opts):
3030 """show data and statistics about a revlog"""
3031 """show data and statistics about a revlog"""
3031 opts = pycompat.byteskwargs(opts)
3032 opts = pycompat.byteskwargs(opts)
3032 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3033 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3033
3034
3034 if opts.get(b"dump"):
3035 if opts.get(b"dump"):
3035 numrevs = len(r)
3036 numrevs = len(r)
3036 ui.write(
3037 ui.write(
3037 (
3038 (
3038 b"# rev p1rev p2rev start end deltastart base p1 p2"
3039 b"# rev p1rev p2rev start end deltastart base p1 p2"
3039 b" rawsize totalsize compression heads chainlen\n"
3040 b" rawsize totalsize compression heads chainlen\n"
3040 )
3041 )
3041 )
3042 )
3042 ts = 0
3043 ts = 0
3043 heads = set()
3044 heads = set()
3044
3045
3045 for rev in pycompat.xrange(numrevs):
3046 for rev in pycompat.xrange(numrevs):
3046 dbase = r.deltaparent(rev)
3047 dbase = r.deltaparent(rev)
3047 if dbase == -1:
3048 if dbase == -1:
3048 dbase = rev
3049 dbase = rev
3049 cbase = r.chainbase(rev)
3050 cbase = r.chainbase(rev)
3050 clen = r.chainlen(rev)
3051 clen = r.chainlen(rev)
3051 p1, p2 = r.parentrevs(rev)
3052 p1, p2 = r.parentrevs(rev)
3052 rs = r.rawsize(rev)
3053 rs = r.rawsize(rev)
3053 ts = ts + rs
3054 ts = ts + rs
3054 heads -= set(r.parentrevs(rev))
3055 heads -= set(r.parentrevs(rev))
3055 heads.add(rev)
3056 heads.add(rev)
3056 try:
3057 try:
3057 compression = ts / r.end(rev)
3058 compression = ts / r.end(rev)
3058 except ZeroDivisionError:
3059 except ZeroDivisionError:
3059 compression = 0
3060 compression = 0
3060 ui.write(
3061 ui.write(
3061 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3062 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3062 b"%11d %5d %8d\n"
3063 b"%11d %5d %8d\n"
3063 % (
3064 % (
3064 rev,
3065 rev,
3065 p1,
3066 p1,
3066 p2,
3067 p2,
3067 r.start(rev),
3068 r.start(rev),
3068 r.end(rev),
3069 r.end(rev),
3069 r.start(dbase),
3070 r.start(dbase),
3070 r.start(cbase),
3071 r.start(cbase),
3071 r.start(p1),
3072 r.start(p1),
3072 r.start(p2),
3073 r.start(p2),
3073 rs,
3074 rs,
3074 ts,
3075 ts,
3075 compression,
3076 compression,
3076 len(heads),
3077 len(heads),
3077 clen,
3078 clen,
3078 )
3079 )
3079 )
3080 )
3080 return 0
3081 return 0
3081
3082
3082 format = r._format_version
3083 format = r._format_version
3083 v = r._format_flags
3084 v = r._format_flags
3084 flags = []
3085 flags = []
3085 gdelta = False
3086 gdelta = False
3086 if v & revlog.FLAG_INLINE_DATA:
3087 if v & revlog.FLAG_INLINE_DATA:
3087 flags.append(b'inline')
3088 flags.append(b'inline')
3088 if v & revlog.FLAG_GENERALDELTA:
3089 if v & revlog.FLAG_GENERALDELTA:
3089 gdelta = True
3090 gdelta = True
3090 flags.append(b'generaldelta')
3091 flags.append(b'generaldelta')
3091 if not flags:
3092 if not flags:
3092 flags = [b'(none)']
3093 flags = [b'(none)']
3093
3094
3094 ### tracks merge vs single parent
3095 ### tracks merge vs single parent
3095 nummerges = 0
3096 nummerges = 0
3096
3097
3097 ### tracks ways the "delta" are build
3098 ### tracks ways the "delta" are build
3098 # nodelta
3099 # nodelta
3099 numempty = 0
3100 numempty = 0
3100 numemptytext = 0
3101 numemptytext = 0
3101 numemptydelta = 0
3102 numemptydelta = 0
3102 # full file content
3103 # full file content
3103 numfull = 0
3104 numfull = 0
3104 # intermediate snapshot against a prior snapshot
3105 # intermediate snapshot against a prior snapshot
3105 numsemi = 0
3106 numsemi = 0
3106 # snapshot count per depth
3107 # snapshot count per depth
3107 numsnapdepth = collections.defaultdict(lambda: 0)
3108 numsnapdepth = collections.defaultdict(lambda: 0)
3108 # delta against previous revision
3109 # delta against previous revision
3109 numprev = 0
3110 numprev = 0
3110 # delta against first or second parent (not prev)
3111 # delta against first or second parent (not prev)
3111 nump1 = 0
3112 nump1 = 0
3112 nump2 = 0
3113 nump2 = 0
3113 # delta against neither prev nor parents
3114 # delta against neither prev nor parents
3114 numother = 0
3115 numother = 0
3115 # delta against prev that are also first or second parent
3116 # delta against prev that are also first or second parent
3116 # (details of `numprev`)
3117 # (details of `numprev`)
3117 nump1prev = 0
3118 nump1prev = 0
3118 nump2prev = 0
3119 nump2prev = 0
3119
3120
3120 # data about delta chain of each revs
3121 # data about delta chain of each revs
3121 chainlengths = []
3122 chainlengths = []
3122 chainbases = []
3123 chainbases = []
3123 chainspans = []
3124 chainspans = []
3124
3125
3125 # data about each revision
3126 # data about each revision
3126 datasize = [None, 0, 0]
3127 datasize = [None, 0, 0]
3127 fullsize = [None, 0, 0]
3128 fullsize = [None, 0, 0]
3128 semisize = [None, 0, 0]
3129 semisize = [None, 0, 0]
3129 # snapshot count per depth
3130 # snapshot count per depth
3130 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3131 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3131 deltasize = [None, 0, 0]
3132 deltasize = [None, 0, 0]
3132 chunktypecounts = {}
3133 chunktypecounts = {}
3133 chunktypesizes = {}
3134 chunktypesizes = {}
3134
3135
3135 def addsize(size, l):
3136 def addsize(size, l):
3136 if l[0] is None or size < l[0]:
3137 if l[0] is None or size < l[0]:
3137 l[0] = size
3138 l[0] = size
3138 if size > l[1]:
3139 if size > l[1]:
3139 l[1] = size
3140 l[1] = size
3140 l[2] += size
3141 l[2] += size
3141
3142
3142 numrevs = len(r)
3143 numrevs = len(r)
3143 for rev in pycompat.xrange(numrevs):
3144 for rev in pycompat.xrange(numrevs):
3144 p1, p2 = r.parentrevs(rev)
3145 p1, p2 = r.parentrevs(rev)
3145 delta = r.deltaparent(rev)
3146 delta = r.deltaparent(rev)
3146 if format > 0:
3147 if format > 0:
3147 addsize(r.rawsize(rev), datasize)
3148 addsize(r.rawsize(rev), datasize)
3148 if p2 != nullrev:
3149 if p2 != nullrev:
3149 nummerges += 1
3150 nummerges += 1
3150 size = r.length(rev)
3151 size = r.length(rev)
3151 if delta == nullrev:
3152 if delta == nullrev:
3152 chainlengths.append(0)
3153 chainlengths.append(0)
3153 chainbases.append(r.start(rev))
3154 chainbases.append(r.start(rev))
3154 chainspans.append(size)
3155 chainspans.append(size)
3155 if size == 0:
3156 if size == 0:
3156 numempty += 1
3157 numempty += 1
3157 numemptytext += 1
3158 numemptytext += 1
3158 else:
3159 else:
3159 numfull += 1
3160 numfull += 1
3160 numsnapdepth[0] += 1
3161 numsnapdepth[0] += 1
3161 addsize(size, fullsize)
3162 addsize(size, fullsize)
3162 addsize(size, snapsizedepth[0])
3163 addsize(size, snapsizedepth[0])
3163 else:
3164 else:
3164 chainlengths.append(chainlengths[delta] + 1)
3165 chainlengths.append(chainlengths[delta] + 1)
3165 baseaddr = chainbases[delta]
3166 baseaddr = chainbases[delta]
3166 revaddr = r.start(rev)
3167 revaddr = r.start(rev)
3167 chainbases.append(baseaddr)
3168 chainbases.append(baseaddr)
3168 chainspans.append((revaddr - baseaddr) + size)
3169 chainspans.append((revaddr - baseaddr) + size)
3169 if size == 0:
3170 if size == 0:
3170 numempty += 1
3171 numempty += 1
3171 numemptydelta += 1
3172 numemptydelta += 1
3172 elif r.issnapshot(rev):
3173 elif r.issnapshot(rev):
3173 addsize(size, semisize)
3174 addsize(size, semisize)
3174 numsemi += 1
3175 numsemi += 1
3175 depth = r.snapshotdepth(rev)
3176 depth = r.snapshotdepth(rev)
3176 numsnapdepth[depth] += 1
3177 numsnapdepth[depth] += 1
3177 addsize(size, snapsizedepth[depth])
3178 addsize(size, snapsizedepth[depth])
3178 else:
3179 else:
3179 addsize(size, deltasize)
3180 addsize(size, deltasize)
3180 if delta == rev - 1:
3181 if delta == rev - 1:
3181 numprev += 1
3182 numprev += 1
3182 if delta == p1:
3183 if delta == p1:
3183 nump1prev += 1
3184 nump1prev += 1
3184 elif delta == p2:
3185 elif delta == p2:
3185 nump2prev += 1
3186 nump2prev += 1
3186 elif delta == p1:
3187 elif delta == p1:
3187 nump1 += 1
3188 nump1 += 1
3188 elif delta == p2:
3189 elif delta == p2:
3189 nump2 += 1
3190 nump2 += 1
3190 elif delta != nullrev:
3191 elif delta != nullrev:
3191 numother += 1
3192 numother += 1
3192
3193
3193 # Obtain data on the raw chunks in the revlog.
3194 # Obtain data on the raw chunks in the revlog.
3194 if util.safehasattr(r, b'_getsegmentforrevs'):
3195 if util.safehasattr(r, b'_getsegmentforrevs'):
3195 segment = r._getsegmentforrevs(rev, rev)[1]
3196 segment = r._getsegmentforrevs(rev, rev)[1]
3196 else:
3197 else:
3197 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3198 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3198 if segment:
3199 if segment:
3199 chunktype = bytes(segment[0:1])
3200 chunktype = bytes(segment[0:1])
3200 else:
3201 else:
3201 chunktype = b'empty'
3202 chunktype = b'empty'
3202
3203
3203 if chunktype not in chunktypecounts:
3204 if chunktype not in chunktypecounts:
3204 chunktypecounts[chunktype] = 0
3205 chunktypecounts[chunktype] = 0
3205 chunktypesizes[chunktype] = 0
3206 chunktypesizes[chunktype] = 0
3206
3207
3207 chunktypecounts[chunktype] += 1
3208 chunktypecounts[chunktype] += 1
3208 chunktypesizes[chunktype] += size
3209 chunktypesizes[chunktype] += size
3209
3210
3210 # Adjust size min value for empty cases
3211 # Adjust size min value for empty cases
3211 for size in (datasize, fullsize, semisize, deltasize):
3212 for size in (datasize, fullsize, semisize, deltasize):
3212 if size[0] is None:
3213 if size[0] is None:
3213 size[0] = 0
3214 size[0] = 0
3214
3215
3215 numdeltas = numrevs - numfull - numempty - numsemi
3216 numdeltas = numrevs - numfull - numempty - numsemi
3216 numoprev = numprev - nump1prev - nump2prev
3217 numoprev = numprev - nump1prev - nump2prev
3217 totalrawsize = datasize[2]
3218 totalrawsize = datasize[2]
3218 datasize[2] /= numrevs
3219 datasize[2] /= numrevs
3219 fulltotal = fullsize[2]
3220 fulltotal = fullsize[2]
3220 if numfull == 0:
3221 if numfull == 0:
3221 fullsize[2] = 0
3222 fullsize[2] = 0
3222 else:
3223 else:
3223 fullsize[2] /= numfull
3224 fullsize[2] /= numfull
3224 semitotal = semisize[2]
3225 semitotal = semisize[2]
3225 snaptotal = {}
3226 snaptotal = {}
3226 if numsemi > 0:
3227 if numsemi > 0:
3227 semisize[2] /= numsemi
3228 semisize[2] /= numsemi
3228 for depth in snapsizedepth:
3229 for depth in snapsizedepth:
3229 snaptotal[depth] = snapsizedepth[depth][2]
3230 snaptotal[depth] = snapsizedepth[depth][2]
3230 snapsizedepth[depth][2] /= numsnapdepth[depth]
3231 snapsizedepth[depth][2] /= numsnapdepth[depth]
3231
3232
3232 deltatotal = deltasize[2]
3233 deltatotal = deltasize[2]
3233 if numdeltas > 0:
3234 if numdeltas > 0:
3234 deltasize[2] /= numdeltas
3235 deltasize[2] /= numdeltas
3235 totalsize = fulltotal + semitotal + deltatotal
3236 totalsize = fulltotal + semitotal + deltatotal
3236 avgchainlen = sum(chainlengths) / numrevs
3237 avgchainlen = sum(chainlengths) / numrevs
3237 maxchainlen = max(chainlengths)
3238 maxchainlen = max(chainlengths)
3238 maxchainspan = max(chainspans)
3239 maxchainspan = max(chainspans)
3239 compratio = 1
3240 compratio = 1
3240 if totalsize:
3241 if totalsize:
3241 compratio = totalrawsize / totalsize
3242 compratio = totalrawsize / totalsize
3242
3243
3243 basedfmtstr = b'%%%dd\n'
3244 basedfmtstr = b'%%%dd\n'
3244 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3245 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3245
3246
3246 def dfmtstr(max):
3247 def dfmtstr(max):
3247 return basedfmtstr % len(str(max))
3248 return basedfmtstr % len(str(max))
3248
3249
3249 def pcfmtstr(max, padding=0):
3250 def pcfmtstr(max, padding=0):
3250 return basepcfmtstr % (len(str(max)), b' ' * padding)
3251 return basepcfmtstr % (len(str(max)), b' ' * padding)
3251
3252
3252 def pcfmt(value, total):
3253 def pcfmt(value, total):
3253 if total:
3254 if total:
3254 return (value, 100 * float(value) / total)
3255 return (value, 100 * float(value) / total)
3255 else:
3256 else:
3256 return value, 100.0
3257 return value, 100.0
3257
3258
3258 ui.writenoi18n(b'format : %d\n' % format)
3259 ui.writenoi18n(b'format : %d\n' % format)
3259 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3260 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3260
3261
3261 ui.write(b'\n')
3262 ui.write(b'\n')
3262 fmt = pcfmtstr(totalsize)
3263 fmt = pcfmtstr(totalsize)
3263 fmt2 = dfmtstr(totalsize)
3264 fmt2 = dfmtstr(totalsize)
3264 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3265 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3265 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3266 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3266 ui.writenoi18n(
3267 ui.writenoi18n(
3267 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3268 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3268 )
3269 )
3269 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3270 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3270 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3271 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3271 ui.writenoi18n(
3272 ui.writenoi18n(
3272 b' text : '
3273 b' text : '
3273 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3274 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3274 )
3275 )
3275 ui.writenoi18n(
3276 ui.writenoi18n(
3276 b' delta : '
3277 b' delta : '
3277 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3278 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3278 )
3279 )
3279 ui.writenoi18n(
3280 ui.writenoi18n(
3280 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3281 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3281 )
3282 )
3282 for depth in sorted(numsnapdepth):
3283 for depth in sorted(numsnapdepth):
3283 ui.write(
3284 ui.write(
3284 (b' lvl-%-3d : ' % depth)
3285 (b' lvl-%-3d : ' % depth)
3285 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3286 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3286 )
3287 )
3287 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3288 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3288 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3289 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3289 ui.writenoi18n(
3290 ui.writenoi18n(
3290 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3291 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3291 )
3292 )
3292 for depth in sorted(numsnapdepth):
3293 for depth in sorted(numsnapdepth):
3293 ui.write(
3294 ui.write(
3294 (b' lvl-%-3d : ' % depth)
3295 (b' lvl-%-3d : ' % depth)
3295 + fmt % pcfmt(snaptotal[depth], totalsize)
3296 + fmt % pcfmt(snaptotal[depth], totalsize)
3296 )
3297 )
3297 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3298 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3298
3299
3299 def fmtchunktype(chunktype):
3300 def fmtchunktype(chunktype):
3300 if chunktype == b'empty':
3301 if chunktype == b'empty':
3301 return b' %s : ' % chunktype
3302 return b' %s : ' % chunktype
3302 elif chunktype in pycompat.bytestr(string.ascii_letters):
3303 elif chunktype in pycompat.bytestr(string.ascii_letters):
3303 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3304 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3304 else:
3305 else:
3305 return b' 0x%s : ' % hex(chunktype)
3306 return b' 0x%s : ' % hex(chunktype)
3306
3307
3307 ui.write(b'\n')
3308 ui.write(b'\n')
3308 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3309 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3309 for chunktype in sorted(chunktypecounts):
3310 for chunktype in sorted(chunktypecounts):
3310 ui.write(fmtchunktype(chunktype))
3311 ui.write(fmtchunktype(chunktype))
3311 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3312 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3312 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3313 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3313 for chunktype in sorted(chunktypecounts):
3314 for chunktype in sorted(chunktypecounts):
3314 ui.write(fmtchunktype(chunktype))
3315 ui.write(fmtchunktype(chunktype))
3315 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3316 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3316
3317
3317 ui.write(b'\n')
3318 ui.write(b'\n')
3318 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3319 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3319 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3320 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3320 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3321 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3321 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3322 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3322 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3323 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3323
3324
3324 if format > 0:
3325 if format > 0:
3325 ui.write(b'\n')
3326 ui.write(b'\n')
3326 ui.writenoi18n(
3327 ui.writenoi18n(
3327 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3328 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3328 % tuple(datasize)
3329 % tuple(datasize)
3329 )
3330 )
3330 ui.writenoi18n(
3331 ui.writenoi18n(
3331 b'full revision size (min/max/avg) : %d / %d / %d\n'
3332 b'full revision size (min/max/avg) : %d / %d / %d\n'
3332 % tuple(fullsize)
3333 % tuple(fullsize)
3333 )
3334 )
3334 ui.writenoi18n(
3335 ui.writenoi18n(
3335 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3336 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3336 % tuple(semisize)
3337 % tuple(semisize)
3337 )
3338 )
3338 for depth in sorted(snapsizedepth):
3339 for depth in sorted(snapsizedepth):
3339 if depth == 0:
3340 if depth == 0:
3340 continue
3341 continue
3341 ui.writenoi18n(
3342 ui.writenoi18n(
3342 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3343 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3343 % ((depth,) + tuple(snapsizedepth[depth]))
3344 % ((depth,) + tuple(snapsizedepth[depth]))
3344 )
3345 )
3345 ui.writenoi18n(
3346 ui.writenoi18n(
3346 b'delta size (min/max/avg) : %d / %d / %d\n'
3347 b'delta size (min/max/avg) : %d / %d / %d\n'
3347 % tuple(deltasize)
3348 % tuple(deltasize)
3348 )
3349 )
3349
3350
3350 if numdeltas > 0:
3351 if numdeltas > 0:
3351 ui.write(b'\n')
3352 ui.write(b'\n')
3352 fmt = pcfmtstr(numdeltas)
3353 fmt = pcfmtstr(numdeltas)
3353 fmt2 = pcfmtstr(numdeltas, 4)
3354 fmt2 = pcfmtstr(numdeltas, 4)
3354 ui.writenoi18n(
3355 ui.writenoi18n(
3355 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3356 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3356 )
3357 )
3357 if numprev > 0:
3358 if numprev > 0:
3358 ui.writenoi18n(
3359 ui.writenoi18n(
3359 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3360 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3360 )
3361 )
3361 ui.writenoi18n(
3362 ui.writenoi18n(
3362 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3363 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3363 )
3364 )
3364 ui.writenoi18n(
3365 ui.writenoi18n(
3365 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3366 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3366 )
3367 )
3367 if gdelta:
3368 if gdelta:
3368 ui.writenoi18n(
3369 ui.writenoi18n(
3369 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3370 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3370 )
3371 )
3371 ui.writenoi18n(
3372 ui.writenoi18n(
3372 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3373 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3373 )
3374 )
3374 ui.writenoi18n(
3375 ui.writenoi18n(
3375 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3376 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3376 )
3377 )
3377
3378
3378
3379
3379 @command(
3380 @command(
3380 b'debugrevlogindex',
3381 b'debugrevlogindex',
3381 cmdutil.debugrevlogopts
3382 cmdutil.debugrevlogopts
3382 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3383 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3383 _(b'[-f FORMAT] -c|-m|FILE'),
3384 _(b'[-f FORMAT] -c|-m|FILE'),
3384 optionalrepo=True,
3385 optionalrepo=True,
3385 )
3386 )
3386 def debugrevlogindex(ui, repo, file_=None, **opts):
3387 def debugrevlogindex(ui, repo, file_=None, **opts):
3387 """dump the contents of a revlog index"""
3388 """dump the contents of a revlog index"""
3388 opts = pycompat.byteskwargs(opts)
3389 opts = pycompat.byteskwargs(opts)
3389 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3390 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3390 format = opts.get(b'format', 0)
3391 format = opts.get(b'format', 0)
3391 if format not in (0, 1):
3392 if format not in (0, 1):
3392 raise error.Abort(_(b"unknown format %d") % format)
3393 raise error.Abort(_(b"unknown format %d") % format)
3393
3394
3394 if ui.debugflag:
3395 if ui.debugflag:
3395 shortfn = hex
3396 shortfn = hex
3396 else:
3397 else:
3397 shortfn = short
3398 shortfn = short
3398
3399
3399 # There might not be anything in r, so have a sane default
3400 # There might not be anything in r, so have a sane default
3400 idlen = 12
3401 idlen = 12
3401 for i in r:
3402 for i in r:
3402 idlen = len(shortfn(r.node(i)))
3403 idlen = len(shortfn(r.node(i)))
3403 break
3404 break
3404
3405
3405 if format == 0:
3406 if format == 0:
3406 if ui.verbose:
3407 if ui.verbose:
3407 ui.writenoi18n(
3408 ui.writenoi18n(
3408 b" rev offset length linkrev %s %s p2\n"
3409 b" rev offset length linkrev %s %s p2\n"
3409 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3410 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3410 )
3411 )
3411 else:
3412 else:
3412 ui.writenoi18n(
3413 ui.writenoi18n(
3413 b" rev linkrev %s %s p2\n"
3414 b" rev linkrev %s %s p2\n"
3414 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3415 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3415 )
3416 )
3416 elif format == 1:
3417 elif format == 1:
3417 if ui.verbose:
3418 if ui.verbose:
3418 ui.writenoi18n(
3419 ui.writenoi18n(
3419 (
3420 (
3420 b" rev flag offset length size link p1"
3421 b" rev flag offset length size link p1"
3421 b" p2 %s\n"
3422 b" p2 %s\n"
3422 )
3423 )
3423 % b"nodeid".rjust(idlen)
3424 % b"nodeid".rjust(idlen)
3424 )
3425 )
3425 else:
3426 else:
3426 ui.writenoi18n(
3427 ui.writenoi18n(
3427 b" rev flag size link p1 p2 %s\n"
3428 b" rev flag size link p1 p2 %s\n"
3428 % b"nodeid".rjust(idlen)
3429 % b"nodeid".rjust(idlen)
3429 )
3430 )
3430
3431
3431 for i in r:
3432 for i in r:
3432 node = r.node(i)
3433 node = r.node(i)
3433 if format == 0:
3434 if format == 0:
3434 try:
3435 try:
3435 pp = r.parents(node)
3436 pp = r.parents(node)
3436 except Exception:
3437 except Exception:
3437 pp = [repo.nullid, repo.nullid]
3438 pp = [repo.nullid, repo.nullid]
3438 if ui.verbose:
3439 if ui.verbose:
3439 ui.write(
3440 ui.write(
3440 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3441 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3441 % (
3442 % (
3442 i,
3443 i,
3443 r.start(i),
3444 r.start(i),
3444 r.length(i),
3445 r.length(i),
3445 r.linkrev(i),
3446 r.linkrev(i),
3446 shortfn(node),
3447 shortfn(node),
3447 shortfn(pp[0]),
3448 shortfn(pp[0]),
3448 shortfn(pp[1]),
3449 shortfn(pp[1]),
3449 )
3450 )
3450 )
3451 )
3451 else:
3452 else:
3452 ui.write(
3453 ui.write(
3453 b"% 6d % 7d %s %s %s\n"
3454 b"% 6d % 7d %s %s %s\n"
3454 % (
3455 % (
3455 i,
3456 i,
3456 r.linkrev(i),
3457 r.linkrev(i),
3457 shortfn(node),
3458 shortfn(node),
3458 shortfn(pp[0]),
3459 shortfn(pp[0]),
3459 shortfn(pp[1]),
3460 shortfn(pp[1]),
3460 )
3461 )
3461 )
3462 )
3462 elif format == 1:
3463 elif format == 1:
3463 pr = r.parentrevs(i)
3464 pr = r.parentrevs(i)
3464 if ui.verbose:
3465 if ui.verbose:
3465 ui.write(
3466 ui.write(
3466 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3467 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3467 % (
3468 % (
3468 i,
3469 i,
3469 r.flags(i),
3470 r.flags(i),
3470 r.start(i),
3471 r.start(i),
3471 r.length(i),
3472 r.length(i),
3472 r.rawsize(i),
3473 r.rawsize(i),
3473 r.linkrev(i),
3474 r.linkrev(i),
3474 pr[0],
3475 pr[0],
3475 pr[1],
3476 pr[1],
3476 shortfn(node),
3477 shortfn(node),
3477 )
3478 )
3478 )
3479 )
3479 else:
3480 else:
3480 ui.write(
3481 ui.write(
3481 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3482 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3482 % (
3483 % (
3483 i,
3484 i,
3484 r.flags(i),
3485 r.flags(i),
3485 r.rawsize(i),
3486 r.rawsize(i),
3486 r.linkrev(i),
3487 r.linkrev(i),
3487 pr[0],
3488 pr[0],
3488 pr[1],
3489 pr[1],
3489 shortfn(node),
3490 shortfn(node),
3490 )
3491 )
3491 )
3492 )
3492
3493
3493
3494
3494 @command(
3495 @command(
3495 b'debugrevspec',
3496 b'debugrevspec',
3496 [
3497 [
3497 (
3498 (
3498 b'',
3499 b'',
3499 b'optimize',
3500 b'optimize',
3500 None,
3501 None,
3501 _(b'print parsed tree after optimizing (DEPRECATED)'),
3502 _(b'print parsed tree after optimizing (DEPRECATED)'),
3502 ),
3503 ),
3503 (
3504 (
3504 b'',
3505 b'',
3505 b'show-revs',
3506 b'show-revs',
3506 True,
3507 True,
3507 _(b'print list of result revisions (default)'),
3508 _(b'print list of result revisions (default)'),
3508 ),
3509 ),
3509 (
3510 (
3510 b's',
3511 b's',
3511 b'show-set',
3512 b'show-set',
3512 None,
3513 None,
3513 _(b'print internal representation of result set'),
3514 _(b'print internal representation of result set'),
3514 ),
3515 ),
3515 (
3516 (
3516 b'p',
3517 b'p',
3517 b'show-stage',
3518 b'show-stage',
3518 [],
3519 [],
3519 _(b'print parsed tree at the given stage'),
3520 _(b'print parsed tree at the given stage'),
3520 _(b'NAME'),
3521 _(b'NAME'),
3521 ),
3522 ),
3522 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3523 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3523 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3524 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3524 ],
3525 ],
3525 b'REVSPEC',
3526 b'REVSPEC',
3526 )
3527 )
3527 def debugrevspec(ui, repo, expr, **opts):
3528 def debugrevspec(ui, repo, expr, **opts):
3528 """parse and apply a revision specification
3529 """parse and apply a revision specification
3529
3530
3530 Use -p/--show-stage option to print the parsed tree at the given stages.
3531 Use -p/--show-stage option to print the parsed tree at the given stages.
3531 Use -p all to print tree at every stage.
3532 Use -p all to print tree at every stage.
3532
3533
3533 Use --no-show-revs option with -s or -p to print only the set
3534 Use --no-show-revs option with -s or -p to print only the set
3534 representation or the parsed tree respectively.
3535 representation or the parsed tree respectively.
3535
3536
3536 Use --verify-optimized to compare the optimized result with the unoptimized
3537 Use --verify-optimized to compare the optimized result with the unoptimized
3537 one. Returns 1 if the optimized result differs.
3538 one. Returns 1 if the optimized result differs.
3538 """
3539 """
3539 opts = pycompat.byteskwargs(opts)
3540 opts = pycompat.byteskwargs(opts)
3540 aliases = ui.configitems(b'revsetalias')
3541 aliases = ui.configitems(b'revsetalias')
3541 stages = [
3542 stages = [
3542 (b'parsed', lambda tree: tree),
3543 (b'parsed', lambda tree: tree),
3543 (
3544 (
3544 b'expanded',
3545 b'expanded',
3545 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3546 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3546 ),
3547 ),
3547 (b'concatenated', revsetlang.foldconcat),
3548 (b'concatenated', revsetlang.foldconcat),
3548 (b'analyzed', revsetlang.analyze),
3549 (b'analyzed', revsetlang.analyze),
3549 (b'optimized', revsetlang.optimize),
3550 (b'optimized', revsetlang.optimize),
3550 ]
3551 ]
3551 if opts[b'no_optimized']:
3552 if opts[b'no_optimized']:
3552 stages = stages[:-1]
3553 stages = stages[:-1]
3553 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3554 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3554 raise error.Abort(
3555 raise error.Abort(
3555 _(b'cannot use --verify-optimized with --no-optimized')
3556 _(b'cannot use --verify-optimized with --no-optimized')
3556 )
3557 )
3557 stagenames = {n for n, f in stages}
3558 stagenames = {n for n, f in stages}
3558
3559
3559 showalways = set()
3560 showalways = set()
3560 showchanged = set()
3561 showchanged = set()
3561 if ui.verbose and not opts[b'show_stage']:
3562 if ui.verbose and not opts[b'show_stage']:
3562 # show parsed tree by --verbose (deprecated)
3563 # show parsed tree by --verbose (deprecated)
3563 showalways.add(b'parsed')
3564 showalways.add(b'parsed')
3564 showchanged.update([b'expanded', b'concatenated'])
3565 showchanged.update([b'expanded', b'concatenated'])
3565 if opts[b'optimize']:
3566 if opts[b'optimize']:
3566 showalways.add(b'optimized')
3567 showalways.add(b'optimized')
3567 if opts[b'show_stage'] and opts[b'optimize']:
3568 if opts[b'show_stage'] and opts[b'optimize']:
3568 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3569 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3569 if opts[b'show_stage'] == [b'all']:
3570 if opts[b'show_stage'] == [b'all']:
3570 showalways.update(stagenames)
3571 showalways.update(stagenames)
3571 else:
3572 else:
3572 for n in opts[b'show_stage']:
3573 for n in opts[b'show_stage']:
3573 if n not in stagenames:
3574 if n not in stagenames:
3574 raise error.Abort(_(b'invalid stage name: %s') % n)
3575 raise error.Abort(_(b'invalid stage name: %s') % n)
3575 showalways.update(opts[b'show_stage'])
3576 showalways.update(opts[b'show_stage'])
3576
3577
3577 treebystage = {}
3578 treebystage = {}
3578 printedtree = None
3579 printedtree = None
3579 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3580 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3580 for n, f in stages:
3581 for n, f in stages:
3581 treebystage[n] = tree = f(tree)
3582 treebystage[n] = tree = f(tree)
3582 if n in showalways or (n in showchanged and tree != printedtree):
3583 if n in showalways or (n in showchanged and tree != printedtree):
3583 if opts[b'show_stage'] or n != b'parsed':
3584 if opts[b'show_stage'] or n != b'parsed':
3584 ui.write(b"* %s:\n" % n)
3585 ui.write(b"* %s:\n" % n)
3585 ui.write(revsetlang.prettyformat(tree), b"\n")
3586 ui.write(revsetlang.prettyformat(tree), b"\n")
3586 printedtree = tree
3587 printedtree = tree
3587
3588
3588 if opts[b'verify_optimized']:
3589 if opts[b'verify_optimized']:
3589 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3590 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3590 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3591 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3591 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3592 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3592 ui.writenoi18n(
3593 ui.writenoi18n(
3593 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3594 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3594 )
3595 )
3595 ui.writenoi18n(
3596 ui.writenoi18n(
3596 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3597 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3597 )
3598 )
3598 arevs = list(arevs)
3599 arevs = list(arevs)
3599 brevs = list(brevs)
3600 brevs = list(brevs)
3600 if arevs == brevs:
3601 if arevs == brevs:
3601 return 0
3602 return 0
3602 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3603 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3603 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3604 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3604 sm = difflib.SequenceMatcher(None, arevs, brevs)
3605 sm = difflib.SequenceMatcher(None, arevs, brevs)
3605 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3606 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3606 if tag in ('delete', 'replace'):
3607 if tag in ('delete', 'replace'):
3607 for c in arevs[alo:ahi]:
3608 for c in arevs[alo:ahi]:
3608 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3609 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3609 if tag in ('insert', 'replace'):
3610 if tag in ('insert', 'replace'):
3610 for c in brevs[blo:bhi]:
3611 for c in brevs[blo:bhi]:
3611 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3612 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3612 if tag == 'equal':
3613 if tag == 'equal':
3613 for c in arevs[alo:ahi]:
3614 for c in arevs[alo:ahi]:
3614 ui.write(b' %d\n' % c)
3615 ui.write(b' %d\n' % c)
3615 return 1
3616 return 1
3616
3617
3617 func = revset.makematcher(tree)
3618 func = revset.makematcher(tree)
3618 revs = func(repo)
3619 revs = func(repo)
3619 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3620 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3620 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3621 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3621 if not opts[b'show_revs']:
3622 if not opts[b'show_revs']:
3622 return
3623 return
3623 for c in revs:
3624 for c in revs:
3624 ui.write(b"%d\n" % c)
3625 ui.write(b"%d\n" % c)
3625
3626
3626
3627
3627 @command(
3628 @command(
3628 b'debugserve',
3629 b'debugserve',
3629 [
3630 [
3630 (
3631 (
3631 b'',
3632 b'',
3632 b'sshstdio',
3633 b'sshstdio',
3633 False,
3634 False,
3634 _(b'run an SSH server bound to process handles'),
3635 _(b'run an SSH server bound to process handles'),
3635 ),
3636 ),
3636 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3637 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3637 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3638 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3638 ],
3639 ],
3639 b'',
3640 b'',
3640 )
3641 )
3641 def debugserve(ui, repo, **opts):
3642 def debugserve(ui, repo, **opts):
3642 """run a server with advanced settings
3643 """run a server with advanced settings
3643
3644
3644 This command is similar to :hg:`serve`. It exists partially as a
3645 This command is similar to :hg:`serve`. It exists partially as a
3645 workaround to the fact that ``hg serve --stdio`` must have specific
3646 workaround to the fact that ``hg serve --stdio`` must have specific
3646 arguments for security reasons.
3647 arguments for security reasons.
3647 """
3648 """
3648 opts = pycompat.byteskwargs(opts)
3649 opts = pycompat.byteskwargs(opts)
3649
3650
3650 if not opts[b'sshstdio']:
3651 if not opts[b'sshstdio']:
3651 raise error.Abort(_(b'only --sshstdio is currently supported'))
3652 raise error.Abort(_(b'only --sshstdio is currently supported'))
3652
3653
3653 logfh = None
3654 logfh = None
3654
3655
3655 if opts[b'logiofd'] and opts[b'logiofile']:
3656 if opts[b'logiofd'] and opts[b'logiofile']:
3656 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3657 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3657
3658
3658 if opts[b'logiofd']:
3659 if opts[b'logiofd']:
3659 # Ideally we would be line buffered. But line buffering in binary
3660 # Ideally we would be line buffered. But line buffering in binary
3660 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3661 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3661 # buffering could have performance impacts. But since this isn't
3662 # buffering could have performance impacts. But since this isn't
3662 # performance critical code, it should be fine.
3663 # performance critical code, it should be fine.
3663 try:
3664 try:
3664 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3665 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3665 except OSError as e:
3666 except OSError as e:
3666 if e.errno != errno.ESPIPE:
3667 if e.errno != errno.ESPIPE:
3667 raise
3668 raise
3668 # can't seek a pipe, so `ab` mode fails on py3
3669 # can't seek a pipe, so `ab` mode fails on py3
3669 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3670 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3670 elif opts[b'logiofile']:
3671 elif opts[b'logiofile']:
3671 logfh = open(opts[b'logiofile'], b'ab', 0)
3672 logfh = open(opts[b'logiofile'], b'ab', 0)
3672
3673
3673 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3674 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3674 s.serve_forever()
3675 s.serve_forever()
3675
3676
3676
3677
3677 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3678 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3678 def debugsetparents(ui, repo, rev1, rev2=None):
3679 def debugsetparents(ui, repo, rev1, rev2=None):
3679 """manually set the parents of the current working directory (DANGEROUS)
3680 """manually set the parents of the current working directory (DANGEROUS)
3680
3681
3681 This command is not what you are looking for and should not be used. Using
3682 This command is not what you are looking for and should not be used. Using
3682 this command will most certainly results in slight corruption of the file
3683 this command will most certainly results in slight corruption of the file
3683 level histories withing your repository. DO NOT USE THIS COMMAND.
3684 level histories withing your repository. DO NOT USE THIS COMMAND.
3684
3685
3685 The command update the p1 and p2 field in the dirstate, and not touching
3686 The command update the p1 and p2 field in the dirstate, and not touching
3686 anything else. This useful for writing repository conversion tools, but
3687 anything else. This useful for writing repository conversion tools, but
3687 should be used with extreme care. For example, neither the working
3688 should be used with extreme care. For example, neither the working
3688 directory nor the dirstate is updated, so file status may be incorrect
3689 directory nor the dirstate is updated, so file status may be incorrect
3689 after running this command. Only used if you are one of the few people that
3690 after running this command. Only used if you are one of the few people that
3690 deeply unstand both conversion tools and file level histories. If you are
3691 deeply unstand both conversion tools and file level histories. If you are
3691 reading this help, you are not one of this people (most of them sailed west
3692 reading this help, you are not one of this people (most of them sailed west
3692 from Mithlond anyway.
3693 from Mithlond anyway.
3693
3694
3694 So one last time DO NOT USE THIS COMMAND.
3695 So one last time DO NOT USE THIS COMMAND.
3695
3696
3696 Returns 0 on success.
3697 Returns 0 on success.
3697 """
3698 """
3698
3699
3699 node1 = scmutil.revsingle(repo, rev1).node()
3700 node1 = scmutil.revsingle(repo, rev1).node()
3700 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3701 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3701
3702
3702 with repo.wlock():
3703 with repo.wlock():
3703 repo.setparents(node1, node2)
3704 repo.setparents(node1, node2)
3704
3705
3705
3706
3706 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3707 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3707 def debugsidedata(ui, repo, file_, rev=None, **opts):
3708 def debugsidedata(ui, repo, file_, rev=None, **opts):
3708 """dump the side data for a cl/manifest/file revision
3709 """dump the side data for a cl/manifest/file revision
3709
3710
3710 Use --verbose to dump the sidedata content."""
3711 Use --verbose to dump the sidedata content."""
3711 opts = pycompat.byteskwargs(opts)
3712 opts = pycompat.byteskwargs(opts)
3712 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3713 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3713 if rev is not None:
3714 if rev is not None:
3714 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3715 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3715 file_, rev = None, file_
3716 file_, rev = None, file_
3716 elif rev is None:
3717 elif rev is None:
3717 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3718 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3718 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3719 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3719 r = getattr(r, '_revlog', r)
3720 r = getattr(r, '_revlog', r)
3720 try:
3721 try:
3721 sidedata = r.sidedata(r.lookup(rev))
3722 sidedata = r.sidedata(r.lookup(rev))
3722 except KeyError:
3723 except KeyError:
3723 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3724 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3724 if sidedata:
3725 if sidedata:
3725 sidedata = list(sidedata.items())
3726 sidedata = list(sidedata.items())
3726 sidedata.sort()
3727 sidedata.sort()
3727 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3728 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3728 for key, value in sidedata:
3729 for key, value in sidedata:
3729 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3730 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3730 if ui.verbose:
3731 if ui.verbose:
3731 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3732 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3732
3733
3733
3734
3734 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3735 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3735 def debugssl(ui, repo, source=None, **opts):
3736 def debugssl(ui, repo, source=None, **opts):
3736 """test a secure connection to a server
3737 """test a secure connection to a server
3737
3738
3738 This builds the certificate chain for the server on Windows, installing the
3739 This builds the certificate chain for the server on Windows, installing the
3739 missing intermediates and trusted root via Windows Update if necessary. It
3740 missing intermediates and trusted root via Windows Update if necessary. It
3740 does nothing on other platforms.
3741 does nothing on other platforms.
3741
3742
3742 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3743 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3743 that server is used. See :hg:`help urls` for more information.
3744 that server is used. See :hg:`help urls` for more information.
3744
3745
3745 If the update succeeds, retry the original operation. Otherwise, the cause
3746 If the update succeeds, retry the original operation. Otherwise, the cause
3746 of the SSL error is likely another issue.
3747 of the SSL error is likely another issue.
3747 """
3748 """
3748 if not pycompat.iswindows:
3749 if not pycompat.iswindows:
3749 raise error.Abort(
3750 raise error.Abort(
3750 _(b'certificate chain building is only possible on Windows')
3751 _(b'certificate chain building is only possible on Windows')
3751 )
3752 )
3752
3753
3753 if not source:
3754 if not source:
3754 if not repo:
3755 if not repo:
3755 raise error.Abort(
3756 raise error.Abort(
3756 _(
3757 _(
3757 b"there is no Mercurial repository here, and no "
3758 b"there is no Mercurial repository here, and no "
3758 b"server specified"
3759 b"server specified"
3759 )
3760 )
3760 )
3761 )
3761 source = b"default"
3762 source = b"default"
3762
3763
3763 source, branches = urlutil.get_unique_pull_path(
3764 source, branches = urlutil.get_unique_pull_path(
3764 b'debugssl', repo, ui, source
3765 b'debugssl', repo, ui, source
3765 )
3766 )
3766 url = urlutil.url(source)
3767 url = urlutil.url(source)
3767
3768
3768 defaultport = {b'https': 443, b'ssh': 22}
3769 defaultport = {b'https': 443, b'ssh': 22}
3769 if url.scheme in defaultport:
3770 if url.scheme in defaultport:
3770 try:
3771 try:
3771 addr = (url.host, int(url.port or defaultport[url.scheme]))
3772 addr = (url.host, int(url.port or defaultport[url.scheme]))
3772 except ValueError:
3773 except ValueError:
3773 raise error.Abort(_(b"malformed port number in URL"))
3774 raise error.Abort(_(b"malformed port number in URL"))
3774 else:
3775 else:
3775 raise error.Abort(_(b"only https and ssh connections are supported"))
3776 raise error.Abort(_(b"only https and ssh connections are supported"))
3776
3777
3777 from . import win32
3778 from . import win32
3778
3779
3779 s = ssl.wrap_socket(
3780 s = ssl.wrap_socket(
3780 socket.socket(),
3781 socket.socket(),
3781 ssl_version=ssl.PROTOCOL_TLS,
3782 ssl_version=ssl.PROTOCOL_TLS,
3782 cert_reqs=ssl.CERT_NONE,
3783 cert_reqs=ssl.CERT_NONE,
3783 ca_certs=None,
3784 ca_certs=None,
3784 )
3785 )
3785
3786
3786 try:
3787 try:
3787 s.connect(addr)
3788 s.connect(addr)
3788 cert = s.getpeercert(True)
3789 cert = s.getpeercert(True)
3789
3790
3790 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3791 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3791
3792
3792 complete = win32.checkcertificatechain(cert, build=False)
3793 complete = win32.checkcertificatechain(cert, build=False)
3793
3794
3794 if not complete:
3795 if not complete:
3795 ui.status(_(b'certificate chain is incomplete, updating... '))
3796 ui.status(_(b'certificate chain is incomplete, updating... '))
3796
3797
3797 if not win32.checkcertificatechain(cert):
3798 if not win32.checkcertificatechain(cert):
3798 ui.status(_(b'failed.\n'))
3799 ui.status(_(b'failed.\n'))
3799 else:
3800 else:
3800 ui.status(_(b'done.\n'))
3801 ui.status(_(b'done.\n'))
3801 else:
3802 else:
3802 ui.status(_(b'full certificate chain is available\n'))
3803 ui.status(_(b'full certificate chain is available\n'))
3803 finally:
3804 finally:
3804 s.close()
3805 s.close()
3805
3806
3806
3807
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect all *.hg bundle files from .hg/strip-backup, newest first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to `limit` changesets from the bundle repo `other`.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Silence the peer/bundle setup chatter; errors still surface below.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        # Stop after the first bundle containing the node.
                        break
            else:
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                    displayer = logcmdutil.changesetdisplayer(
                        ui, other, opts, False
                    )
                    display(other, chlist, displayer)
                    displayer.close()
        finally:
            cleanupfn()
3947
3948
3948
3949
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    """print the subrepository state recorded in a revision

    For each entry of the changectx's substate, prints the subrepo path,
    its source, and its pinned revision.
    """
    ctx = scmutil.revsingle(repo, rev, None)
    for k, v in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % k)
        ui.writenoi18n(b' source %s\n' % v[0])
        ui.writenoi18n(b' revision %s\n' % v[1])
3960
3961
3961
3962
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    imported_objects = {
        'ui': ui,
        'repo': repo,
    }

    code.interact(local=imported_objects)
3977
3978
3978
3979
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    ctx2str = bytes
    node2str = short
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % ctx2str(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            if succsset:
                # indent each successors set under its revision
                ui.write(b'    ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(b' ')
                    ui.write(node2str(node))
            ui.write(b'\n')
4033
4034
4034
4035
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')
    for r in repo:
        node = repo[r].node()
        # computemissing=False: only report what the cache already holds
        tagsnode = cache.getfnode(node, computemissing=False)
        if tagsnode:
            tagsnodedisplay = hex(tagsnode)
            if not flog.hasnode(tagsnode):
                tagsnodedisplay += b' (unknown node)'
        elif tagsnode is None:
            tagsnodedisplay = b'missing'
        else:
            # falsy but not None (e.g. empty value) => corrupted entry
            tagsnodedisplay = b'invalid'

        ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
4053
4054
4054
4055
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # Collect -D KEY=VALUE keyword definitions; 'ui' is reserved.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once with the defined properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested revision.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4118
4119
4119
4120
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    r = ui.getpass(prompt)
    if r is None:
        r = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % r)
4134
4135
4135
4136
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    r = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % r)
4148
4149
4149
4150
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Take both locks: cache updates may touch store and working-copy data.
    with repo.wlock(), repo.lock():
        repo.updatecaches(caches=repository.CACHES_ALL)
4155
4156
4156
4157
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
    )
4206
4207
4207
4208
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    # Optionally normalize path separators for display (ui.slash on Windows).
    f = lambda fn: fn
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        f = lambda fn: util.normpath(fn)
    # Column widths sized to the longest abs/relative path.
    fmt = b'f  %%-%ds  %%-%ds  %%s' % (
        max([len(abs) for abs in items]),
        max([len(repo.pathto(abs)) for abs in items]),
    )
    for abs in items:
        line = fmt % (
            abs,
            f(repo.pathto(abs)),
            m.exact(abs) and b'exact' or b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4234
4235
4235
4236
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        dnodes = b''
        if entry.get(b'divergentnodes'):
            dnodes = (
                b' '.join(
                    b'%s (%s)' % (ctx.hex(), ctx.phasestr())
                    for ctx in entry[b'divergentnodes']
                )
                + b' '
            )
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4253
4254
4254
4255
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    """send test arguments to a remote peer and echo the response"""
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    try:
        # Strip the generic remote options; only the test flags are forwarded.
        for opt in cmdutil.remoteopts:
            del opts[opt[1]]
        args = {}
        for k, v in opts.items():
            if v:
                args[k] = v
        args = pycompat.strkwargs(args)
        # run twice to check that we don't mess up the stream for the next command
        res1 = repo.debugwireargs(*vals, **args)
        res2 = repo.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % res1)
        if res1 != res2:
            ui.warn(b"%s\n" % res2)
    finally:
        repo.close()
4285
4286
4286
4287
4287 def _parsewirelangblocks(fh):
4288 def _parsewirelangblocks(fh):
4288 activeaction = None
4289 activeaction = None
4289 blocklines = []
4290 blocklines = []
4290 lastindent = 0
4291 lastindent = 0
4291
4292
4292 for line in fh:
4293 for line in fh:
4293 line = line.rstrip()
4294 line = line.rstrip()
4294 if not line:
4295 if not line:
4295 continue
4296 continue
4296
4297
4297 if line.startswith(b'#'):
4298 if line.startswith(b'#'):
4298 continue
4299 continue
4299
4300
4300 if not line.startswith(b' '):
4301 if not line.startswith(b' '):
4301 # New block. Flush previous one.
4302 # New block. Flush previous one.
4302 if activeaction:
4303 if activeaction:
4303 yield activeaction, blocklines
4304 yield activeaction, blocklines
4304
4305
4305 activeaction = line
4306 activeaction = line
4306 blocklines = []
4307 blocklines = []
4307 lastindent = 0
4308 lastindent = 0
4308 continue
4309 continue
4309
4310
4310 # Else we start with an indent.
4311 # Else we start with an indent.
4311
4312
4312 if not activeaction:
4313 if not activeaction:
4313 raise error.Abort(_(b'indented line outside of block'))
4314 raise error.Abort(_(b'indented line outside of block'))
4314
4315
4315 indent = len(line) - len(line.lstrip())
4316 indent = len(line) - len(line.lstrip())
4316
4317
4317 # If this line is indented more than the last line, concatenate it.
4318 # If this line is indented more than the last line, concatenate it.
4318 if indent > lastindent and blocklines:
4319 if indent > lastindent and blocklines:
4319 blocklines[-1] += line.lstrip()
4320 blocklines[-1] += line.lstrip()
4320 else:
4321 else:
4321 blocklines.append(line)
4322 blocklines.append(line)
4322 lastindent = indent
4323 lastindent = indent
4323
4324
4324 # Flush last block.
4325 # Flush last block.
4325 if activeaction:
4326 if activeaction:
4326 yield activeaction, blocklines
4327 yield activeaction, blocklines
4327
4328
4328
4329
4329 @command(
4330 @command(
4330 b'debugwireproto',
4331 b'debugwireproto',
4331 [
4332 [
4332 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4333 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4333 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4334 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4334 (
4335 (
4335 b'',
4336 b'',
4336 b'noreadstderr',
4337 b'noreadstderr',
4337 False,
4338 False,
4338 _(b'do not read from stderr of the remote'),
4339 _(b'do not read from stderr of the remote'),
4339 ),
4340 ),
4340 (
4341 (
4341 b'',
4342 b'',
4342 b'nologhandshake',
4343 b'nologhandshake',
4343 False,
4344 False,
4344 _(b'do not log I/O related to the peer handshake'),
4345 _(b'do not log I/O related to the peer handshake'),
4345 ),
4346 ),
4346 ]
4347 ]
4347 + cmdutil.remoteopts,
4348 + cmdutil.remoteopts,
4348 _(b'[PATH]'),
4349 _(b'[PATH]'),
4349 optionalrepo=True,
4350 optionalrepo=True,
4350 )
4351 )
4351 def debugwireproto(ui, repo, path=None, **opts):
4352 def debugwireproto(ui, repo, path=None, **opts):
4352 """send wire protocol commands to a server
4353 """send wire protocol commands to a server
4353
4354
4354 This command can be used to issue wire protocol commands to remote
4355 This command can be used to issue wire protocol commands to remote
4355 peers and to debug the raw data being exchanged.
4356 peers and to debug the raw data being exchanged.
4356
4357
4357 ``--localssh`` will start an SSH server against the current repository
4358 ``--localssh`` will start an SSH server against the current repository
4358 and connect to that. By default, the connection will perform a handshake
4359 and connect to that. By default, the connection will perform a handshake
4359 and establish an appropriate peer instance.
4360 and establish an appropriate peer instance.
4360
4361
4361 ``--peer`` can be used to bypass the handshake protocol and construct a
4362 ``--peer`` can be used to bypass the handshake protocol and construct a
4362 peer instance using the specified class type. Valid values are ``raw``,
4363 peer instance using the specified class type. Valid values are ``raw``,
4363 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4364 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4364 don't support higher-level command actions.
4365 don't support higher-level command actions.
4365
4366
4366 ``--noreadstderr`` can be used to disable automatic reading from stderr
4367 ``--noreadstderr`` can be used to disable automatic reading from stderr
4367 of the peer (for SSH connections only). Disabling automatic reading of
4368 of the peer (for SSH connections only). Disabling automatic reading of
4368 stderr is useful for making output more deterministic.
4369 stderr is useful for making output more deterministic.
4369
4370
4370 Commands are issued via a mini language which is specified via stdin.
4371 Commands are issued via a mini language which is specified via stdin.
4371 The language consists of individual actions to perform. An action is
4372 The language consists of individual actions to perform. An action is
4372 defined by a block. A block is defined as a line with no leading
4373 defined by a block. A block is defined as a line with no leading
4373 space followed by 0 or more lines with leading space. Blocks are
4374 space followed by 0 or more lines with leading space. Blocks are
4374 effectively a high-level command with additional metadata.
4375 effectively a high-level command with additional metadata.
4375
4376
4376 Lines beginning with ``#`` are ignored.
4377 Lines beginning with ``#`` are ignored.
4377
4378
4378 The following sections denote available actions.
4379 The following sections denote available actions.
4379
4380
4380 raw
4381 raw
4381 ---
4382 ---
4382
4383
4383 Send raw data to the server.
4384 Send raw data to the server.
4384
4385
4385 The block payload contains the raw data to send as one atomic send
4386 The block payload contains the raw data to send as one atomic send
4386 operation. The data may not actually be delivered in a single system
4387 operation. The data may not actually be delivered in a single system
4387 call: it depends on the abilities of the transport being used.
4388 call: it depends on the abilities of the transport being used.
4388
4389
4389 Each line in the block is de-indented and concatenated. Then, that
4390 Each line in the block is de-indented and concatenated. Then, that
4390 value is evaluated as a Python b'' literal. This allows the use of
4391 value is evaluated as a Python b'' literal. This allows the use of
4391 backslash escaping, etc.
4392 backslash escaping, etc.
4392
4393
4393 raw+
4394 raw+
4394 ----
4395 ----
4395
4396
4396 Behaves like ``raw`` except flushes output afterwards.
4397 Behaves like ``raw`` except flushes output afterwards.
4397
4398
4398 command <X>
4399 command <X>
4399 -----------
4400 -----------
4400
4401
4401 Send a request to run a named command, whose name follows the ``command``
4402 Send a request to run a named command, whose name follows the ``command``
4402 string.
4403 string.
4403
4404
4404 Arguments to the command are defined as lines in this block. The format of
4405 Arguments to the command are defined as lines in this block. The format of
4405 each line is ``<key> <value>``. e.g.::
4406 each line is ``<key> <value>``. e.g.::
4406
4407
4407 command listkeys
4408 command listkeys
4408 namespace bookmarks
4409 namespace bookmarks
4409
4410
4410 If the value begins with ``eval:``, it will be interpreted as a Python
4411 If the value begins with ``eval:``, it will be interpreted as a Python
4411 literal expression. Otherwise values are interpreted as Python b'' literals.
4412 literal expression. Otherwise values are interpreted as Python b'' literals.
4412 This allows sending complex types and encoding special byte sequences via
4413 This allows sending complex types and encoding special byte sequences via
4413 backslash escaping.
4414 backslash escaping.
4414
4415
4415 The following arguments have special meaning:
4416 The following arguments have special meaning:
4416
4417
4417 ``PUSHFILE``
4418 ``PUSHFILE``
4418 When defined, the *push* mechanism of the peer will be used instead
4419 When defined, the *push* mechanism of the peer will be used instead
4419 of the static request-response mechanism and the content of the
4420 of the static request-response mechanism and the content of the
4420 file specified in the value of this argument will be sent as the
4421 file specified in the value of this argument will be sent as the
4421 command payload.
4422 command payload.
4422
4423
4423 This can be used to submit a local bundle file to the remote.
4424 This can be used to submit a local bundle file to the remote.
4424
4425
4425 batchbegin
4426 batchbegin
4426 ----------
4427 ----------
4427
4428
4428 Instruct the peer to begin a batched send.
4429 Instruct the peer to begin a batched send.
4429
4430
4430 All ``command`` blocks are queued for execution until the next
4431 All ``command`` blocks are queued for execution until the next
4431 ``batchsubmit`` block.
4432 ``batchsubmit`` block.
4432
4433
4433 batchsubmit
4434 batchsubmit
4434 -----------
4435 -----------
4435
4436
4436 Submit previously queued ``command`` blocks as a batch request.
4437 Submit previously queued ``command`` blocks as a batch request.
4437
4438
4438 This action MUST be paired with a ``batchbegin`` action.
4439 This action MUST be paired with a ``batchbegin`` action.
4439
4440
4440 httprequest <method> <path>
4441 httprequest <method> <path>
4441 ---------------------------
4442 ---------------------------
4442
4443
4443 (HTTP peer only)
4444 (HTTP peer only)
4444
4445
4445 Send an HTTP request to the peer.
4446 Send an HTTP request to the peer.
4446
4447
4447 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4448 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4448
4449
4449 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4450 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4450 headers to add to the request. e.g. ``Accept: foo``.
4451 headers to add to the request. e.g. ``Accept: foo``.
4451
4452
4452 The following arguments are special:
4453 The following arguments are special:
4453
4454
4454 ``BODYFILE``
4455 ``BODYFILE``
4455 The content of the file defined as the value to this argument will be
4456 The content of the file defined as the value to this argument will be
4456 transferred verbatim as the HTTP request body.
4457 transferred verbatim as the HTTP request body.
4457
4458
4458 ``frame <type> <flags> <payload>``
4459 ``frame <type> <flags> <payload>``
4459 Send a unified protocol frame as part of the request body.
4460 Send a unified protocol frame as part of the request body.
4460
4461
4461 All frames will be collected and sent as the body to the HTTP
4462 All frames will be collected and sent as the body to the HTTP
4462 request.
4463 request.
4463
4464
4464 close
4465 close
4465 -----
4466 -----
4466
4467
4467 Close the connection to the server.
4468 Close the connection to the server.
4468
4469
4469 flush
4470 flush
4470 -----
4471 -----
4471
4472
4472 Flush data written to the server.
4473 Flush data written to the server.
4473
4474
4474 readavailable
4475 readavailable
4475 -------------
4476 -------------
4476
4477
4477 Close the write end of the connection and read all available data from
4478 Close the write end of the connection and read all available data from
4478 the server.
4479 the server.
4479
4480
4480 If the connection to the server encompasses multiple pipes, we poll both
4481 If the connection to the server encompasses multiple pipes, we poll both
4481 pipes and read available data.
4482 pipes and read available data.
4482
4483
4483 readline
4484 readline
4484 --------
4485 --------
4485
4486
4486 Read a line of output from the server. If there are multiple output
4487 Read a line of output from the server. If there are multiple output
4487 pipes, reads only the main pipe.
4488 pipes, reads only the main pipe.
4488
4489
4489 ereadline
4490 ereadline
4490 ---------
4491 ---------
4491
4492
4492 Like ``readline``, but read from the stderr pipe, if available.
4493 Like ``readline``, but read from the stderr pipe, if available.
4493
4494
4494 read <X>
4495 read <X>
4495 --------
4496 --------
4496
4497
4497 ``read()`` N bytes from the server's main output pipe.
4498 ``read()`` N bytes from the server's main output pipe.
4498
4499
4499 eread <X>
4500 eread <X>
4500 ---------
4501 ---------
4501
4502
4502 ``read()`` N bytes from the server's stderr pipe, if available.
4503 ``read()`` N bytes from the server's stderr pipe, if available.
4503
4504
4504 Specifying Unified Frame-Based Protocol Frames
4505 Specifying Unified Frame-Based Protocol Frames
4505 ----------------------------------------------
4506 ----------------------------------------------
4506
4507
4507 It is possible to emit a *Unified Frame-Based Protocol* by using special
4508 It is possible to emit a *Unified Frame-Based Protocol* by using special
4508 syntax.
4509 syntax.
4509
4510
4510 A frame is composed as a type, flags, and payload. These can be parsed
4511 A frame is composed as a type, flags, and payload. These can be parsed
4511 from a string of the form:
4512 from a string of the form:
4512
4513
4513 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4514 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4514
4515
4515 ``request-id`` and ``stream-id`` are integers defining the request and
4516 ``request-id`` and ``stream-id`` are integers defining the request and
4516 stream identifiers.
4517 stream identifiers.
4517
4518
4518 ``type`` can be an integer value for the frame type or the string name
4519 ``type`` can be an integer value for the frame type or the string name
4519 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4520 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4520 ``command-name``.
4521 ``command-name``.
4521
4522
4522 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4523 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4523 components. Each component (and there can be just one) can be an integer
4524 components. Each component (and there can be just one) can be an integer
4524 or a flag name for stream flags or frame flags, respectively. Values are
4525 or a flag name for stream flags or frame flags, respectively. Values are
4525 resolved to integers and then bitwise OR'd together.
4526 resolved to integers and then bitwise OR'd together.
4526
4527
4527 ``payload`` represents the raw frame payload. If it begins with
4528 ``payload`` represents the raw frame payload. If it begins with
4528 ``cbor:``, the following string is evaluated as Python code and the
4529 ``cbor:``, the following string is evaluated as Python code and the
4529 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4530 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4530 as a Python byte string literal.
4531 as a Python byte string literal.
4531 """
4532 """
4532 opts = pycompat.byteskwargs(opts)
4533 opts = pycompat.byteskwargs(opts)
4533
4534
4534 if opts[b'localssh'] and not repo:
4535 if opts[b'localssh'] and not repo:
4535 raise error.Abort(_(b'--localssh requires a repository'))
4536 raise error.Abort(_(b'--localssh requires a repository'))
4536
4537
4537 if opts[b'peer'] and opts[b'peer'] not in (
4538 if opts[b'peer'] and opts[b'peer'] not in (
4538 b'raw',
4539 b'raw',
4539 b'ssh1',
4540 b'ssh1',
4540 ):
4541 ):
4541 raise error.Abort(
4542 raise error.Abort(
4542 _(b'invalid value for --peer'),
4543 _(b'invalid value for --peer'),
4543 hint=_(b'valid values are "raw" and "ssh1"'),
4544 hint=_(b'valid values are "raw" and "ssh1"'),
4544 )
4545 )
4545
4546
4546 if path and opts[b'localssh']:
4547 if path and opts[b'localssh']:
4547 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4548 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4548
4549
4549 if ui.interactive():
4550 if ui.interactive():
4550 ui.write(_(b'(waiting for commands on stdin)\n'))
4551 ui.write(_(b'(waiting for commands on stdin)\n'))
4551
4552
4552 blocks = list(_parsewirelangblocks(ui.fin))
4553 blocks = list(_parsewirelangblocks(ui.fin))
4553
4554
4554 proc = None
4555 proc = None
4555 stdin = None
4556 stdin = None
4556 stdout = None
4557 stdout = None
4557 stderr = None
4558 stderr = None
4558 opener = None
4559 opener = None
4559
4560
4560 if opts[b'localssh']:
4561 if opts[b'localssh']:
4561 # We start the SSH server in its own process so there is process
4562 # We start the SSH server in its own process so there is process
4562 # separation. This prevents a whole class of potential bugs around
4563 # separation. This prevents a whole class of potential bugs around
4563 # shared state from interfering with server operation.
4564 # shared state from interfering with server operation.
4564 args = procutil.hgcmd() + [
4565 args = procutil.hgcmd() + [
4565 b'-R',
4566 b'-R',
4566 repo.root,
4567 repo.root,
4567 b'debugserve',
4568 b'debugserve',
4568 b'--sshstdio',
4569 b'--sshstdio',
4569 ]
4570 ]
4570 proc = subprocess.Popen(
4571 proc = subprocess.Popen(
4571 pycompat.rapply(procutil.tonativestr, args),
4572 pycompat.rapply(procutil.tonativestr, args),
4572 stdin=subprocess.PIPE,
4573 stdin=subprocess.PIPE,
4573 stdout=subprocess.PIPE,
4574 stdout=subprocess.PIPE,
4574 stderr=subprocess.PIPE,
4575 stderr=subprocess.PIPE,
4575 bufsize=0,
4576 bufsize=0,
4576 )
4577 )
4577
4578
4578 stdin = proc.stdin
4579 stdin = proc.stdin
4579 stdout = proc.stdout
4580 stdout = proc.stdout
4580 stderr = proc.stderr
4581 stderr = proc.stderr
4581
4582
4582 # We turn the pipes into observers so we can log I/O.
4583 # We turn the pipes into observers so we can log I/O.
4583 if ui.verbose or opts[b'peer'] == b'raw':
4584 if ui.verbose or opts[b'peer'] == b'raw':
4584 stdin = util.makeloggingfileobject(
4585 stdin = util.makeloggingfileobject(
4585 ui, proc.stdin, b'i', logdata=True
4586 ui, proc.stdin, b'i', logdata=True
4586 )
4587 )
4587 stdout = util.makeloggingfileobject(
4588 stdout = util.makeloggingfileobject(
4588 ui, proc.stdout, b'o', logdata=True
4589 ui, proc.stdout, b'o', logdata=True
4589 )
4590 )
4590 stderr = util.makeloggingfileobject(
4591 stderr = util.makeloggingfileobject(
4591 ui, proc.stderr, b'e', logdata=True
4592 ui, proc.stderr, b'e', logdata=True
4592 )
4593 )
4593
4594
4594 # --localssh also implies the peer connection settings.
4595 # --localssh also implies the peer connection settings.
4595
4596
4596 url = b'ssh://localserver'
4597 url = b'ssh://localserver'
4597 autoreadstderr = not opts[b'noreadstderr']
4598 autoreadstderr = not opts[b'noreadstderr']
4598
4599
4599 if opts[b'peer'] == b'ssh1':
4600 if opts[b'peer'] == b'ssh1':
4600 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4601 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4601 peer = sshpeer.sshv1peer(
4602 peer = sshpeer.sshv1peer(
4602 ui,
4603 ui,
4603 url,
4604 url,
4604 proc,
4605 proc,
4605 stdin,
4606 stdin,
4606 stdout,
4607 stdout,
4607 stderr,
4608 stderr,
4608 None,
4609 None,
4609 autoreadstderr=autoreadstderr,
4610 autoreadstderr=autoreadstderr,
4610 )
4611 )
4611 elif opts[b'peer'] == b'raw':
4612 elif opts[b'peer'] == b'raw':
4612 ui.write(_(b'using raw connection to peer\n'))
4613 ui.write(_(b'using raw connection to peer\n'))
4613 peer = None
4614 peer = None
4614 else:
4615 else:
4615 ui.write(_(b'creating ssh peer from handshake results\n'))
4616 ui.write(_(b'creating ssh peer from handshake results\n'))
4616 peer = sshpeer.makepeer(
4617 peer = sshpeer.makepeer(
4617 ui,
4618 ui,
4618 url,
4619 url,
4619 proc,
4620 proc,
4620 stdin,
4621 stdin,
4621 stdout,
4622 stdout,
4622 stderr,
4623 stderr,
4623 autoreadstderr=autoreadstderr,
4624 autoreadstderr=autoreadstderr,
4624 )
4625 )
4625
4626
4626 elif path:
4627 elif path:
4627 # We bypass hg.peer() so we can proxy the sockets.
4628 # We bypass hg.peer() so we can proxy the sockets.
4628 # TODO consider not doing this because we skip
4629 # TODO consider not doing this because we skip
4629 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4630 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4630 u = urlutil.url(path)
4631 u = urlutil.url(path)
4631 if u.scheme != b'http':
4632 if u.scheme != b'http':
4632 raise error.Abort(_(b'only http:// paths are currently supported'))
4633 raise error.Abort(_(b'only http:// paths are currently supported'))
4633
4634
4634 url, authinfo = u.authinfo()
4635 url, authinfo = u.authinfo()
4635 openerargs = {
4636 openerargs = {
4636 'useragent': b'Mercurial debugwireproto',
4637 'useragent': b'Mercurial debugwireproto',
4637 }
4638 }
4638
4639
4639 # Turn pipes/sockets into observers so we can log I/O.
4640 # Turn pipes/sockets into observers so we can log I/O.
4640 if ui.verbose:
4641 if ui.verbose:
4641 openerargs.update(
4642 openerargs.update(
4642 {
4643 {
4643 'loggingfh': ui,
4644 'loggingfh': ui,
4644 'loggingname': b's',
4645 'loggingname': b's',
4645 'loggingopts': {
4646 'loggingopts': {
4646 'logdata': True,
4647 'logdata': True,
4647 'logdataapis': False,
4648 'logdataapis': False,
4648 },
4649 },
4649 }
4650 }
4650 )
4651 )
4651
4652
4652 if ui.debugflag:
4653 if ui.debugflag:
4653 openerargs['loggingopts']['logdataapis'] = True
4654 openerargs['loggingopts']['logdataapis'] = True
4654
4655
4655 # Don't send default headers when in raw mode. This allows us to
4656 # Don't send default headers when in raw mode. This allows us to
4656 # bypass most of the behavior of our URL handling code so we can
4657 # bypass most of the behavior of our URL handling code so we can
4657 # have near complete control over what's sent on the wire.
4658 # have near complete control over what's sent on the wire.
4658 if opts[b'peer'] == b'raw':
4659 if opts[b'peer'] == b'raw':
4659 openerargs['sendaccept'] = False
4660 openerargs['sendaccept'] = False
4660
4661
4661 opener = urlmod.opener(ui, authinfo, **openerargs)
4662 opener = urlmod.opener(ui, authinfo, **openerargs)
4662
4663
4663 if opts[b'peer'] == b'raw':
4664 if opts[b'peer'] == b'raw':
4664 ui.write(_(b'using raw connection to peer\n'))
4665 ui.write(_(b'using raw connection to peer\n'))
4665 peer = None
4666 peer = None
4666 elif opts[b'peer']:
4667 elif opts[b'peer']:
4667 raise error.Abort(
4668 raise error.Abort(
4668 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4669 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4669 )
4670 )
4670 else:
4671 else:
4671 peer = httppeer.makepeer(ui, path, opener=opener)
4672 peer = httppeer.makepeer(ui, path, opener=opener)
4672
4673
4673 # We /could/ populate stdin/stdout with sock.makefile()...
4674 # We /could/ populate stdin/stdout with sock.makefile()...
4674 else:
4675 else:
4675 raise error.Abort(_(b'unsupported connection configuration'))
4676 raise error.Abort(_(b'unsupported connection configuration'))
4676
4677
4677 batchedcommands = None
4678 batchedcommands = None
4678
4679
4679 # Now perform actions based on the parsed wire language instructions.
4680 # Now perform actions based on the parsed wire language instructions.
4680 for action, lines in blocks:
4681 for action, lines in blocks:
4681 if action in (b'raw', b'raw+'):
4682 if action in (b'raw', b'raw+'):
4682 if not stdin:
4683 if not stdin:
4683 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4684 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4684
4685
4685 # Concatenate the data together.
4686 # Concatenate the data together.
4686 data = b''.join(l.lstrip() for l in lines)
4687 data = b''.join(l.lstrip() for l in lines)
4687 data = stringutil.unescapestr(data)
4688 data = stringutil.unescapestr(data)
4688 stdin.write(data)
4689 stdin.write(data)
4689
4690
4690 if action == b'raw+':
4691 if action == b'raw+':
4691 stdin.flush()
4692 stdin.flush()
4692 elif action == b'flush':
4693 elif action == b'flush':
4693 if not stdin:
4694 if not stdin:
4694 raise error.Abort(_(b'cannot call flush on this peer'))
4695 raise error.Abort(_(b'cannot call flush on this peer'))
4695 stdin.flush()
4696 stdin.flush()
4696 elif action.startswith(b'command'):
4697 elif action.startswith(b'command'):
4697 if not peer:
4698 if not peer:
4698 raise error.Abort(
4699 raise error.Abort(
4699 _(
4700 _(
4700 b'cannot send commands unless peer instance '
4701 b'cannot send commands unless peer instance '
4701 b'is available'
4702 b'is available'
4702 )
4703 )
4703 )
4704 )
4704
4705
4705 command = action.split(b' ', 1)[1]
4706 command = action.split(b' ', 1)[1]
4706
4707
4707 args = {}
4708 args = {}
4708 for line in lines:
4709 for line in lines:
4709 # We need to allow empty values.
4710 # We need to allow empty values.
4710 fields = line.lstrip().split(b' ', 1)
4711 fields = line.lstrip().split(b' ', 1)
4711 if len(fields) == 1:
4712 if len(fields) == 1:
4712 key = fields[0]
4713 key = fields[0]
4713 value = b''
4714 value = b''
4714 else:
4715 else:
4715 key, value = fields
4716 key, value = fields
4716
4717
4717 if value.startswith(b'eval:'):
4718 if value.startswith(b'eval:'):
4718 value = stringutil.evalpythonliteral(value[5:])
4719 value = stringutil.evalpythonliteral(value[5:])
4719 else:
4720 else:
4720 value = stringutil.unescapestr(value)
4721 value = stringutil.unescapestr(value)
4721
4722
4722 args[key] = value
4723 args[key] = value
4723
4724
4724 if batchedcommands is not None:
4725 if batchedcommands is not None:
4725 batchedcommands.append((command, args))
4726 batchedcommands.append((command, args))
4726 continue
4727 continue
4727
4728
4728 ui.status(_(b'sending %s command\n') % command)
4729 ui.status(_(b'sending %s command\n') % command)
4729
4730
4730 if b'PUSHFILE' in args:
4731 if b'PUSHFILE' in args:
4731 with open(args[b'PUSHFILE'], 'rb') as fh:
4732 with open(args[b'PUSHFILE'], 'rb') as fh:
4732 del args[b'PUSHFILE']
4733 del args[b'PUSHFILE']
4733 res, output = peer._callpush(
4734 res, output = peer._callpush(
4734 command, fh, **pycompat.strkwargs(args)
4735 command, fh, **pycompat.strkwargs(args)
4735 )
4736 )
4736 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4737 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4737 ui.status(
4738 ui.status(
4738 _(b'remote output: %s\n') % stringutil.escapestr(output)
4739 _(b'remote output: %s\n') % stringutil.escapestr(output)
4739 )
4740 )
4740 else:
4741 else:
4741 with peer.commandexecutor() as e:
4742 with peer.commandexecutor() as e:
4742 res = e.callcommand(command, args).result()
4743 res = e.callcommand(command, args).result()
4743
4744
4744 ui.status(
4745 ui.status(
4745 _(b'response: %s\n')
4746 _(b'response: %s\n')
4746 % stringutil.pprint(res, bprefix=True, indent=2)
4747 % stringutil.pprint(res, bprefix=True, indent=2)
4747 )
4748 )
4748
4749
4749 elif action == b'batchbegin':
4750 elif action == b'batchbegin':
4750 if batchedcommands is not None:
4751 if batchedcommands is not None:
4751 raise error.Abort(_(b'nested batchbegin not allowed'))
4752 raise error.Abort(_(b'nested batchbegin not allowed'))
4752
4753
4753 batchedcommands = []
4754 batchedcommands = []
4754 elif action == b'batchsubmit':
4755 elif action == b'batchsubmit':
4755 # There is a batching API we could go through. But it would be
4756 # There is a batching API we could go through. But it would be
4756 # difficult to normalize requests into function calls. It is easier
4757 # difficult to normalize requests into function calls. It is easier
4757 # to bypass this layer and normalize to commands + args.
4758 # to bypass this layer and normalize to commands + args.
4758 ui.status(
4759 ui.status(
4759 _(b'sending batch with %d sub-commands\n')
4760 _(b'sending batch with %d sub-commands\n')
4760 % len(batchedcommands)
4761 % len(batchedcommands)
4761 )
4762 )
4762 assert peer is not None
4763 assert peer is not None
4763 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4764 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4764 ui.status(
4765 ui.status(
4765 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4766 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4766 )
4767 )
4767
4768
4768 batchedcommands = None
4769 batchedcommands = None
4769
4770
4770 elif action.startswith(b'httprequest '):
4771 elif action.startswith(b'httprequest '):
4771 if not opener:
4772 if not opener:
4772 raise error.Abort(
4773 raise error.Abort(
4773 _(b'cannot use httprequest without an HTTP peer')
4774 _(b'cannot use httprequest without an HTTP peer')
4774 )
4775 )
4775
4776
4776 request = action.split(b' ', 2)
4777 request = action.split(b' ', 2)
4777 if len(request) != 3:
4778 if len(request) != 3:
4778 raise error.Abort(
4779 raise error.Abort(
4779 _(
4780 _(
4780 b'invalid httprequest: expected format is '
4781 b'invalid httprequest: expected format is '
4781 b'"httprequest <method> <path>'
4782 b'"httprequest <method> <path>'
4782 )
4783 )
4783 )
4784 )
4784
4785
4785 method, httppath = request[1:]
4786 method, httppath = request[1:]
4786 headers = {}
4787 headers = {}
4787 body = None
4788 body = None
4788 frames = []
4789 frames = []
4789 for line in lines:
4790 for line in lines:
4790 line = line.lstrip()
4791 line = line.lstrip()
4791 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4792 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4792 if m:
4793 if m:
4793 # Headers need to use native strings.
4794 # Headers need to use native strings.
4794 key = pycompat.strurl(m.group(1))
4795 key = pycompat.strurl(m.group(1))
4795 value = pycompat.strurl(m.group(2))
4796 value = pycompat.strurl(m.group(2))
4796 headers[key] = value
4797 headers[key] = value
4797 continue
4798 continue
4798
4799
4799 if line.startswith(b'BODYFILE '):
4800 if line.startswith(b'BODYFILE '):
4800 with open(line.split(b' ', 1), b'rb') as fh:
4801 with open(line.split(b' ', 1), b'rb') as fh:
4801 body = fh.read()
4802 body = fh.read()
4802 elif line.startswith(b'frame '):
4803 elif line.startswith(b'frame '):
4803 frame = wireprotoframing.makeframefromhumanstring(
4804 frame = wireprotoframing.makeframefromhumanstring(
4804 line[len(b'frame ') :]
4805 line[len(b'frame ') :]
4805 )
4806 )
4806
4807
4807 frames.append(frame)
4808 frames.append(frame)
4808 else:
4809 else:
4809 raise error.Abort(
4810 raise error.Abort(
4810 _(b'unknown argument to httprequest: %s') % line
4811 _(b'unknown argument to httprequest: %s') % line
4811 )
4812 )
4812
4813
4813 url = path + httppath
4814 url = path + httppath
4814
4815
4815 if frames:
4816 if frames:
4816 body = b''.join(bytes(f) for f in frames)
4817 body = b''.join(bytes(f) for f in frames)
4817
4818
4818 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4819 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4819
4820
4820 # urllib.Request insists on using has_data() as a proxy for
4821 # urllib.Request insists on using has_data() as a proxy for
4821 # determining the request method. Override that to use our
4822 # determining the request method. Override that to use our
4822 # explicitly requested method.
4823 # explicitly requested method.
4823 req.get_method = lambda: pycompat.sysstr(method)
4824 req.get_method = lambda: pycompat.sysstr(method)
4824
4825
4825 try:
4826 try:
4826 res = opener.open(req)
4827 res = opener.open(req)
4827 body = res.read()
4828 body = res.read()
4828 except util.urlerr.urlerror as e:
4829 except util.urlerr.urlerror as e:
4829 # read() method must be called, but only exists in Python 2
4830 # read() method must be called, but only exists in Python 2
4830 getattr(e, 'read', lambda: None)()
4831 getattr(e, 'read', lambda: None)()
4831 continue
4832 continue
4832
4833
4833 ct = res.headers.get('Content-Type')
4834 ct = res.headers.get('Content-Type')
4834 if ct == 'application/mercurial-cbor':
4835 if ct == 'application/mercurial-cbor':
4835 ui.write(
4836 ui.write(
4836 _(b'cbor> %s\n')
4837 _(b'cbor> %s\n')
4837 % stringutil.pprint(
4838 % stringutil.pprint(
4838 cborutil.decodeall(body), bprefix=True, indent=2
4839 cborutil.decodeall(body), bprefix=True, indent=2
4839 )
4840 )
4840 )
4841 )
4841
4842
4842 elif action == b'close':
4843 elif action == b'close':
4843 assert peer is not None
4844 assert peer is not None
4844 peer.close()
4845 peer.close()
4845 elif action == b'readavailable':
4846 elif action == b'readavailable':
4846 if not stdout or not stderr:
4847 if not stdout or not stderr:
4847 raise error.Abort(
4848 raise error.Abort(
4848 _(b'readavailable not available on this peer')
4849 _(b'readavailable not available on this peer')
4849 )
4850 )
4850
4851
4851 stdin.close()
4852 stdin.close()
4852 stdout.read()
4853 stdout.read()
4853 stderr.read()
4854 stderr.read()
4854
4855
4855 elif action == b'readline':
4856 elif action == b'readline':
4856 if not stdout:
4857 if not stdout:
4857 raise error.Abort(_(b'readline not available on this peer'))
4858 raise error.Abort(_(b'readline not available on this peer'))
4858 stdout.readline()
4859 stdout.readline()
4859 elif action == b'ereadline':
4860 elif action == b'ereadline':
4860 if not stderr:
4861 if not stderr:
4861 raise error.Abort(_(b'ereadline not available on this peer'))
4862 raise error.Abort(_(b'ereadline not available on this peer'))
4862 stderr.readline()
4863 stderr.readline()
4863 elif action.startswith(b'read '):
4864 elif action.startswith(b'read '):
4864 count = int(action.split(b' ', 1)[1])
4865 count = int(action.split(b' ', 1)[1])
4865 if not stdout:
4866 if not stdout:
4866 raise error.Abort(_(b'read not available on this peer'))
4867 raise error.Abort(_(b'read not available on this peer'))
4867 stdout.read(count)
4868 stdout.read(count)
4868 elif action.startswith(b'eread '):
4869 elif action.startswith(b'eread '):
4869 count = int(action.split(b' ', 1)[1])
4870 count = int(action.split(b' ', 1)[1])
4870 if not stderr:
4871 if not stderr:
4871 raise error.Abort(_(b'eread not available on this peer'))
4872 raise error.Abort(_(b'eread not available on this peer'))
4872 stderr.read(count)
4873 stderr.read(count)
4873 else:
4874 else:
4874 raise error.Abort(_(b'unknown action: %s') % action)
4875 raise error.Abort(_(b'unknown action: %s') % action)
4875
4876
4876 if batchedcommands is not None:
4877 if batchedcommands is not None:
4877 raise error.Abort(_(b'unclosed "batchbegin" request'))
4878 raise error.Abort(_(b'unclosed "batchbegin" request'))
4878
4879
4879 if peer:
4880 if peer:
4880 peer.close()
4881 peer.close()
4881
4882
4882 if proc:
4883 if proc:
4883 proc.kill()
4884 proc.kill()
@@ -1,519 +1,526 b''
1 # setdiscovery.py - improved discovery of common nodeset for mercurial
1 # setdiscovery.py - improved discovery of common nodeset for mercurial
2 #
2 #
3 # Copyright 2010 Benoit Boissinot <bboissin@gmail.com>
3 # Copyright 2010 Benoit Boissinot <bboissin@gmail.com>
4 # and Peter Arrenbrecht <peter@arrenbrecht.ch>
4 # and Peter Arrenbrecht <peter@arrenbrecht.ch>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8 """
8 """
9 Algorithm works in the following way. You have two repository: local and
9 Algorithm works in the following way. You have two repository: local and
10 remote. They both contains a DAG of changelists.
10 remote. They both contains a DAG of changelists.
11
11
12 The goal of the discovery protocol is to find one set of node *common*,
12 The goal of the discovery protocol is to find one set of node *common*,
13 the set of nodes shared by local and remote.
13 the set of nodes shared by local and remote.
14
14
15 One of the issue with the original protocol was latency, it could
15 One of the issue with the original protocol was latency, it could
16 potentially require lots of roundtrips to discover that the local repo was a
16 potentially require lots of roundtrips to discover that the local repo was a
17 subset of remote (which is a very common case, you usually have few changes
17 subset of remote (which is a very common case, you usually have few changes
18 compared to upstream, while upstream probably had lots of development).
18 compared to upstream, while upstream probably had lots of development).
19
19
20 The new protocol only requires one interface for the remote repo: `known()`,
20 The new protocol only requires one interface for the remote repo: `known()`,
21 which given a set of changelists tells you if they are present in the DAG.
21 which given a set of changelists tells you if they are present in the DAG.
22
22
23 The algorithm then works as follow:
23 The algorithm then works as follow:
24
24
25 - We will be using three sets, `common`, `missing`, `unknown`. Originally
25 - We will be using three sets, `common`, `missing`, `unknown`. Originally
26 all nodes are in `unknown`.
26 all nodes are in `unknown`.
27 - Take a sample from `unknown`, call `remote.known(sample)`
27 - Take a sample from `unknown`, call `remote.known(sample)`
28 - For each node that remote knows, move it and all its ancestors to `common`
28 - For each node that remote knows, move it and all its ancestors to `common`
29 - For each node that remote doesn't know, move it and all its descendants
29 - For each node that remote doesn't know, move it and all its descendants
30 to `missing`
30 to `missing`
31 - Iterate until `unknown` is empty
31 - Iterate until `unknown` is empty
32
32
33 There are a couple optimizations, first is instead of starting with a random
33 There are a couple optimizations, first is instead of starting with a random
34 sample of missing, start by sending all heads, in the case where the local
34 sample of missing, start by sending all heads, in the case where the local
35 repo is a subset, you computed the answer in one round trip.
35 repo is a subset, you computed the answer in one round trip.
36
36
37 Then you can do something similar to the bisecting strategy used when
37 Then you can do something similar to the bisecting strategy used when
38 finding faulty changesets. Instead of random samples, you can try picking
38 finding faulty changesets. Instead of random samples, you can try picking
39 nodes that will maximize the number of nodes that will be
39 nodes that will maximize the number of nodes that will be
40 classified with it (since all ancestors or descendants will be marked as well).
40 classified with it (since all ancestors or descendants will be marked as well).
41 """
41 """
42
42
43
43
44 import collections
44 import collections
45 import random
45 import random
46
46
47 from .i18n import _
47 from .i18n import _
48 from .node import nullrev
48 from .node import nullrev
49 from . import (
49 from . import (
50 error,
50 error,
51 policy,
51 policy,
52 util,
52 util,
53 )
53 )
54
54
55
55
56 def _updatesample(revs, heads, sample, parentfn, quicksamplesize=0):
56 def _updatesample(revs, heads, sample, parentfn, quicksamplesize=0):
57 """update an existing sample to match the expected size
57 """update an existing sample to match the expected size
58
58
59 The sample is updated with revs exponentially distant from each head of the
59 The sample is updated with revs exponentially distant from each head of the
60 <revs> set. (H~1, H~2, H~4, H~8, etc).
60 <revs> set. (H~1, H~2, H~4, H~8, etc).
61
61
62 If a target size is specified, the sampling will stop once this size is
62 If a target size is specified, the sampling will stop once this size is
63 reached. Otherwise sampling will happen until roots of the <revs> set are
63 reached. Otherwise sampling will happen until roots of the <revs> set are
64 reached.
64 reached.
65
65
66 :revs: set of revs we want to discover (if None, assume the whole dag)
66 :revs: set of revs we want to discover (if None, assume the whole dag)
67 :heads: set of DAG head revs
67 :heads: set of DAG head revs
68 :sample: a sample to update
68 :sample: a sample to update
69 :parentfn: a callable to resolve parents for a revision
69 :parentfn: a callable to resolve parents for a revision
70 :quicksamplesize: optional target size of the sample"""
70 :quicksamplesize: optional target size of the sample"""
71 dist = {}
71 dist = {}
72 visit = collections.deque(heads)
72 visit = collections.deque(heads)
73 seen = set()
73 seen = set()
74 factor = 1
74 factor = 1
75 while visit:
75 while visit:
76 curr = visit.popleft()
76 curr = visit.popleft()
77 if curr in seen:
77 if curr in seen:
78 continue
78 continue
79 d = dist.setdefault(curr, 1)
79 d = dist.setdefault(curr, 1)
80 if d > factor:
80 if d > factor:
81 factor *= 2
81 factor *= 2
82 if d == factor:
82 if d == factor:
83 sample.add(curr)
83 sample.add(curr)
84 if quicksamplesize and (len(sample) >= quicksamplesize):
84 if quicksamplesize and (len(sample) >= quicksamplesize):
85 return
85 return
86 seen.add(curr)
86 seen.add(curr)
87
87
88 for p in parentfn(curr):
88 for p in parentfn(curr):
89 if p != nullrev and (not revs or p in revs):
89 if p != nullrev and (not revs or p in revs):
90 dist.setdefault(p, d + 1)
90 dist.setdefault(p, d + 1)
91 visit.append(p)
91 visit.append(p)
92
92
93
93
94 def _limitsample(sample, desiredlen, randomize=True):
94 def _limitsample(sample, desiredlen, randomize=True):
95 """return a random subset of sample of at most desiredlen item.
95 """return a random subset of sample of at most desiredlen item.
96
96
97 If randomize is False, though, a deterministic subset is returned.
97 If randomize is False, though, a deterministic subset is returned.
98 This is meant for integration tests.
98 This is meant for integration tests.
99 """
99 """
100 if len(sample) <= desiredlen:
100 if len(sample) <= desiredlen:
101 return sample
101 return sample
102 if randomize:
102 if randomize:
103 return set(random.sample(sample, desiredlen))
103 return set(random.sample(sample, desiredlen))
104 sample = list(sample)
104 sample = list(sample)
105 sample.sort()
105 sample.sort()
106 return set(sample[:desiredlen])
106 return set(sample[:desiredlen])
107
107
108
108
109 class partialdiscovery:
109 class partialdiscovery:
110 """an object representing ongoing discovery
110 """an object representing ongoing discovery
111
111
112 Feed with data from the remote repository, this object keep track of the
112 Feed with data from the remote repository, this object keep track of the
113 current set of changeset in various states:
113 current set of changeset in various states:
114
114
115 - common: revs also known remotely
115 - common: revs also known remotely
116 - undecided: revs we don't have information on yet
116 - undecided: revs we don't have information on yet
117 - missing: revs missing remotely
117 - missing: revs missing remotely
118 (all tracked revisions are known locally)
118 (all tracked revisions are known locally)
119 """
119 """
120
120
121 def __init__(self, repo, targetheads, respectsize, randomize=True):
121 def __init__(self, repo, targetheads, respectsize, randomize=True):
122 self._repo = repo
122 self._repo = repo
123 self._targetheads = targetheads
123 self._targetheads = targetheads
124 self._common = repo.changelog.incrementalmissingrevs()
124 self._common = repo.changelog.incrementalmissingrevs()
125 self._undecided = None
125 self._undecided = None
126 self.missing = set()
126 self.missing = set()
127 self._childrenmap = None
127 self._childrenmap = None
128 self._respectsize = respectsize
128 self._respectsize = respectsize
129 self.randomize = randomize
129 self.randomize = randomize
130
130
131 def addcommons(self, commons):
131 def addcommons(self, commons):
132 """register nodes known as common"""
132 """register nodes known as common"""
133 self._common.addbases(commons)
133 self._common.addbases(commons)
134 if self._undecided is not None:
134 if self._undecided is not None:
135 self._common.removeancestorsfrom(self._undecided)
135 self._common.removeancestorsfrom(self._undecided)
136
136
137 def addmissings(self, missings):
137 def addmissings(self, missings):
138 """register some nodes as missing"""
138 """register some nodes as missing"""
139 newmissing = self._repo.revs(b'%ld::%ld', missings, self.undecided)
139 newmissing = self._repo.revs(b'%ld::%ld', missings, self.undecided)
140 if newmissing:
140 if newmissing:
141 self.missing.update(newmissing)
141 self.missing.update(newmissing)
142 self.undecided.difference_update(newmissing)
142 self.undecided.difference_update(newmissing)
143
143
144 def addinfo(self, sample):
144 def addinfo(self, sample):
145 """consume an iterable of (rev, known) tuples"""
145 """consume an iterable of (rev, known) tuples"""
146 common = set()
146 common = set()
147 missing = set()
147 missing = set()
148 for rev, known in sample:
148 for rev, known in sample:
149 if known:
149 if known:
150 common.add(rev)
150 common.add(rev)
151 else:
151 else:
152 missing.add(rev)
152 missing.add(rev)
153 if common:
153 if common:
154 self.addcommons(common)
154 self.addcommons(common)
155 if missing:
155 if missing:
156 self.addmissings(missing)
156 self.addmissings(missing)
157
157
158 def hasinfo(self):
158 def hasinfo(self):
159 """return True is we have any clue about the remote state"""
159 """return True is we have any clue about the remote state"""
160 return self._common.hasbases()
160 return self._common.hasbases()
161
161
162 def iscomplete(self):
162 def iscomplete(self):
163 """True if all the necessary data have been gathered"""
163 """True if all the necessary data have been gathered"""
164 return self._undecided is not None and not self._undecided
164 return self._undecided is not None and not self._undecided
165
165
166 @property
166 @property
167 def undecided(self):
167 def undecided(self):
168 if self._undecided is not None:
168 if self._undecided is not None:
169 return self._undecided
169 return self._undecided
170 self._undecided = set(self._common.missingancestors(self._targetheads))
170 self._undecided = set(self._common.missingancestors(self._targetheads))
171 return self._undecided
171 return self._undecided
172
172
173 def stats(self):
173 def stats(self):
174 return {
174 return {
175 'undecided': len(self.undecided),
175 'undecided': len(self.undecided),
176 }
176 }
177
177
178 def commonheads(self):
178 def commonheads(self):
179 """the heads of the known common set"""
179 """the heads of the known common set"""
180 # heads(common) == heads(common.bases) since common represents
180 # heads(common) == heads(common.bases) since common represents
181 # common.bases and all its ancestors
181 # common.bases and all its ancestors
182 return self._common.basesheads()
182 return self._common.basesheads()
183
183
184 def _parentsgetter(self):
184 def _parentsgetter(self):
185 getrev = self._repo.changelog.index.__getitem__
185 getrev = self._repo.changelog.index.__getitem__
186
186
187 def getparents(r):
187 def getparents(r):
188 return getrev(r)[5:7]
188 return getrev(r)[5:7]
189
189
190 return getparents
190 return getparents
191
191
192 def _childrengetter(self):
192 def _childrengetter(self):
193
193
194 if self._childrenmap is not None:
194 if self._childrenmap is not None:
195 # During discovery, the `undecided` set keep shrinking.
195 # During discovery, the `undecided` set keep shrinking.
196 # Therefore, the map computed for an iteration N will be
196 # Therefore, the map computed for an iteration N will be
197 # valid for iteration N+1. Instead of computing the same
197 # valid for iteration N+1. Instead of computing the same
198 # data over and over we cached it the first time.
198 # data over and over we cached it the first time.
199 return self._childrenmap.__getitem__
199 return self._childrenmap.__getitem__
200
200
201 # _updatesample() essentially does interaction over revisions to look
201 # _updatesample() essentially does interaction over revisions to look
202 # up their children. This lookup is expensive and doing it in a loop is
202 # up their children. This lookup is expensive and doing it in a loop is
203 # quadratic. We precompute the children for all relevant revisions and
203 # quadratic. We precompute the children for all relevant revisions and
204 # make the lookup in _updatesample() a simple dict lookup.
204 # make the lookup in _updatesample() a simple dict lookup.
205 self._childrenmap = children = {}
205 self._childrenmap = children = {}
206
206
207 parentrevs = self._parentsgetter()
207 parentrevs = self._parentsgetter()
208 revs = self.undecided
208 revs = self.undecided
209
209
210 for rev in sorted(revs):
210 for rev in sorted(revs):
211 # Always ensure revision has an entry so we don't need to worry
211 # Always ensure revision has an entry so we don't need to worry
212 # about missing keys.
212 # about missing keys.
213 children[rev] = []
213 children[rev] = []
214 for prev in parentrevs(rev):
214 for prev in parentrevs(rev):
215 if prev == nullrev:
215 if prev == nullrev:
216 continue
216 continue
217 c = children.get(prev)
217 c = children.get(prev)
218 if c is not None:
218 if c is not None:
219 c.append(rev)
219 c.append(rev)
220 return children.__getitem__
220 return children.__getitem__
221
221
222 def takequicksample(self, headrevs, size):
222 def takequicksample(self, headrevs, size):
223 """takes a quick sample of size <size>
223 """takes a quick sample of size <size>
224
224
225 It is meant for initial sampling and focuses on querying heads and close
225 It is meant for initial sampling and focuses on querying heads and close
226 ancestors of heads.
226 ancestors of heads.
227
227
228 :headrevs: set of head revisions in local DAG to consider
228 :headrevs: set of head revisions in local DAG to consider
229 :size: the maximum size of the sample"""
229 :size: the maximum size of the sample"""
230 revs = self.undecided
230 revs = self.undecided
231 if len(revs) <= size:
231 if len(revs) <= size:
232 return list(revs)
232 return list(revs)
233 sample = set(self._repo.revs(b'heads(%ld)', revs))
233 sample = set(self._repo.revs(b'heads(%ld)', revs))
234
234
235 if len(sample) >= size:
235 if len(sample) >= size:
236 return _limitsample(sample, size, randomize=self.randomize)
236 return _limitsample(sample, size, randomize=self.randomize)
237
237
238 _updatesample(
238 _updatesample(
239 None, headrevs, sample, self._parentsgetter(), quicksamplesize=size
239 None, headrevs, sample, self._parentsgetter(), quicksamplesize=size
240 )
240 )
241 return sample
241 return sample
242
242
243 def takefullsample(self, headrevs, size):
243 def takefullsample(self, headrevs, size):
244 revs = self.undecided
244 revs = self.undecided
245 if len(revs) <= size:
245 if len(revs) <= size:
246 return list(revs)
246 return list(revs)
247 repo = self._repo
247 repo = self._repo
248 sample = set(repo.revs(b'heads(%ld)', revs))
248 sample = set(repo.revs(b'heads(%ld)', revs))
249 parentrevs = self._parentsgetter()
249 parentrevs = self._parentsgetter()
250
250
251 # update from heads
251 # update from heads
252 revsheads = sample.copy()
252 revsheads = sample.copy()
253 _updatesample(revs, revsheads, sample, parentrevs)
253 _updatesample(revs, revsheads, sample, parentrevs)
254
254
255 # update from roots
255 # update from roots
256 revsroots = set(repo.revs(b'roots(%ld)', revs))
256 revsroots = set(repo.revs(b'roots(%ld)', revs))
257 childrenrevs = self._childrengetter()
257 childrenrevs = self._childrengetter()
258 _updatesample(revs, revsroots, sample, childrenrevs)
258 _updatesample(revs, revsroots, sample, childrenrevs)
259 assert sample
259 assert sample
260
260
261 if not self._respectsize:
261 if not self._respectsize:
262 size = max(size, min(len(revsroots), len(revsheads)))
262 size = max(size, min(len(revsroots), len(revsheads)))
263
263
264 sample = _limitsample(sample, size, randomize=self.randomize)
264 sample = _limitsample(sample, size, randomize=self.randomize)
265 if len(sample) < size:
265 if len(sample) < size:
266 more = size - len(sample)
266 more = size - len(sample)
267 takefrom = list(revs - sample)
267 takefrom = list(revs - sample)
268 if self.randomize:
268 if self.randomize:
269 sample.update(random.sample(takefrom, more))
269 sample.update(random.sample(takefrom, more))
270 else:
270 else:
271 takefrom.sort()
271 takefrom.sort()
272 sample.update(takefrom[:more])
272 sample.update(takefrom[:more])
273 return sample
273 return sample
274
274
275
275
276 pure_partialdiscovery = partialdiscovery
276 pure_partialdiscovery = partialdiscovery
277
277
278 partialdiscovery = policy.importrust(
278 partialdiscovery = policy.importrust(
279 'discovery', member='PartialDiscovery', default=partialdiscovery
279 'discovery', member='PartialDiscovery', default=partialdiscovery
280 )
280 )
281
281
282
282
283 def findcommonheads(
283 def findcommonheads(
284 ui,
284 ui,
285 local,
285 local,
286 remote,
286 remote,
287 abortwhenunrelated=True,
287 abortwhenunrelated=True,
288 ancestorsof=None,
288 ancestorsof=None,
289 audit=None,
289 audit=None,
290 ):
290 ):
291 """Return a tuple (common, anyincoming, remoteheads) used to identify
291 """Return a tuple (common, anyincoming, remoteheads) used to identify
292 missing nodes from or in remote.
292 missing nodes from or in remote.
293
293
294 The audit argument is an optional dictionnary that a caller can pass. it
294 The audit argument is an optional dictionnary that a caller can pass. it
295 will be updated with extra data about the discovery, this is useful for
295 will be updated with extra data about the discovery, this is useful for
296 debug.
296 debug.
297 """
297 """
298
298
299 samplegrowth = float(ui.config(b'devel', b'discovery.grow-sample.rate'))
299 samplegrowth = float(ui.config(b'devel', b'discovery.grow-sample.rate'))
300
300
301 if audit is not None:
302 audit[b'total-queries'] = 0
303
301 start = util.timer()
304 start = util.timer()
302
305
303 roundtrips = 0
306 roundtrips = 0
304 cl = local.changelog
307 cl = local.changelog
305 clnode = cl.node
308 clnode = cl.node
306 clrev = cl.rev
309 clrev = cl.rev
307
310
308 if ancestorsof is not None:
311 if ancestorsof is not None:
309 ownheads = [clrev(n) for n in ancestorsof]
312 ownheads = [clrev(n) for n in ancestorsof]
310 else:
313 else:
311 ownheads = [rev for rev in cl.headrevs() if rev != nullrev]
314 ownheads = [rev for rev in cl.headrevs() if rev != nullrev]
312
315
313 initial_head_exchange = ui.configbool(b'devel', b'discovery.exchange-heads')
316 initial_head_exchange = ui.configbool(b'devel', b'discovery.exchange-heads')
314 initialsamplesize = ui.configint(b'devel', b'discovery.sample-size.initial')
317 initialsamplesize = ui.configint(b'devel', b'discovery.sample-size.initial')
315 fullsamplesize = ui.configint(b'devel', b'discovery.sample-size')
318 fullsamplesize = ui.configint(b'devel', b'discovery.sample-size')
316 # We also ask remote about all the local heads. That set can be arbitrarily
319 # We also ask remote about all the local heads. That set can be arbitrarily
317 # large, so we used to limit it size to `initialsamplesize`. We no longer
320 # large, so we used to limit it size to `initialsamplesize`. We no longer
318 # do as it proved counter productive. The skipped heads could lead to a
321 # do as it proved counter productive. The skipped heads could lead to a
319 # large "undecided" set, slower to be clarified than if we asked the
322 # large "undecided" set, slower to be clarified than if we asked the
320 # question for all heads right away.
323 # question for all heads right away.
321 #
324 #
322 # We are already fetching all server heads using the `heads` commands,
325 # We are already fetching all server heads using the `heads` commands,
323 # sending a equivalent number of heads the other way should not have a
326 # sending a equivalent number of heads the other way should not have a
324 # significant impact. In addition, it is very likely that we are going to
327 # significant impact. In addition, it is very likely that we are going to
325 # have to issue "known" request for an equivalent amount of revisions in
328 # have to issue "known" request for an equivalent amount of revisions in
326 # order to decide if theses heads are common or missing.
329 # order to decide if theses heads are common or missing.
327 #
330 #
328 # find a detailled analysis below.
331 # find a detailled analysis below.
329 #
332 #
330 # Case A: local and server both has few heads
333 # Case A: local and server both has few heads
331 #
334 #
332 # Ownheads is below initialsamplesize, limit would not have any effect.
335 # Ownheads is below initialsamplesize, limit would not have any effect.
333 #
336 #
334 # Case B: local has few heads and server has many
337 # Case B: local has few heads and server has many
335 #
338 #
336 # Ownheads is below initialsamplesize, limit would not have any effect.
339 # Ownheads is below initialsamplesize, limit would not have any effect.
337 #
340 #
338 # Case C: local and server both has many heads
341 # Case C: local and server both has many heads
339 #
342 #
340 # We now transfert some more data, but not significantly more than is
343 # We now transfert some more data, but not significantly more than is
341 # already transfered to carry the server heads.
344 # already transfered to carry the server heads.
342 #
345 #
343 # Case D: local has many heads, server has few
346 # Case D: local has many heads, server has few
344 #
347 #
345 # D.1 local heads are mostly known remotely
348 # D.1 local heads are mostly known remotely
346 #
349 #
347 # All the known head will have be part of a `known` request at some
350 # All the known head will have be part of a `known` request at some
348 # point for the discovery to finish. Sending them all earlier is
351 # point for the discovery to finish. Sending them all earlier is
349 # actually helping.
352 # actually helping.
350 #
353 #
351 # (This case is fairly unlikely, it requires the numerous heads to all
354 # (This case is fairly unlikely, it requires the numerous heads to all
352 # be merged server side in only a few heads)
355 # be merged server side in only a few heads)
353 #
356 #
354 # D.2 local heads are mostly missing remotely
357 # D.2 local heads are mostly missing remotely
355 #
358 #
356 # To determine that the heads are missing, we'll have to issue `known`
359 # To determine that the heads are missing, we'll have to issue `known`
357 # request for them or one of their ancestors. This amount of `known`
360 # request for them or one of their ancestors. This amount of `known`
358 # request will likely be in the same order of magnitude than the amount
361 # request will likely be in the same order of magnitude than the amount
359 # of local heads.
362 # of local heads.
360 #
363 #
361 # The only case where we can be more efficient using `known` request on
364 # The only case where we can be more efficient using `known` request on
362 # ancestors are case were all the "missing" local heads are based on a
365 # ancestors are case were all the "missing" local heads are based on a
363 # few changeset, also "missing". This means we would have a "complex"
366 # few changeset, also "missing". This means we would have a "complex"
364 # graph (with many heads) attached to, but very independant to a the
367 # graph (with many heads) attached to, but very independant to a the
365 # "simple" graph on the server. This is a fairly usual case and have
368 # "simple" graph on the server. This is a fairly usual case and have
366 # not been met in the wild so far.
369 # not been met in the wild so far.
367 if initial_head_exchange:
370 if initial_head_exchange:
368 if remote.limitedarguments:
371 if remote.limitedarguments:
369 sample = _limitsample(ownheads, initialsamplesize)
372 sample = _limitsample(ownheads, initialsamplesize)
370 # indices between sample and externalized version must match
373 # indices between sample and externalized version must match
371 sample = list(sample)
374 sample = list(sample)
372 else:
375 else:
373 sample = ownheads
376 sample = ownheads
374
377
375 ui.debug(b"query 1; heads\n")
378 ui.debug(b"query 1; heads\n")
376 roundtrips += 1
379 roundtrips += 1
377 with remote.commandexecutor() as e:
380 with remote.commandexecutor() as e:
378 fheads = e.callcommand(b'heads', {})
381 fheads = e.callcommand(b'heads', {})
382 if audit is not None:
383 audit[b'total-queries'] += len(sample)
379 fknown = e.callcommand(
384 fknown = e.callcommand(
380 b'known',
385 b'known',
381 {
386 {
382 b'nodes': [clnode(r) for r in sample],
387 b'nodes': [clnode(r) for r in sample],
383 },
388 },
384 )
389 )
385
390
386 srvheadhashes, yesno = fheads.result(), fknown.result()
391 srvheadhashes, yesno = fheads.result(), fknown.result()
387
392
388 if audit is not None:
393 if audit is not None:
389 audit[b'total-roundtrips'] = 1
394 audit[b'total-roundtrips'] = 1
390
395
391 if cl.tiprev() == nullrev:
396 if cl.tiprev() == nullrev:
392 if srvheadhashes != [cl.nullid]:
397 if srvheadhashes != [cl.nullid]:
393 return [cl.nullid], True, srvheadhashes
398 return [cl.nullid], True, srvheadhashes
394 return [cl.nullid], False, []
399 return [cl.nullid], False, []
395 else:
400 else:
396 # we still need the remote head for the function return
401 # we still need the remote head for the function return
397 with remote.commandexecutor() as e:
402 with remote.commandexecutor() as e:
398 fheads = e.callcommand(b'heads', {})
403 fheads = e.callcommand(b'heads', {})
399 srvheadhashes = fheads.result()
404 srvheadhashes = fheads.result()
400
405
401 # start actual discovery (we note this before the next "if" for
406 # start actual discovery (we note this before the next "if" for
402 # compatibility reasons)
407 # compatibility reasons)
403 ui.status(_(b"searching for changes\n"))
408 ui.status(_(b"searching for changes\n"))
404
409
405 knownsrvheads = [] # revnos of remote heads that are known locally
410 knownsrvheads = [] # revnos of remote heads that are known locally
406 for node in srvheadhashes:
411 for node in srvheadhashes:
407 if node == cl.nullid:
412 if node == cl.nullid:
408 continue
413 continue
409
414
410 try:
415 try:
411 knownsrvheads.append(clrev(node))
416 knownsrvheads.append(clrev(node))
412 # Catches unknown and filtered nodes.
417 # Catches unknown and filtered nodes.
413 except error.LookupError:
418 except error.LookupError:
414 continue
419 continue
415
420
416 if initial_head_exchange:
421 if initial_head_exchange:
417 # early exit if we know all the specified remote heads already
422 # early exit if we know all the specified remote heads already
418 if len(knownsrvheads) == len(srvheadhashes):
423 if len(knownsrvheads) == len(srvheadhashes):
419 ui.debug(b"all remote heads known locally\n")
424 ui.debug(b"all remote heads known locally\n")
420 return srvheadhashes, False, srvheadhashes
425 return srvheadhashes, False, srvheadhashes
421
426
422 if len(sample) == len(ownheads) and all(yesno):
427 if len(sample) == len(ownheads) and all(yesno):
423 ui.note(_(b"all local changesets known remotely\n"))
428 ui.note(_(b"all local changesets known remotely\n"))
424 ownheadhashes = [clnode(r) for r in ownheads]
429 ownheadhashes = [clnode(r) for r in ownheads]
425 return ownheadhashes, True, srvheadhashes
430 return ownheadhashes, True, srvheadhashes
426
431
427 # full blown discovery
432 # full blown discovery
428
433
429 # if the server has a limit to its arguments size, we can't grow the sample.
434 # if the server has a limit to its arguments size, we can't grow the sample.
430 configbool = local.ui.configbool
435 configbool = local.ui.configbool
431 grow_sample = configbool(b'devel', b'discovery.grow-sample')
436 grow_sample = configbool(b'devel', b'discovery.grow-sample')
432 grow_sample = grow_sample and not remote.limitedarguments
437 grow_sample = grow_sample and not remote.limitedarguments
433
438
434 dynamic_sample = configbool(b'devel', b'discovery.grow-sample.dynamic')
439 dynamic_sample = configbool(b'devel', b'discovery.grow-sample.dynamic')
435 hard_limit_sample = not (dynamic_sample or remote.limitedarguments)
440 hard_limit_sample = not (dynamic_sample or remote.limitedarguments)
436
441
437 randomize = ui.configbool(b'devel', b'discovery.randomize')
442 randomize = ui.configbool(b'devel', b'discovery.randomize')
438 if cl.index.rust_ext_compat:
443 if cl.index.rust_ext_compat:
439 pd = partialdiscovery
444 pd = partialdiscovery
440 else:
445 else:
441 pd = pure_partialdiscovery
446 pd = pure_partialdiscovery
442 disco = pd(local, ownheads, hard_limit_sample, randomize=randomize)
447 disco = pd(local, ownheads, hard_limit_sample, randomize=randomize)
443 if initial_head_exchange:
448 if initial_head_exchange:
444 # treat remote heads (and maybe own heads) as a first implicit sample
449 # treat remote heads (and maybe own heads) as a first implicit sample
445 # response
450 # response
446 disco.addcommons(knownsrvheads)
451 disco.addcommons(knownsrvheads)
447 disco.addinfo(zip(sample, yesno))
452 disco.addinfo(zip(sample, yesno))
448
453
449 full = not initial_head_exchange
454 full = not initial_head_exchange
450 progress = ui.makeprogress(_(b'searching'), unit=_(b'queries'))
455 progress = ui.makeprogress(_(b'searching'), unit=_(b'queries'))
451 while not disco.iscomplete():
456 while not disco.iscomplete():
452
457
453 if full or disco.hasinfo():
458 if full or disco.hasinfo():
454 if full:
459 if full:
455 ui.note(_(b"sampling from both directions\n"))
460 ui.note(_(b"sampling from both directions\n"))
456 else:
461 else:
457 ui.debug(b"taking initial sample\n")
462 ui.debug(b"taking initial sample\n")
458 samplefunc = disco.takefullsample
463 samplefunc = disco.takefullsample
459 targetsize = fullsamplesize
464 targetsize = fullsamplesize
460 if grow_sample:
465 if grow_sample:
461 fullsamplesize = int(fullsamplesize * samplegrowth)
466 fullsamplesize = int(fullsamplesize * samplegrowth)
462 else:
467 else:
463 # use even cheaper initial sample
468 # use even cheaper initial sample
464 ui.debug(b"taking quick initial sample\n")
469 ui.debug(b"taking quick initial sample\n")
465 samplefunc = disco.takequicksample
470 samplefunc = disco.takequicksample
466 targetsize = initialsamplesize
471 targetsize = initialsamplesize
467 sample = samplefunc(ownheads, targetsize)
472 sample = samplefunc(ownheads, targetsize)
468
473
469 roundtrips += 1
474 roundtrips += 1
470 progress.update(roundtrips)
475 progress.update(roundtrips)
471 stats = disco.stats()
476 stats = disco.stats()
472 ui.debug(
477 ui.debug(
473 b"query %i; still undecided: %i, sample size is: %i\n"
478 b"query %i; still undecided: %i, sample size is: %i\n"
474 % (roundtrips, stats['undecided'], len(sample))
479 % (roundtrips, stats['undecided'], len(sample))
475 )
480 )
476
481
477 # indices between sample and externalized version must match
482 # indices between sample and externalized version must match
478 sample = list(sample)
483 sample = list(sample)
479
484
480 with remote.commandexecutor() as e:
485 with remote.commandexecutor() as e:
486 if audit is not None:
487 audit[b'total-queries'] += len(sample)
481 yesno = e.callcommand(
488 yesno = e.callcommand(
482 b'known',
489 b'known',
483 {
490 {
484 b'nodes': [clnode(r) for r in sample],
491 b'nodes': [clnode(r) for r in sample],
485 },
492 },
486 ).result()
493 ).result()
487
494
488 full = True
495 full = True
489
496
490 disco.addinfo(zip(sample, yesno))
497 disco.addinfo(zip(sample, yesno))
491
498
492 result = disco.commonheads()
499 result = disco.commonheads()
493 elapsed = util.timer() - start
500 elapsed = util.timer() - start
494 progress.complete()
501 progress.complete()
495 ui.debug(b"%d total queries in %.4fs\n" % (roundtrips, elapsed))
502 ui.debug(b"%d total queries in %.4fs\n" % (roundtrips, elapsed))
496 msg = (
503 msg = (
497 b'found %d common and %d unknown server heads,'
504 b'found %d common and %d unknown server heads,'
498 b' %d roundtrips in %.4fs\n'
505 b' %d roundtrips in %.4fs\n'
499 )
506 )
500 missing = set(result) - set(knownsrvheads)
507 missing = set(result) - set(knownsrvheads)
501 ui.log(b'discovery', msg, len(result), len(missing), roundtrips, elapsed)
508 ui.log(b'discovery', msg, len(result), len(missing), roundtrips, elapsed)
502
509
503 if audit is not None:
510 if audit is not None:
504 audit[b'total-roundtrips'] = roundtrips
511 audit[b'total-roundtrips'] = roundtrips
505
512
506 if not result and srvheadhashes != [cl.nullid]:
513 if not result and srvheadhashes != [cl.nullid]:
507 if abortwhenunrelated:
514 if abortwhenunrelated:
508 raise error.Abort(_(b"repository is unrelated"))
515 raise error.Abort(_(b"repository is unrelated"))
509 else:
516 else:
510 ui.warn(_(b"warning: repository is unrelated\n"))
517 ui.warn(_(b"warning: repository is unrelated\n"))
511 return (
518 return (
512 {cl.nullid},
519 {cl.nullid},
513 True,
520 True,
514 srvheadhashes,
521 srvheadhashes,
515 )
522 )
516
523
517 anyincoming = srvheadhashes != [cl.nullid]
524 anyincoming = srvheadhashes != [cl.nullid]
518 result = {clnode(r) for r in result}
525 result = {clnode(r) for r in result}
519 return result, anyincoming, srvheadhashes
526 return result, anyincoming, srvheadhashes
@@ -1,186 +1,194 b''
1 # discovery.py - protocol changeset discovery functions
1 # discovery.py - protocol changeset discovery functions
2 #
2 #
3 # Copyright 2010 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2010 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import collections
9 import collections
10
10
11 from .i18n import _
11 from .i18n import _
12 from .node import short
12 from .node import short
13 from . import (
13 from . import (
14 error,
14 error,
15 pycompat,
15 pycompat,
16 )
16 )
17
17
18
18
19 def findcommonincoming(repo, remote, heads=None, force=False, audit=None):
19 def findcommonincoming(repo, remote, heads=None, force=False, audit=None):
20 """Return a tuple (common, fetch, heads) used to identify the common
20 """Return a tuple (common, fetch, heads) used to identify the common
21 subset of nodes between repo and remote.
21 subset of nodes between repo and remote.
22
22
23 "common" is a list of (at least) the heads of the common subset.
23 "common" is a list of (at least) the heads of the common subset.
24 "fetch" is a list of roots of the nodes that would be incoming, to be
24 "fetch" is a list of roots of the nodes that would be incoming, to be
25 supplied to changegroupsubset.
25 supplied to changegroupsubset.
26 "heads" is either the supplied heads, or else the remote's heads.
26 "heads" is either the supplied heads, or else the remote's heads.
27 """
27 """
28
28
29 knownnode = repo.changelog.hasnode
29 knownnode = repo.changelog.hasnode
30 search = []
30 search = []
31 fetch = set()
31 fetch = set()
32 seen = set()
32 seen = set()
33 seenbranch = set()
33 seenbranch = set()
34 base = set()
34 base = set()
35
35
36 if not heads:
36 if not heads:
37 with remote.commandexecutor() as e:
37 with remote.commandexecutor() as e:
38 heads = e.callcommand(b'heads', {}).result()
38 heads = e.callcommand(b'heads', {}).result()
39
39
40 if audit is not None:
40 if audit is not None:
41 audit[b'total-roundtrips'] = 1
41 audit[b'total-roundtrips'] = 1
42 audit[b'total-queries'] = 0
42
43
43 if repo.changelog.tip() == repo.nullid:
44 if repo.changelog.tip() == repo.nullid:
44 base.add(repo.nullid)
45 base.add(repo.nullid)
45 if heads != [repo.nullid]:
46 if heads != [repo.nullid]:
46 return [repo.nullid], [repo.nullid], list(heads)
47 return [repo.nullid], [repo.nullid], list(heads)
47 return [repo.nullid], [], heads
48 return [repo.nullid], [], heads
48
49
49 # assume we're closer to the tip than the root
50 # assume we're closer to the tip than the root
50 # and start by examining the heads
51 # and start by examining the heads
51 repo.ui.status(_(b"searching for changes\n"))
52 repo.ui.status(_(b"searching for changes\n"))
52
53
53 unknown = []
54 unknown = []
54 for h in heads:
55 for h in heads:
55 if not knownnode(h):
56 if not knownnode(h):
56 unknown.append(h)
57 unknown.append(h)
57 else:
58 else:
58 base.add(h)
59 base.add(h)
59
60
60 if not unknown:
61 if not unknown:
61 return list(base), [], list(heads)
62 return list(base), [], list(heads)
62
63
63 req = set(unknown)
64 req = set(unknown)
64 reqcnt = 0
65 reqcnt = 0
65 progress = repo.ui.makeprogress(_(b'searching'), unit=_(b'queries'))
66 progress = repo.ui.makeprogress(_(b'searching'), unit=_(b'queries'))
66
67
67 # search through remote branches
68 # search through remote branches
68 # a 'branch' here is a linear segment of history, with four parts:
69 # a 'branch' here is a linear segment of history, with four parts:
69 # head, root, first parent, second parent
70 # head, root, first parent, second parent
70 # (a branch always has two parents (or none) by definition)
71 # (a branch always has two parents (or none) by definition)
71 with remote.commandexecutor() as e:
72 with remote.commandexecutor() as e:
73 if audit is not None:
74 audit[b'total-queries'] += len(unknown)
72 branches = e.callcommand(b'branches', {b'nodes': unknown}).result()
75 branches = e.callcommand(b'branches', {b'nodes': unknown}).result()
73
76
74 unknown = collections.deque(branches)
77 unknown = collections.deque(branches)
75 while unknown:
78 while unknown:
76 r = []
79 r = []
77 while unknown:
80 while unknown:
78 n = unknown.popleft()
81 n = unknown.popleft()
79 if n[0] in seen:
82 if n[0] in seen:
80 continue
83 continue
81
84
82 repo.ui.debug(b"examining %s:%s\n" % (short(n[0]), short(n[1])))
85 repo.ui.debug(b"examining %s:%s\n" % (short(n[0]), short(n[1])))
83 if n[0] == repo.nullid: # found the end of the branch
86 if n[0] == repo.nullid: # found the end of the branch
84 pass
87 pass
85 elif n in seenbranch:
88 elif n in seenbranch:
86 repo.ui.debug(b"branch already found\n")
89 repo.ui.debug(b"branch already found\n")
87 continue
90 continue
88 elif n[1] and knownnode(n[1]): # do we know the base?
91 elif n[1] and knownnode(n[1]): # do we know the base?
89 repo.ui.debug(
92 repo.ui.debug(
90 b"found incomplete branch %s:%s\n"
93 b"found incomplete branch %s:%s\n"
91 % (short(n[0]), short(n[1]))
94 % (short(n[0]), short(n[1]))
92 )
95 )
93 search.append(n[0:2]) # schedule branch range for scanning
96 search.append(n[0:2]) # schedule branch range for scanning
94 seenbranch.add(n)
97 seenbranch.add(n)
95 else:
98 else:
96 if n[1] not in seen and n[1] not in fetch:
99 if n[1] not in seen and n[1] not in fetch:
97 if knownnode(n[2]) and knownnode(n[3]):
100 if knownnode(n[2]) and knownnode(n[3]):
98 repo.ui.debug(b"found new changeset %s\n" % short(n[1]))
101 repo.ui.debug(b"found new changeset %s\n" % short(n[1]))
99 fetch.add(n[1]) # earliest unknown
102 fetch.add(n[1]) # earliest unknown
100 for p in n[2:4]:
103 for p in n[2:4]:
101 if knownnode(p):
104 if knownnode(p):
102 base.add(p) # latest known
105 base.add(p) # latest known
103
106
104 for p in n[2:4]:
107 for p in n[2:4]:
105 if p not in req and not knownnode(p):
108 if p not in req and not knownnode(p):
106 r.append(p)
109 r.append(p)
107 req.add(p)
110 req.add(p)
108 seen.add(n[0])
111 seen.add(n[0])
109
112
110 if r:
113 if r:
111 reqcnt += 1
114 reqcnt += 1
112 progress.increment()
115 progress.increment()
113 repo.ui.debug(
116 repo.ui.debug(
114 b"request %d: %s\n" % (reqcnt, b" ".join(map(short, r)))
117 b"request %d: %s\n" % (reqcnt, b" ".join(map(short, r)))
115 )
118 )
116 for p in pycompat.xrange(0, len(r), 10):
119 for p in pycompat.xrange(0, len(r), 10):
117 with remote.commandexecutor() as e:
120 with remote.commandexecutor() as e:
121 subset = r[p : p + 10]
122 if audit is not None:
123 audit[b'total-queries'] += len(subset)
118 branches = e.callcommand(
124 branches = e.callcommand(
119 b'branches',
125 b'branches',
120 {
126 {
121 b'nodes': r[p : p + 10],
127 b'nodes': subset,
122 },
128 },
123 ).result()
129 ).result()
124
130
125 for b in branches:
131 for b in branches:
126 repo.ui.debug(
132 repo.ui.debug(
127 b"received %s:%s\n" % (short(b[0]), short(b[1]))
133 b"received %s:%s\n" % (short(b[0]), short(b[1]))
128 )
134 )
129 unknown.append(b)
135 unknown.append(b)
130
136
131 # do binary search on the branches we found
137 # do binary search on the branches we found
132 while search:
138 while search:
133 newsearch = []
139 newsearch = []
134 reqcnt += 1
140 reqcnt += 1
135 progress.increment()
141 progress.increment()
136
142
137 with remote.commandexecutor() as e:
143 with remote.commandexecutor() as e:
144 if audit is not None:
145 audit[b'total-queries'] += len(search)
138 between = e.callcommand(b'between', {b'pairs': search}).result()
146 between = e.callcommand(b'between', {b'pairs': search}).result()
139
147
140 for n, l in zip(search, between):
148 for n, l in zip(search, between):
141 l.append(n[1])
149 l.append(n[1])
142 p = n[0]
150 p = n[0]
143 f = 1
151 f = 1
144 for i in l:
152 for i in l:
145 repo.ui.debug(b"narrowing %d:%d %s\n" % (f, len(l), short(i)))
153 repo.ui.debug(b"narrowing %d:%d %s\n" % (f, len(l), short(i)))
146 if knownnode(i):
154 if knownnode(i):
147 if f <= 2:
155 if f <= 2:
148 repo.ui.debug(
156 repo.ui.debug(
149 b"found new branch changeset %s\n" % short(p)
157 b"found new branch changeset %s\n" % short(p)
150 )
158 )
151 fetch.add(p)
159 fetch.add(p)
152 base.add(i)
160 base.add(i)
153 else:
161 else:
154 repo.ui.debug(
162 repo.ui.debug(
155 b"narrowed branch search to %s:%s\n"
163 b"narrowed branch search to %s:%s\n"
156 % (short(p), short(i))
164 % (short(p), short(i))
157 )
165 )
158 newsearch.append((p, i))
166 newsearch.append((p, i))
159 break
167 break
160 p, f = i, f * 2
168 p, f = i, f * 2
161 search = newsearch
169 search = newsearch
162
170
163 # sanity check our fetch list
171 # sanity check our fetch list
164 for f in fetch:
172 for f in fetch:
165 if knownnode(f):
173 if knownnode(f):
166 raise error.RepoError(_(b"already have changeset ") + short(f[:4]))
174 raise error.RepoError(_(b"already have changeset ") + short(f[:4]))
167
175
168 base = list(base)
176 base = list(base)
169 if base == [repo.nullid]:
177 if base == [repo.nullid]:
170 if force:
178 if force:
171 repo.ui.warn(_(b"warning: repository is unrelated\n"))
179 repo.ui.warn(_(b"warning: repository is unrelated\n"))
172 else:
180 else:
173 raise error.Abort(_(b"repository is unrelated"))
181 raise error.Abort(_(b"repository is unrelated"))
174
182
175 repo.ui.debug(
183 repo.ui.debug(
176 b"found new changesets starting at "
184 b"found new changesets starting at "
177 + b" ".join([short(f) for f in fetch])
185 + b" ".join([short(f) for f in fetch])
178 + b"\n"
186 + b"\n"
179 )
187 )
180
188
181 progress.complete()
189 progress.complete()
182 repo.ui.debug(b"%d total queries\n" % reqcnt)
190 repo.ui.debug(b"%d total queries\n" % reqcnt)
183 if audit is not None:
191 if audit is not None:
184 audit[b'total-roundtrips'] = reqcnt
192 audit[b'total-roundtrips'] = reqcnt
185
193
186 return base, list(fetch), heads
194 return base, list(fetch), heads
@@ -1,1762 +1,1806 b''
1
1
2 Function to test discovery between two repos in both directions, using both the local shortcut
2 Function to test discovery between two repos in both directions, using both the local shortcut
3 (which is currently not activated by default) and the full remotable protocol:
3 (which is currently not activated by default) and the full remotable protocol:
4
4
5 $ testdesc() { # revs_a, revs_b, dagdesc
5 $ testdesc() { # revs_a, revs_b, dagdesc
6 > if [ -d foo ]; then rm -rf foo; fi
6 > if [ -d foo ]; then rm -rf foo; fi
7 > hg init foo
7 > hg init foo
8 > cd foo
8 > cd foo
9 > hg debugbuilddag "$3"
9 > hg debugbuilddag "$3"
10 > hg clone . a $1 --quiet
10 > hg clone . a $1 --quiet
11 > hg clone . b $2 --quiet
11 > hg clone . b $2 --quiet
12 > echo
12 > echo
13 > echo "% -- a -> b tree"
13 > echo "% -- a -> b tree"
14 > hg -R a debugdiscovery b --verbose --old
14 > hg -R a debugdiscovery b --verbose --old
15 > echo
15 > echo
16 > echo "% -- a -> b set"
16 > echo "% -- a -> b set"
17 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true
17 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true
18 > echo
18 > echo
19 > echo "% -- a -> b set (tip only)"
19 > echo "% -- a -> b set (tip only)"
20 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true --rev tip
20 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true --rev tip
21 > echo
21 > echo
22 > echo "% -- b -> a tree"
22 > echo "% -- b -> a tree"
23 > hg -R b debugdiscovery a --verbose --old
23 > hg -R b debugdiscovery a --verbose --old
24 > echo
24 > echo
25 > echo "% -- b -> a set"
25 > echo "% -- b -> a set"
26 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true
26 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true
27 > echo
27 > echo
28 > echo "% -- b -> a set (tip only)"
28 > echo "% -- b -> a set (tip only)"
29 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true --rev tip
29 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true --rev tip
30 > cd ..
30 > cd ..
31 > }
31 > }
32
32
33
33
34 Small superset:
34 Small superset:
35
35
36 $ testdesc '-ra1 -ra2' '-rb1 -rb2 -rb3' '
36 $ testdesc '-ra1 -ra2' '-rb1 -rb2 -rb3' '
37 > +2:f +1:a1:b1
37 > +2:f +1:a1:b1
38 > <f +4 :a2
38 > <f +4 :a2
39 > +5 :b2
39 > +5 :b2
40 > <f +3 :b3'
40 > <f +3 :b3'
41
41
42 % -- a -> b tree
42 % -- a -> b tree
43 comparing with b
43 comparing with b
44 searching for changes
44 searching for changes
45 unpruned common: 01241442b3c2 66f7d451a68b b5714e113bc0
45 unpruned common: 01241442b3c2 66f7d451a68b b5714e113bc0
46 elapsed time: * seconds (glob)
46 elapsed time: * seconds (glob)
47 round-trips: 2
47 round-trips: 2
48 queries: 6
48 heads summary:
49 heads summary:
49 total common heads: 2
50 total common heads: 2
50 also local heads: 2
51 also local heads: 2
51 also remote heads: 1
52 also remote heads: 1
52 both: 1
53 both: 1
53 local heads: 2
54 local heads: 2
54 common: 2
55 common: 2
55 missing: 0
56 missing: 0
56 remote heads: 3
57 remote heads: 3
57 common: 1
58 common: 1
58 unknown: 2
59 unknown: 2
59 local changesets: 7
60 local changesets: 7
60 common: 7
61 common: 7
61 heads: 2
62 heads: 2
62 roots: 1
63 roots: 1
63 missing: 0
64 missing: 0
64 heads: 0
65 heads: 0
65 roots: 0
66 roots: 0
66 first undecided set: 3
67 first undecided set: 3
67 heads: 1
68 heads: 1
68 roots: 1
69 roots: 1
69 common: 3
70 common: 3
70 missing: 0
71 missing: 0
71 common heads: 01241442b3c2 b5714e113bc0
72 common heads: 01241442b3c2 b5714e113bc0
72
73
73 % -- a -> b set
74 % -- a -> b set
74 comparing with b
75 comparing with b
75 query 1; heads
76 query 1; heads
76 searching for changes
77 searching for changes
77 all local changesets known remotely
78 all local changesets known remotely
78 elapsed time: * seconds (glob)
79 elapsed time: * seconds (glob)
79 round-trips: 1
80 round-trips: 1
81 queries: 2
80 heads summary:
82 heads summary:
81 total common heads: 2
83 total common heads: 2
82 also local heads: 2
84 also local heads: 2
83 also remote heads: 1
85 also remote heads: 1
84 both: 1
86 both: 1
85 local heads: 2
87 local heads: 2
86 common: 2
88 common: 2
87 missing: 0
89 missing: 0
88 remote heads: 3
90 remote heads: 3
89 common: 1
91 common: 1
90 unknown: 2
92 unknown: 2
91 local changesets: 7
93 local changesets: 7
92 common: 7
94 common: 7
93 heads: 2
95 heads: 2
94 roots: 1
96 roots: 1
95 missing: 0
97 missing: 0
96 heads: 0
98 heads: 0
97 roots: 0
99 roots: 0
98 first undecided set: 3
100 first undecided set: 3
99 heads: 1
101 heads: 1
100 roots: 1
102 roots: 1
101 common: 3
103 common: 3
102 missing: 0
104 missing: 0
103 common heads: 01241442b3c2 b5714e113bc0
105 common heads: 01241442b3c2 b5714e113bc0
104
106
105 % -- a -> b set (tip only)
107 % -- a -> b set (tip only)
106 comparing with b
108 comparing with b
107 query 1; heads
109 query 1; heads
108 searching for changes
110 searching for changes
109 all local changesets known remotely
111 all local changesets known remotely
110 elapsed time: * seconds (glob)
112 elapsed time: * seconds (glob)
111 round-trips: 1
113 round-trips: 1
114 queries: 1
112 heads summary:
115 heads summary:
113 total common heads: 1
116 total common heads: 1
114 also local heads: 1
117 also local heads: 1
115 also remote heads: 0
118 also remote heads: 0
116 both: 0
119 both: 0
117 local heads: 2
120 local heads: 2
118 common: 1
121 common: 1
119 missing: 1
122 missing: 1
120 remote heads: 3
123 remote heads: 3
121 common: 0
124 common: 0
122 unknown: 3
125 unknown: 3
123 local changesets: 7
126 local changesets: 7
124 common: 6
127 common: 6
125 heads: 1
128 heads: 1
126 roots: 1
129 roots: 1
127 missing: 1
130 missing: 1
128 heads: 1
131 heads: 1
129 roots: 1
132 roots: 1
130 first undecided set: 6
133 first undecided set: 6
131 heads: 2
134 heads: 2
132 roots: 1
135 roots: 1
133 common: 5
136 common: 5
134 missing: 1
137 missing: 1
135 common heads: b5714e113bc0
138 common heads: b5714e113bc0
136
139
137 % -- b -> a tree
140 % -- b -> a tree
138 comparing with a
141 comparing with a
139 searching for changes
142 searching for changes
140 unpruned common: 01241442b3c2 b5714e113bc0
143 unpruned common: 01241442b3c2 b5714e113bc0
141 elapsed time: * seconds (glob)
144 elapsed time: * seconds (glob)
142 round-trips: 1
145 round-trips: 1
146 queries: 0
143 heads summary:
147 heads summary:
144 total common heads: 2
148 total common heads: 2
145 also local heads: 1
149 also local heads: 1
146 also remote heads: 2
150 also remote heads: 2
147 both: 1
151 both: 1
148 local heads: 3
152 local heads: 3
149 common: 1
153 common: 1
150 missing: 2
154 missing: 2
151 remote heads: 2
155 remote heads: 2
152 common: 2
156 common: 2
153 unknown: 0
157 unknown: 0
154 local changesets: 15
158 local changesets: 15
155 common: 7
159 common: 7
156 heads: 2
160 heads: 2
157 roots: 1
161 roots: 1
158 missing: 8
162 missing: 8
159 heads: 2
163 heads: 2
160 roots: 2
164 roots: 2
161 first undecided set: 8
165 first undecided set: 8
162 heads: 2
166 heads: 2
163 roots: 2
167 roots: 2
164 common: 0
168 common: 0
165 missing: 8
169 missing: 8
166 common heads: 01241442b3c2 b5714e113bc0
170 common heads: 01241442b3c2 b5714e113bc0
167
171
168 % -- b -> a set
172 % -- b -> a set
169 comparing with a
173 comparing with a
170 query 1; heads
174 query 1; heads
171 searching for changes
175 searching for changes
172 all remote heads known locally
176 all remote heads known locally
173 elapsed time: * seconds (glob)
177 elapsed time: * seconds (glob)
174 round-trips: 1
178 round-trips: 1
179 queries: 3
175 heads summary:
180 heads summary:
176 total common heads: 2
181 total common heads: 2
177 also local heads: 1
182 also local heads: 1
178 also remote heads: 2
183 also remote heads: 2
179 both: 1
184 both: 1
180 local heads: 3
185 local heads: 3
181 common: 1
186 common: 1
182 missing: 2
187 missing: 2
183 remote heads: 2
188 remote heads: 2
184 common: 2
189 common: 2
185 unknown: 0
190 unknown: 0
186 local changesets: 15
191 local changesets: 15
187 common: 7
192 common: 7
188 heads: 2
193 heads: 2
189 roots: 1
194 roots: 1
190 missing: 8
195 missing: 8
191 heads: 2
196 heads: 2
192 roots: 2
197 roots: 2
193 first undecided set: 8
198 first undecided set: 8
194 heads: 2
199 heads: 2
195 roots: 2
200 roots: 2
196 common: 0
201 common: 0
197 missing: 8
202 missing: 8
198 common heads: 01241442b3c2 b5714e113bc0
203 common heads: 01241442b3c2 b5714e113bc0
199
204
200 % -- b -> a set (tip only)
205 % -- b -> a set (tip only)
201 comparing with a
206 comparing with a
202 query 1; heads
207 query 1; heads
203 searching for changes
208 searching for changes
204 all remote heads known locally
209 all remote heads known locally
205 elapsed time: * seconds (glob)
210 elapsed time: * seconds (glob)
206 round-trips: 1
211 round-trips: 1
212 queries: 1
207 heads summary:
213 heads summary:
208 total common heads: 2
214 total common heads: 2
209 also local heads: 1
215 also local heads: 1
210 also remote heads: 2
216 also remote heads: 2
211 both: 1
217 both: 1
212 local heads: 3
218 local heads: 3
213 common: 1
219 common: 1
214 missing: 2
220 missing: 2
215 remote heads: 2
221 remote heads: 2
216 common: 2
222 common: 2
217 unknown: 0
223 unknown: 0
218 local changesets: 15
224 local changesets: 15
219 common: 7
225 common: 7
220 heads: 2
226 heads: 2
221 roots: 1
227 roots: 1
222 missing: 8
228 missing: 8
223 heads: 2
229 heads: 2
224 roots: 2
230 roots: 2
225 first undecided set: 8
231 first undecided set: 8
226 heads: 2
232 heads: 2
227 roots: 2
233 roots: 2
228 common: 0
234 common: 0
229 missing: 8
235 missing: 8
230 common heads: 01241442b3c2 b5714e113bc0
236 common heads: 01241442b3c2 b5714e113bc0
231
237
232
238
233 Many new:
239 Many new:
234
240
235 $ testdesc '-ra1 -ra2' '-rb' '
241 $ testdesc '-ra1 -ra2' '-rb' '
236 > +2:f +3:a1 +3:b
242 > +2:f +3:a1 +3:b
237 > <f +30 :a2'
243 > <f +30 :a2'
238
244
239 % -- a -> b tree
245 % -- a -> b tree
240 comparing with b
246 comparing with b
241 searching for changes
247 searching for changes
242 unpruned common: bebd167eb94d
248 unpruned common: bebd167eb94d
243 elapsed time: * seconds (glob)
249 elapsed time: * seconds (glob)
244 round-trips: 2
250 round-trips: 2
251 queries: 3
245 heads summary:
252 heads summary:
246 total common heads: 1
253 total common heads: 1
247 also local heads: 1
254 also local heads: 1
248 also remote heads: 0
255 also remote heads: 0
249 both: 0
256 both: 0
250 local heads: 2
257 local heads: 2
251 common: 1
258 common: 1
252 missing: 1
259 missing: 1
253 remote heads: 1
260 remote heads: 1
254 common: 0
261 common: 0
255 unknown: 1
262 unknown: 1
256 local changesets: 35
263 local changesets: 35
257 common: 5
264 common: 5
258 heads: 1
265 heads: 1
259 roots: 1
266 roots: 1
260 missing: 30
267 missing: 30
261 heads: 1
268 heads: 1
262 roots: 1
269 roots: 1
263 first undecided set: 34
270 first undecided set: 34
264 heads: 2
271 heads: 2
265 roots: 1
272 roots: 1
266 common: 4
273 common: 4
267 missing: 30
274 missing: 30
268 common heads: bebd167eb94d
275 common heads: bebd167eb94d
269
276
270 % -- a -> b set
277 % -- a -> b set
271 comparing with b
278 comparing with b
272 query 1; heads
279 query 1; heads
273 searching for changes
280 searching for changes
274 taking initial sample
281 taking initial sample
275 searching: 2 queries
282 searching: 2 queries
276 query 2; still undecided: 29, sample size is: 29
283 query 2; still undecided: 29, sample size is: 29
277 2 total queries in *.????s (glob)
284 2 total queries in *.????s (glob)
278 elapsed time: * seconds (glob)
285 elapsed time: * seconds (glob)
279 round-trips: 2
286 round-trips: 2
287 queries: 31
280 heads summary:
288 heads summary:
281 total common heads: 1
289 total common heads: 1
282 also local heads: 1
290 also local heads: 1
283 also remote heads: 0
291 also remote heads: 0
284 both: 0
292 both: 0
285 local heads: 2
293 local heads: 2
286 common: 1
294 common: 1
287 missing: 1
295 missing: 1
288 remote heads: 1
296 remote heads: 1
289 common: 0
297 common: 0
290 unknown: 1
298 unknown: 1
291 local changesets: 35
299 local changesets: 35
292 common: 5
300 common: 5
293 heads: 1
301 heads: 1
294 roots: 1
302 roots: 1
295 missing: 30
303 missing: 30
296 heads: 1
304 heads: 1
297 roots: 1
305 roots: 1
298 first undecided set: 34
306 first undecided set: 34
299 heads: 2
307 heads: 2
300 roots: 1
308 roots: 1
301 common: 4
309 common: 4
302 missing: 30
310 missing: 30
303 common heads: bebd167eb94d
311 common heads: bebd167eb94d
304
312
305 % -- a -> b set (tip only)
313 % -- a -> b set (tip only)
306 comparing with b
314 comparing with b
307 query 1; heads
315 query 1; heads
308 searching for changes
316 searching for changes
309 taking quick initial sample
317 taking quick initial sample
310 searching: 2 queries
318 searching: 2 queries
311 query 2; still undecided: 31, sample size is: 31
319 query 2; still undecided: 31, sample size is: 31
312 2 total queries in *.????s (glob)
320 2 total queries in *.????s (glob)
313 elapsed time: * seconds (glob)
321 elapsed time: * seconds (glob)
314 round-trips: 2
322 round-trips: 2
323 queries: 32
315 heads summary:
324 heads summary:
316 total common heads: 1
325 total common heads: 1
317 also local heads: 0
326 also local heads: 0
318 also remote heads: 0
327 also remote heads: 0
319 both: 0
328 both: 0
320 local heads: 2
329 local heads: 2
321 common: 0
330 common: 0
322 missing: 2
331 missing: 2
323 remote heads: 1
332 remote heads: 1
324 common: 0
333 common: 0
325 unknown: 1
334 unknown: 1
326 local changesets: 35
335 local changesets: 35
327 common: 2
336 common: 2
328 heads: 1
337 heads: 1
329 roots: 1
338 roots: 1
330 missing: 33
339 missing: 33
331 heads: 2
340 heads: 2
332 roots: 2
341 roots: 2
333 first undecided set: 35
342 first undecided set: 35
334 heads: 2
343 heads: 2
335 roots: 1
344 roots: 1
336 common: 2
345 common: 2
337 missing: 33
346 missing: 33
338 common heads: 66f7d451a68b
347 common heads: 66f7d451a68b
339
348
340 % -- b -> a tree
349 % -- b -> a tree
341 comparing with a
350 comparing with a
342 searching for changes
351 searching for changes
343 unpruned common: 66f7d451a68b bebd167eb94d
352 unpruned common: 66f7d451a68b bebd167eb94d
344 elapsed time: * seconds (glob)
353 elapsed time: * seconds (glob)
345 round-trips: 4
354 round-trips: 4
355 queries: 5
346 heads summary:
356 heads summary:
347 total common heads: 1
357 total common heads: 1
348 also local heads: 0
358 also local heads: 0
349 also remote heads: 1
359 also remote heads: 1
350 both: 0
360 both: 0
351 local heads: 1
361 local heads: 1
352 common: 0
362 common: 0
353 missing: 1
363 missing: 1
354 remote heads: 2
364 remote heads: 2
355 common: 1
365 common: 1
356 unknown: 1
366 unknown: 1
357 local changesets: 8
367 local changesets: 8
358 common: 5
368 common: 5
359 heads: 1
369 heads: 1
360 roots: 1
370 roots: 1
361 missing: 3
371 missing: 3
362 heads: 1
372 heads: 1
363 roots: 1
373 roots: 1
364 first undecided set: 3
374 first undecided set: 3
365 heads: 1
375 heads: 1
366 roots: 1
376 roots: 1
367 common: 0
377 common: 0
368 missing: 3
378 missing: 3
369 common heads: bebd167eb94d
379 common heads: bebd167eb94d
370
380
371 % -- b -> a set
381 % -- b -> a set
372 comparing with a
382 comparing with a
373 query 1; heads
383 query 1; heads
374 searching for changes
384 searching for changes
375 taking initial sample
385 taking initial sample
376 searching: 2 queries
386 searching: 2 queries
377 query 2; still undecided: 2, sample size is: 2
387 query 2; still undecided: 2, sample size is: 2
378 2 total queries in *.????s (glob)
388 2 total queries in *.????s (glob)
379 elapsed time: * seconds (glob)
389 elapsed time: * seconds (glob)
380 round-trips: 2
390 round-trips: 2
391 queries: 3
381 heads summary:
392 heads summary:
382 total common heads: 1
393 total common heads: 1
383 also local heads: 0
394 also local heads: 0
384 also remote heads: 1
395 also remote heads: 1
385 both: 0
396 both: 0
386 local heads: 1
397 local heads: 1
387 common: 0
398 common: 0
388 missing: 1
399 missing: 1
389 remote heads: 2
400 remote heads: 2
390 common: 1
401 common: 1
391 unknown: 1
402 unknown: 1
392 local changesets: 8
403 local changesets: 8
393 common: 5
404 common: 5
394 heads: 1
405 heads: 1
395 roots: 1
406 roots: 1
396 missing: 3
407 missing: 3
397 heads: 1
408 heads: 1
398 roots: 1
409 roots: 1
399 first undecided set: 3
410 first undecided set: 3
400 heads: 1
411 heads: 1
401 roots: 1
412 roots: 1
402 common: 0
413 common: 0
403 missing: 3
414 missing: 3
404 common heads: bebd167eb94d
415 common heads: bebd167eb94d
405
416
406 % -- b -> a set (tip only)
417 % -- b -> a set (tip only)
407 comparing with a
418 comparing with a
408 query 1; heads
419 query 1; heads
409 searching for changes
420 searching for changes
410 taking initial sample
421 taking initial sample
411 searching: 2 queries
422 searching: 2 queries
412 query 2; still undecided: 2, sample size is: 2
423 query 2; still undecided: 2, sample size is: 2
413 2 total queries in *.????s (glob)
424 2 total queries in *.????s (glob)
414 elapsed time: * seconds (glob)
425 elapsed time: * seconds (glob)
415 round-trips: 2
426 round-trips: 2
427 queries: 3
416 heads summary:
428 heads summary:
417 total common heads: 1
429 total common heads: 1
418 also local heads: 0
430 also local heads: 0
419 also remote heads: 1
431 also remote heads: 1
420 both: 0
432 both: 0
421 local heads: 1
433 local heads: 1
422 common: 0
434 common: 0
423 missing: 1
435 missing: 1
424 remote heads: 2
436 remote heads: 2
425 common: 1
437 common: 1
426 unknown: 1
438 unknown: 1
427 local changesets: 8
439 local changesets: 8
428 common: 5
440 common: 5
429 heads: 1
441 heads: 1
430 roots: 1
442 roots: 1
431 missing: 3
443 missing: 3
432 heads: 1
444 heads: 1
433 roots: 1
445 roots: 1
434 first undecided set: 3
446 first undecided set: 3
435 heads: 1
447 heads: 1
436 roots: 1
448 roots: 1
437 common: 0
449 common: 0
438 missing: 3
450 missing: 3
439 common heads: bebd167eb94d
451 common heads: bebd167eb94d
440
452
441 Both sides many new with stub:
453 Both sides many new with stub:
442
454
443 $ testdesc '-ra1 -ra2' '-rb' '
455 $ testdesc '-ra1 -ra2' '-rb' '
444 > +2:f +2:a1 +30 :b
456 > +2:f +2:a1 +30 :b
445 > <f +30 :a2'
457 > <f +30 :a2'
446
458
447 % -- a -> b tree
459 % -- a -> b tree
448 comparing with b
460 comparing with b
449 searching for changes
461 searching for changes
450 unpruned common: 2dc09a01254d
462 unpruned common: 2dc09a01254d
451 elapsed time: * seconds (glob)
463 elapsed time: * seconds (glob)
452 round-trips: 4
464 round-trips: 4
465 queries: 5
453 heads summary:
466 heads summary:
454 total common heads: 1
467 total common heads: 1
455 also local heads: 1
468 also local heads: 1
456 also remote heads: 0
469 also remote heads: 0
457 both: 0
470 both: 0
458 local heads: 2
471 local heads: 2
459 common: 1
472 common: 1
460 missing: 1
473 missing: 1
461 remote heads: 1
474 remote heads: 1
462 common: 0
475 common: 0
463 unknown: 1
476 unknown: 1
464 local changesets: 34
477 local changesets: 34
465 common: 4
478 common: 4
466 heads: 1
479 heads: 1
467 roots: 1
480 roots: 1
468 missing: 30
481 missing: 30
469 heads: 1
482 heads: 1
470 roots: 1
483 roots: 1
471 first undecided set: 33
484 first undecided set: 33
472 heads: 2
485 heads: 2
473 roots: 1
486 roots: 1
474 common: 3
487 common: 3
475 missing: 30
488 missing: 30
476 common heads: 2dc09a01254d
489 common heads: 2dc09a01254d
477
490
478 % -- a -> b set
491 % -- a -> b set
479 comparing with b
492 comparing with b
480 query 1; heads
493 query 1; heads
481 searching for changes
494 searching for changes
482 taking initial sample
495 taking initial sample
483 searching: 2 queries
496 searching: 2 queries
484 query 2; still undecided: 29, sample size is: 29
497 query 2; still undecided: 29, sample size is: 29
485 2 total queries in *.????s (glob)
498 2 total queries in *.????s (glob)
486 elapsed time: * seconds (glob)
499 elapsed time: * seconds (glob)
487 round-trips: 2
500 round-trips: 2
501 queries: 31
488 heads summary:
502 heads summary:
489 total common heads: 1
503 total common heads: 1
490 also local heads: 1
504 also local heads: 1
491 also remote heads: 0
505 also remote heads: 0
492 both: 0
506 both: 0
493 local heads: 2
507 local heads: 2
494 common: 1
508 common: 1
495 missing: 1
509 missing: 1
496 remote heads: 1
510 remote heads: 1
497 common: 0
511 common: 0
498 unknown: 1
512 unknown: 1
499 local changesets: 34
513 local changesets: 34
500 common: 4
514 common: 4
501 heads: 1
515 heads: 1
502 roots: 1
516 roots: 1
503 missing: 30
517 missing: 30
504 heads: 1
518 heads: 1
505 roots: 1
519 roots: 1
506 first undecided set: 33
520 first undecided set: 33
507 heads: 2
521 heads: 2
508 roots: 1
522 roots: 1
509 common: 3
523 common: 3
510 missing: 30
524 missing: 30
511 common heads: 2dc09a01254d
525 common heads: 2dc09a01254d
512
526
513 % -- a -> b set (tip only)
527 % -- a -> b set (tip only)
514 comparing with b
528 comparing with b
515 query 1; heads
529 query 1; heads
516 searching for changes
530 searching for changes
517 taking quick initial sample
531 taking quick initial sample
518 searching: 2 queries
532 searching: 2 queries
519 query 2; still undecided: 31, sample size is: 31
533 query 2; still undecided: 31, sample size is: 31
520 2 total queries in *.????s (glob)
534 2 total queries in *.????s (glob)
521 elapsed time: * seconds (glob)
535 elapsed time: * seconds (glob)
522 round-trips: 2
536 round-trips: 2
537 queries: 32
523 heads summary:
538 heads summary:
524 total common heads: 1
539 total common heads: 1
525 also local heads: 0
540 also local heads: 0
526 also remote heads: 0
541 also remote heads: 0
527 both: 0
542 both: 0
528 local heads: 2
543 local heads: 2
529 common: 0
544 common: 0
530 missing: 2
545 missing: 2
531 remote heads: 1
546 remote heads: 1
532 common: 0
547 common: 0
533 unknown: 1
548 unknown: 1
534 local changesets: 34
549 local changesets: 34
535 common: 2
550 common: 2
536 heads: 1
551 heads: 1
537 roots: 1
552 roots: 1
538 missing: 32
553 missing: 32
539 heads: 2
554 heads: 2
540 roots: 2
555 roots: 2
541 first undecided set: 34
556 first undecided set: 34
542 heads: 2
557 heads: 2
543 roots: 1
558 roots: 1
544 common: 2
559 common: 2
545 missing: 32
560 missing: 32
546 common heads: 66f7d451a68b
561 common heads: 66f7d451a68b
547
562
548 % -- b -> a tree
563 % -- b -> a tree
549 comparing with a
564 comparing with a
550 searching for changes
565 searching for changes
551 unpruned common: 2dc09a01254d 66f7d451a68b
566 unpruned common: 2dc09a01254d 66f7d451a68b
552 elapsed time: * seconds (glob)
567 elapsed time: * seconds (glob)
553 round-trips: 4
568 round-trips: 4
569 queries: 5
554 heads summary:
570 heads summary:
555 total common heads: 1
571 total common heads: 1
556 also local heads: 0
572 also local heads: 0
557 also remote heads: 1
573 also remote heads: 1
558 both: 0
574 both: 0
559 local heads: 1
575 local heads: 1
560 common: 0
576 common: 0
561 missing: 1
577 missing: 1
562 remote heads: 2
578 remote heads: 2
563 common: 1
579 common: 1
564 unknown: 1
580 unknown: 1
565 local changesets: 34
581 local changesets: 34
566 common: 4
582 common: 4
567 heads: 1
583 heads: 1
568 roots: 1
584 roots: 1
569 missing: 30
585 missing: 30
570 heads: 1
586 heads: 1
571 roots: 1
587 roots: 1
572 first undecided set: 30
588 first undecided set: 30
573 heads: 1
589 heads: 1
574 roots: 1
590 roots: 1
575 common: 0
591 common: 0
576 missing: 30
592 missing: 30
577 common heads: 2dc09a01254d
593 common heads: 2dc09a01254d
578
594
579 % -- b -> a set
595 % -- b -> a set
580 comparing with a
596 comparing with a
581 query 1; heads
597 query 1; heads
582 searching for changes
598 searching for changes
583 taking initial sample
599 taking initial sample
584 searching: 2 queries
600 searching: 2 queries
585 query 2; still undecided: 29, sample size is: 29
601 query 2; still undecided: 29, sample size is: 29
586 2 total queries in *.????s (glob)
602 2 total queries in *.????s (glob)
587 elapsed time: * seconds (glob)
603 elapsed time: * seconds (glob)
588 round-trips: 2
604 round-trips: 2
605 queries: 30
589 heads summary:
606 heads summary:
590 total common heads: 1
607 total common heads: 1
591 also local heads: 0
608 also local heads: 0
592 also remote heads: 1
609 also remote heads: 1
593 both: 0
610 both: 0
594 local heads: 1
611 local heads: 1
595 common: 0
612 common: 0
596 missing: 1
613 missing: 1
597 remote heads: 2
614 remote heads: 2
598 common: 1
615 common: 1
599 unknown: 1
616 unknown: 1
600 local changesets: 34
617 local changesets: 34
601 common: 4
618 common: 4
602 heads: 1
619 heads: 1
603 roots: 1
620 roots: 1
604 missing: 30
621 missing: 30
605 heads: 1
622 heads: 1
606 roots: 1
623 roots: 1
607 first undecided set: 30
624 first undecided set: 30
608 heads: 1
625 heads: 1
609 roots: 1
626 roots: 1
610 common: 0
627 common: 0
611 missing: 30
628 missing: 30
612 common heads: 2dc09a01254d
629 common heads: 2dc09a01254d
613
630
614 % -- b -> a set (tip only)
631 % -- b -> a set (tip only)
615 comparing with a
632 comparing with a
616 query 1; heads
633 query 1; heads
617 searching for changes
634 searching for changes
618 taking initial sample
635 taking initial sample
619 searching: 2 queries
636 searching: 2 queries
620 query 2; still undecided: 29, sample size is: 29
637 query 2; still undecided: 29, sample size is: 29
621 2 total queries in *.????s (glob)
638 2 total queries in *.????s (glob)
622 elapsed time: * seconds (glob)
639 elapsed time: * seconds (glob)
623 round-trips: 2
640 round-trips: 2
641 queries: 30
624 heads summary:
642 heads summary:
625 total common heads: 1
643 total common heads: 1
626 also local heads: 0
644 also local heads: 0
627 also remote heads: 1
645 also remote heads: 1
628 both: 0
646 both: 0
629 local heads: 1
647 local heads: 1
630 common: 0
648 common: 0
631 missing: 1
649 missing: 1
632 remote heads: 2
650 remote heads: 2
633 common: 1
651 common: 1
634 unknown: 1
652 unknown: 1
635 local changesets: 34
653 local changesets: 34
636 common: 4
654 common: 4
637 heads: 1
655 heads: 1
638 roots: 1
656 roots: 1
639 missing: 30
657 missing: 30
640 heads: 1
658 heads: 1
641 roots: 1
659 roots: 1
642 first undecided set: 30
660 first undecided set: 30
643 heads: 1
661 heads: 1
644 roots: 1
662 roots: 1
645 common: 0
663 common: 0
646 missing: 30
664 missing: 30
647 common heads: 2dc09a01254d
665 common heads: 2dc09a01254d
648
666
649
667
650 Both many new:
668 Both many new:
651
669
652 $ testdesc '-ra' '-rb' '
670 $ testdesc '-ra' '-rb' '
653 > +2:f +30 :b
671 > +2:f +30 :b
654 > <f +30 :a'
672 > <f +30 :a'
655
673
656 % -- a -> b tree
674 % -- a -> b tree
657 comparing with b
675 comparing with b
658 searching for changes
676 searching for changes
659 unpruned common: 66f7d451a68b
677 unpruned common: 66f7d451a68b
660 elapsed time: * seconds (glob)
678 elapsed time: * seconds (glob)
661 round-trips: 4
679 round-trips: 4
680 queries: 5
662 heads summary:
681 heads summary:
663 total common heads: 1
682 total common heads: 1
664 also local heads: 0
683 also local heads: 0
665 also remote heads: 0
684 also remote heads: 0
666 both: 0
685 both: 0
667 local heads: 1
686 local heads: 1
668 common: 0
687 common: 0
669 missing: 1
688 missing: 1
670 remote heads: 1
689 remote heads: 1
671 common: 0
690 common: 0
672 unknown: 1
691 unknown: 1
673 local changesets: 32
692 local changesets: 32
674 common: 2
693 common: 2
675 heads: 1
694 heads: 1
676 roots: 1
695 roots: 1
677 missing: 30
696 missing: 30
678 heads: 1
697 heads: 1
679 roots: 1
698 roots: 1
680 first undecided set: 32
699 first undecided set: 32
681 heads: 1
700 heads: 1
682 roots: 1
701 roots: 1
683 common: 2
702 common: 2
684 missing: 30
703 missing: 30
685 common heads: 66f7d451a68b
704 common heads: 66f7d451a68b
686
705
687 % -- a -> b set
706 % -- a -> b set
688 comparing with b
707 comparing with b
689 query 1; heads
708 query 1; heads
690 searching for changes
709 searching for changes
691 taking quick initial sample
710 taking quick initial sample
692 searching: 2 queries
711 searching: 2 queries
693 query 2; still undecided: 31, sample size is: 31
712 query 2; still undecided: 31, sample size is: 31
694 2 total queries in *.????s (glob)
713 2 total queries in *.????s (glob)
695 elapsed time: * seconds (glob)
714 elapsed time: * seconds (glob)
696 round-trips: 2
715 round-trips: 2
716 queries: 32
697 heads summary:
717 heads summary:
698 total common heads: 1
718 total common heads: 1
699 also local heads: 0
719 also local heads: 0
700 also remote heads: 0
720 also remote heads: 0
701 both: 0
721 both: 0
702 local heads: 1
722 local heads: 1
703 common: 0
723 common: 0
704 missing: 1
724 missing: 1
705 remote heads: 1
725 remote heads: 1
706 common: 0
726 common: 0
707 unknown: 1
727 unknown: 1
708 local changesets: 32
728 local changesets: 32
709 common: 2
729 common: 2
710 heads: 1
730 heads: 1
711 roots: 1
731 roots: 1
712 missing: 30
732 missing: 30
713 heads: 1
733 heads: 1
714 roots: 1
734 roots: 1
715 first undecided set: 32
735 first undecided set: 32
716 heads: 1
736 heads: 1
717 roots: 1
737 roots: 1
718 common: 2
738 common: 2
719 missing: 30
739 missing: 30
720 common heads: 66f7d451a68b
740 common heads: 66f7d451a68b
721
741
722 % -- a -> b set (tip only)
742 % -- a -> b set (tip only)
723 comparing with b
743 comparing with b
724 query 1; heads
744 query 1; heads
725 searching for changes
745 searching for changes
726 taking quick initial sample
746 taking quick initial sample
727 searching: 2 queries
747 searching: 2 queries
728 query 2; still undecided: 31, sample size is: 31
748 query 2; still undecided: 31, sample size is: 31
729 2 total queries in *.????s (glob)
749 2 total queries in *.????s (glob)
730 elapsed time: * seconds (glob)
750 elapsed time: * seconds (glob)
731 round-trips: 2
751 round-trips: 2
752 queries: 32
732 heads summary:
753 heads summary:
733 total common heads: 1
754 total common heads: 1
734 also local heads: 0
755 also local heads: 0
735 also remote heads: 0
756 also remote heads: 0
736 both: 0
757 both: 0
737 local heads: 1
758 local heads: 1
738 common: 0
759 common: 0
739 missing: 1
760 missing: 1
740 remote heads: 1
761 remote heads: 1
741 common: 0
762 common: 0
742 unknown: 1
763 unknown: 1
743 local changesets: 32
764 local changesets: 32
744 common: 2
765 common: 2
745 heads: 1
766 heads: 1
746 roots: 1
767 roots: 1
747 missing: 30
768 missing: 30
748 heads: 1
769 heads: 1
749 roots: 1
770 roots: 1
750 first undecided set: 32
771 first undecided set: 32
751 heads: 1
772 heads: 1
752 roots: 1
773 roots: 1
753 common: 2
774 common: 2
754 missing: 30
775 missing: 30
755 common heads: 66f7d451a68b
776 common heads: 66f7d451a68b
756
777
757 % -- b -> a tree
778 % -- b -> a tree
758 comparing with a
779 comparing with a
759 searching for changes
780 searching for changes
760 unpruned common: 66f7d451a68b
781 unpruned common: 66f7d451a68b
761 elapsed time: * seconds (glob)
782 elapsed time: * seconds (glob)
762 round-trips: 4
783 round-trips: 4
784 queries: 5
763 heads summary:
785 heads summary:
764 total common heads: 1
786 total common heads: 1
765 also local heads: 0
787 also local heads: 0
766 also remote heads: 0
788 also remote heads: 0
767 both: 0
789 both: 0
768 local heads: 1
790 local heads: 1
769 common: 0
791 common: 0
770 missing: 1
792 missing: 1
771 remote heads: 1
793 remote heads: 1
772 common: 0
794 common: 0
773 unknown: 1
795 unknown: 1
774 local changesets: 32
796 local changesets: 32
775 common: 2
797 common: 2
776 heads: 1
798 heads: 1
777 roots: 1
799 roots: 1
778 missing: 30
800 missing: 30
779 heads: 1
801 heads: 1
780 roots: 1
802 roots: 1
781 first undecided set: 32
803 first undecided set: 32
782 heads: 1
804 heads: 1
783 roots: 1
805 roots: 1
784 common: 2
806 common: 2
785 missing: 30
807 missing: 30
786 common heads: 66f7d451a68b
808 common heads: 66f7d451a68b
787
809
788 % -- b -> a set
810 % -- b -> a set
789 comparing with a
811 comparing with a
790 query 1; heads
812 query 1; heads
791 searching for changes
813 searching for changes
792 taking quick initial sample
814 taking quick initial sample
793 searching: 2 queries
815 searching: 2 queries
794 query 2; still undecided: 31, sample size is: 31
816 query 2; still undecided: 31, sample size is: 31
795 2 total queries in *.????s (glob)
817 2 total queries in *.????s (glob)
796 elapsed time: * seconds (glob)
818 elapsed time: * seconds (glob)
797 round-trips: 2
819 round-trips: 2
820 queries: 32
798 heads summary:
821 heads summary:
799 total common heads: 1
822 total common heads: 1
800 also local heads: 0
823 also local heads: 0
801 also remote heads: 0
824 also remote heads: 0
802 both: 0
825 both: 0
803 local heads: 1
826 local heads: 1
804 common: 0
827 common: 0
805 missing: 1
828 missing: 1
806 remote heads: 1
829 remote heads: 1
807 common: 0
830 common: 0
808 unknown: 1
831 unknown: 1
809 local changesets: 32
832 local changesets: 32
810 common: 2
833 common: 2
811 heads: 1
834 heads: 1
812 roots: 1
835 roots: 1
813 missing: 30
836 missing: 30
814 heads: 1
837 heads: 1
815 roots: 1
838 roots: 1
816 first undecided set: 32
839 first undecided set: 32
817 heads: 1
840 heads: 1
818 roots: 1
841 roots: 1
819 common: 2
842 common: 2
820 missing: 30
843 missing: 30
821 common heads: 66f7d451a68b
844 common heads: 66f7d451a68b
822
845
823 % -- b -> a set (tip only)
846 % -- b -> a set (tip only)
824 comparing with a
847 comparing with a
825 query 1; heads
848 query 1; heads
826 searching for changes
849 searching for changes
827 taking quick initial sample
850 taking quick initial sample
828 searching: 2 queries
851 searching: 2 queries
829 query 2; still undecided: 31, sample size is: 31
852 query 2; still undecided: 31, sample size is: 31
830 2 total queries in *.????s (glob)
853 2 total queries in *.????s (glob)
831 elapsed time: * seconds (glob)
854 elapsed time: * seconds (glob)
832 round-trips: 2
855 round-trips: 2
856 queries: 32
833 heads summary:
857 heads summary:
834 total common heads: 1
858 total common heads: 1
835 also local heads: 0
859 also local heads: 0
836 also remote heads: 0
860 also remote heads: 0
837 both: 0
861 both: 0
838 local heads: 1
862 local heads: 1
839 common: 0
863 common: 0
840 missing: 1
864 missing: 1
841 remote heads: 1
865 remote heads: 1
842 common: 0
866 common: 0
843 unknown: 1
867 unknown: 1
844 local changesets: 32
868 local changesets: 32
845 common: 2
869 common: 2
846 heads: 1
870 heads: 1
847 roots: 1
871 roots: 1
848 missing: 30
872 missing: 30
849 heads: 1
873 heads: 1
850 roots: 1
874 roots: 1
851 first undecided set: 32
875 first undecided set: 32
852 heads: 1
876 heads: 1
853 roots: 1
877 roots: 1
854 common: 2
878 common: 2
855 missing: 30
879 missing: 30
856 common heads: 66f7d451a68b
880 common heads: 66f7d451a68b
857
881
858
882
859 Both many new skewed:
883 Both many new skewed:
860
884
861 $ testdesc '-ra' '-rb' '
885 $ testdesc '-ra' '-rb' '
862 > +2:f +30 :b
886 > +2:f +30 :b
863 > <f +50 :a'
887 > <f +50 :a'
864
888
865 % -- a -> b tree
889 % -- a -> b tree
866 comparing with b
890 comparing with b
867 searching for changes
891 searching for changes
868 unpruned common: 66f7d451a68b
892 unpruned common: 66f7d451a68b
869 elapsed time: * seconds (glob)
893 elapsed time: * seconds (glob)
870 round-trips: 4
894 round-trips: 4
895 queries: 5
871 heads summary:
896 heads summary:
872 total common heads: 1
897 total common heads: 1
873 also local heads: 0
898 also local heads: 0
874 also remote heads: 0
899 also remote heads: 0
875 both: 0
900 both: 0
876 local heads: 1
901 local heads: 1
877 common: 0
902 common: 0
878 missing: 1
903 missing: 1
879 remote heads: 1
904 remote heads: 1
880 common: 0
905 common: 0
881 unknown: 1
906 unknown: 1
882 local changesets: 52
907 local changesets: 52
883 common: 2
908 common: 2
884 heads: 1
909 heads: 1
885 roots: 1
910 roots: 1
886 missing: 50
911 missing: 50
887 heads: 1
912 heads: 1
888 roots: 1
913 roots: 1
889 first undecided set: 52
914 first undecided set: 52
890 heads: 1
915 heads: 1
891 roots: 1
916 roots: 1
892 common: 2
917 common: 2
893 missing: 50
918 missing: 50
894 common heads: 66f7d451a68b
919 common heads: 66f7d451a68b
895
920
896 % -- a -> b set
921 % -- a -> b set
897 comparing with b
922 comparing with b
898 query 1; heads
923 query 1; heads
899 searching for changes
924 searching for changes
900 taking quick initial sample
925 taking quick initial sample
901 searching: 2 queries
926 searching: 2 queries
902 query 2; still undecided: 51, sample size is: 51
927 query 2; still undecided: 51, sample size is: 51
903 2 total queries in *.????s (glob)
928 2 total queries in *.????s (glob)
904 elapsed time: * seconds (glob)
929 elapsed time: * seconds (glob)
905 round-trips: 2
930 round-trips: 2
931 queries: 52
906 heads summary:
932 heads summary:
907 total common heads: 1
933 total common heads: 1
908 also local heads: 0
934 also local heads: 0
909 also remote heads: 0
935 also remote heads: 0
910 both: 0
936 both: 0
911 local heads: 1
937 local heads: 1
912 common: 0
938 common: 0
913 missing: 1
939 missing: 1
914 remote heads: 1
940 remote heads: 1
915 common: 0
941 common: 0
916 unknown: 1
942 unknown: 1
917 local changesets: 52
943 local changesets: 52
918 common: 2
944 common: 2
919 heads: 1
945 heads: 1
920 roots: 1
946 roots: 1
921 missing: 50
947 missing: 50
922 heads: 1
948 heads: 1
923 roots: 1
949 roots: 1
924 first undecided set: 52
950 first undecided set: 52
925 heads: 1
951 heads: 1
926 roots: 1
952 roots: 1
927 common: 2
953 common: 2
928 missing: 50
954 missing: 50
929 common heads: 66f7d451a68b
955 common heads: 66f7d451a68b
930
956
931 % -- a -> b set (tip only)
957 % -- a -> b set (tip only)
932 comparing with b
958 comparing with b
933 query 1; heads
959 query 1; heads
934 searching for changes
960 searching for changes
935 taking quick initial sample
961 taking quick initial sample
936 searching: 2 queries
962 searching: 2 queries
937 query 2; still undecided: 51, sample size is: 51
963 query 2; still undecided: 51, sample size is: 51
938 2 total queries in *.????s (glob)
964 2 total queries in *.????s (glob)
939 elapsed time: * seconds (glob)
965 elapsed time: * seconds (glob)
940 round-trips: 2
966 round-trips: 2
967 queries: 52
941 heads summary:
968 heads summary:
942 total common heads: 1
969 total common heads: 1
943 also local heads: 0
970 also local heads: 0
944 also remote heads: 0
971 also remote heads: 0
945 both: 0
972 both: 0
946 local heads: 1
973 local heads: 1
947 common: 0
974 common: 0
948 missing: 1
975 missing: 1
949 remote heads: 1
976 remote heads: 1
950 common: 0
977 common: 0
951 unknown: 1
978 unknown: 1
952 local changesets: 52
979 local changesets: 52
953 common: 2
980 common: 2
954 heads: 1
981 heads: 1
955 roots: 1
982 roots: 1
956 missing: 50
983 missing: 50
957 heads: 1
984 heads: 1
958 roots: 1
985 roots: 1
959 first undecided set: 52
986 first undecided set: 52
960 heads: 1
987 heads: 1
961 roots: 1
988 roots: 1
962 common: 2
989 common: 2
963 missing: 50
990 missing: 50
964 common heads: 66f7d451a68b
991 common heads: 66f7d451a68b
965
992
966 % -- b -> a tree
993 % -- b -> a tree
967 comparing with a
994 comparing with a
968 searching for changes
995 searching for changes
969 unpruned common: 66f7d451a68b
996 unpruned common: 66f7d451a68b
970 elapsed time: * seconds (glob)
997 elapsed time: * seconds (glob)
971 round-trips: 3
998 round-trips: 3
999 queries: 4
972 heads summary:
1000 heads summary:
973 total common heads: 1
1001 total common heads: 1
974 also local heads: 0
1002 also local heads: 0
975 also remote heads: 0
1003 also remote heads: 0
976 both: 0
1004 both: 0
977 local heads: 1
1005 local heads: 1
978 common: 0
1006 common: 0
979 missing: 1
1007 missing: 1
980 remote heads: 1
1008 remote heads: 1
981 common: 0
1009 common: 0
982 unknown: 1
1010 unknown: 1
983 local changesets: 32
1011 local changesets: 32
984 common: 2
1012 common: 2
985 heads: 1
1013 heads: 1
986 roots: 1
1014 roots: 1
987 missing: 30
1015 missing: 30
988 heads: 1
1016 heads: 1
989 roots: 1
1017 roots: 1
990 first undecided set: 32
1018 first undecided set: 32
991 heads: 1
1019 heads: 1
992 roots: 1
1020 roots: 1
993 common: 2
1021 common: 2
994 missing: 30
1022 missing: 30
995 common heads: 66f7d451a68b
1023 common heads: 66f7d451a68b
996
1024
997 % -- b -> a set
1025 % -- b -> a set
998 comparing with a
1026 comparing with a
999 query 1; heads
1027 query 1; heads
1000 searching for changes
1028 searching for changes
1001 taking quick initial sample
1029 taking quick initial sample
1002 searching: 2 queries
1030 searching: 2 queries
1003 query 2; still undecided: 31, sample size is: 31
1031 query 2; still undecided: 31, sample size is: 31
1004 2 total queries in *.????s (glob)
1032 2 total queries in *.????s (glob)
1005 elapsed time: * seconds (glob)
1033 elapsed time: * seconds (glob)
1006 round-trips: 2
1034 round-trips: 2
1035 queries: 32
1007 heads summary:
1036 heads summary:
1008 total common heads: 1
1037 total common heads: 1
1009 also local heads: 0
1038 also local heads: 0
1010 also remote heads: 0
1039 also remote heads: 0
1011 both: 0
1040 both: 0
1012 local heads: 1
1041 local heads: 1
1013 common: 0
1042 common: 0
1014 missing: 1
1043 missing: 1
1015 remote heads: 1
1044 remote heads: 1
1016 common: 0
1045 common: 0
1017 unknown: 1
1046 unknown: 1
1018 local changesets: 32
1047 local changesets: 32
1019 common: 2
1048 common: 2
1020 heads: 1
1049 heads: 1
1021 roots: 1
1050 roots: 1
1022 missing: 30
1051 missing: 30
1023 heads: 1
1052 heads: 1
1024 roots: 1
1053 roots: 1
1025 first undecided set: 32
1054 first undecided set: 32
1026 heads: 1
1055 heads: 1
1027 roots: 1
1056 roots: 1
1028 common: 2
1057 common: 2
1029 missing: 30
1058 missing: 30
1030 common heads: 66f7d451a68b
1059 common heads: 66f7d451a68b
1031
1060
1032 % -- b -> a set (tip only)
1061 % -- b -> a set (tip only)
1033 comparing with a
1062 comparing with a
1034 query 1; heads
1063 query 1; heads
1035 searching for changes
1064 searching for changes
1036 taking quick initial sample
1065 taking quick initial sample
1037 searching: 2 queries
1066 searching: 2 queries
1038 query 2; still undecided: 31, sample size is: 31
1067 query 2; still undecided: 31, sample size is: 31
1039 2 total queries in *.????s (glob)
1068 2 total queries in *.????s (glob)
1040 elapsed time: * seconds (glob)
1069 elapsed time: * seconds (glob)
1041 round-trips: 2
1070 round-trips: 2
1071 queries: 32
1042 heads summary:
1072 heads summary:
1043 total common heads: 1
1073 total common heads: 1
1044 also local heads: 0
1074 also local heads: 0
1045 also remote heads: 0
1075 also remote heads: 0
1046 both: 0
1076 both: 0
1047 local heads: 1
1077 local heads: 1
1048 common: 0
1078 common: 0
1049 missing: 1
1079 missing: 1
1050 remote heads: 1
1080 remote heads: 1
1051 common: 0
1081 common: 0
1052 unknown: 1
1082 unknown: 1
1053 local changesets: 32
1083 local changesets: 32
1054 common: 2
1084 common: 2
1055 heads: 1
1085 heads: 1
1056 roots: 1
1086 roots: 1
1057 missing: 30
1087 missing: 30
1058 heads: 1
1088 heads: 1
1059 roots: 1
1089 roots: 1
1060 first undecided set: 32
1090 first undecided set: 32
1061 heads: 1
1091 heads: 1
1062 roots: 1
1092 roots: 1
1063 common: 2
1093 common: 2
1064 missing: 30
1094 missing: 30
1065 common heads: 66f7d451a68b
1095 common heads: 66f7d451a68b
1066
1096
1067
1097
1068 Both many new on top of long history:
1098 Both many new on top of long history:
1069
1099
1070 $ testdesc '-ra' '-rb' '
1100 $ testdesc '-ra' '-rb' '
1071 > +1000:f +30 :b
1101 > +1000:f +30 :b
1072 > <f +50 :a'
1102 > <f +50 :a'
1073
1103
1074 % -- a -> b tree
1104 % -- a -> b tree
1075 comparing with b
1105 comparing with b
1076 searching for changes
1106 searching for changes
1077 unpruned common: 7ead0cba2838
1107 unpruned common: 7ead0cba2838
1078 elapsed time: * seconds (glob)
1108 elapsed time: * seconds (glob)
1079 round-trips: 4
1109 round-trips: 4
1110 queries: 5
1080 heads summary:
1111 heads summary:
1081 total common heads: 1
1112 total common heads: 1
1082 also local heads: 0
1113 also local heads: 0
1083 also remote heads: 0
1114 also remote heads: 0
1084 both: 0
1115 both: 0
1085 local heads: 1
1116 local heads: 1
1086 common: 0
1117 common: 0
1087 missing: 1
1118 missing: 1
1088 remote heads: 1
1119 remote heads: 1
1089 common: 0
1120 common: 0
1090 unknown: 1
1121 unknown: 1
1091 local changesets: 1050
1122 local changesets: 1050
1092 common: 1000
1123 common: 1000
1093 heads: 1
1124 heads: 1
1094 roots: 1
1125 roots: 1
1095 missing: 50
1126 missing: 50
1096 heads: 1
1127 heads: 1
1097 roots: 1
1128 roots: 1
1098 first undecided set: 1050
1129 first undecided set: 1050
1099 heads: 1
1130 heads: 1
1100 roots: 1
1131 roots: 1
1101 common: 1000
1132 common: 1000
1102 missing: 50
1133 missing: 50
1103 common heads: 7ead0cba2838
1134 common heads: 7ead0cba2838
1104
1135
1105 % -- a -> b set
1136 % -- a -> b set
1106 comparing with b
1137 comparing with b
1107 query 1; heads
1138 query 1; heads
1108 searching for changes
1139 searching for changes
1109 taking quick initial sample
1140 taking quick initial sample
1110 searching: 2 queries
1141 searching: 2 queries
1111 query 2; still undecided: 1049, sample size is: 11
1142 query 2; still undecided: 1049, sample size is: 11
1112 sampling from both directions
1143 sampling from both directions
1113 searching: 3 queries
1144 searching: 3 queries
1114 query 3; still undecided: 31, sample size is: 31
1145 query 3; still undecided: 31, sample size is: 31
1115 3 total queries in *.????s (glob)
1146 3 total queries in *.????s (glob)
1116 elapsed time: * seconds (glob)
1147 elapsed time: * seconds (glob)
1117 round-trips: 3
1148 round-trips: 3
1149 queries: 43
1118 heads summary:
1150 heads summary:
1119 total common heads: 1
1151 total common heads: 1
1120 also local heads: 0
1152 also local heads: 0
1121 also remote heads: 0
1153 also remote heads: 0
1122 both: 0
1154 both: 0
1123 local heads: 1
1155 local heads: 1
1124 common: 0
1156 common: 0
1125 missing: 1
1157 missing: 1
1126 remote heads: 1
1158 remote heads: 1
1127 common: 0
1159 common: 0
1128 unknown: 1
1160 unknown: 1
1129 local changesets: 1050
1161 local changesets: 1050
1130 common: 1000
1162 common: 1000
1131 heads: 1
1163 heads: 1
1132 roots: 1
1164 roots: 1
1133 missing: 50
1165 missing: 50
1134 heads: 1
1166 heads: 1
1135 roots: 1
1167 roots: 1
1136 first undecided set: 1050
1168 first undecided set: 1050
1137 heads: 1
1169 heads: 1
1138 roots: 1
1170 roots: 1
1139 common: 1000
1171 common: 1000
1140 missing: 50
1172 missing: 50
1141 common heads: 7ead0cba2838
1173 common heads: 7ead0cba2838
1142
1174
1143 % -- a -> b set (tip only)
1175 % -- a -> b set (tip only)
1144 comparing with b
1176 comparing with b
1145 query 1; heads
1177 query 1; heads
1146 searching for changes
1178 searching for changes
1147 taking quick initial sample
1179 taking quick initial sample
1148 searching: 2 queries
1180 searching: 2 queries
1149 query 2; still undecided: 1049, sample size is: 11
1181 query 2; still undecided: 1049, sample size is: 11
1150 sampling from both directions
1182 sampling from both directions
1151 searching: 3 queries
1183 searching: 3 queries
1152 query 3; still undecided: 31, sample size is: 31
1184 query 3; still undecided: 31, sample size is: 31
1153 3 total queries in *.????s (glob)
1185 3 total queries in *.????s (glob)
1154 elapsed time: * seconds (glob)
1186 elapsed time: * seconds (glob)
1155 round-trips: 3
1187 round-trips: 3
1188 queries: 43
1156 heads summary:
1189 heads summary:
1157 total common heads: 1
1190 total common heads: 1
1158 also local heads: 0
1191 also local heads: 0
1159 also remote heads: 0
1192 also remote heads: 0
1160 both: 0
1193 both: 0
1161 local heads: 1
1194 local heads: 1
1162 common: 0
1195 common: 0
1163 missing: 1
1196 missing: 1
1164 remote heads: 1
1197 remote heads: 1
1165 common: 0
1198 common: 0
1166 unknown: 1
1199 unknown: 1
1167 local changesets: 1050
1200 local changesets: 1050
1168 common: 1000
1201 common: 1000
1169 heads: 1
1202 heads: 1
1170 roots: 1
1203 roots: 1
1171 missing: 50
1204 missing: 50
1172 heads: 1
1205 heads: 1
1173 roots: 1
1206 roots: 1
1174 first undecided set: 1050
1207 first undecided set: 1050
1175 heads: 1
1208 heads: 1
1176 roots: 1
1209 roots: 1
1177 common: 1000
1210 common: 1000
1178 missing: 50
1211 missing: 50
1179 common heads: 7ead0cba2838
1212 common heads: 7ead0cba2838
1180
1213
1181 % -- b -> a tree
1214 % -- b -> a tree
1182 comparing with a
1215 comparing with a
1183 searching for changes
1216 searching for changes
1184 unpruned common: 7ead0cba2838
1217 unpruned common: 7ead0cba2838
1185 elapsed time: * seconds (glob)
1218 elapsed time: * seconds (glob)
1186 round-trips: 3
1219 round-trips: 3
1220 queries: 4
1187 heads summary:
1221 heads summary:
1188 total common heads: 1
1222 total common heads: 1
1189 also local heads: 0
1223 also local heads: 0
1190 also remote heads: 0
1224 also remote heads: 0
1191 both: 0
1225 both: 0
1192 local heads: 1
1226 local heads: 1
1193 common: 0
1227 common: 0
1194 missing: 1
1228 missing: 1
1195 remote heads: 1
1229 remote heads: 1
1196 common: 0
1230 common: 0
1197 unknown: 1
1231 unknown: 1
1198 local changesets: 1030
1232 local changesets: 1030
1199 common: 1000
1233 common: 1000
1200 heads: 1
1234 heads: 1
1201 roots: 1
1235 roots: 1
1202 missing: 30
1236 missing: 30
1203 heads: 1
1237 heads: 1
1204 roots: 1
1238 roots: 1
1205 first undecided set: 1030
1239 first undecided set: 1030
1206 heads: 1
1240 heads: 1
1207 roots: 1
1241 roots: 1
1208 common: 1000
1242 common: 1000
1209 missing: 30
1243 missing: 30
1210 common heads: 7ead0cba2838
1244 common heads: 7ead0cba2838
1211
1245
1212 % -- b -> a set
1246 % -- b -> a set
1213 comparing with a
1247 comparing with a
1214 query 1; heads
1248 query 1; heads
1215 searching for changes
1249 searching for changes
1216 taking quick initial sample
1250 taking quick initial sample
1217 searching: 2 queries
1251 searching: 2 queries
1218 query 2; still undecided: 1029, sample size is: 11
1252 query 2; still undecided: 1029, sample size is: 11
1219 sampling from both directions
1253 sampling from both directions
1220 searching: 3 queries
1254 searching: 3 queries
1221 query 3; still undecided: 15, sample size is: 15
1255 query 3; still undecided: 15, sample size is: 15
1222 3 total queries in *.????s (glob)
1256 3 total queries in *.????s (glob)
1223 elapsed time: * seconds (glob)
1257 elapsed time: * seconds (glob)
1224 round-trips: 3
1258 round-trips: 3
1259 queries: 27
1225 heads summary:
1260 heads summary:
1226 total common heads: 1
1261 total common heads: 1
1227 also local heads: 0
1262 also local heads: 0
1228 also remote heads: 0
1263 also remote heads: 0
1229 both: 0
1264 both: 0
1230 local heads: 1
1265 local heads: 1
1231 common: 0
1266 common: 0
1232 missing: 1
1267 missing: 1
1233 remote heads: 1
1268 remote heads: 1
1234 common: 0
1269 common: 0
1235 unknown: 1
1270 unknown: 1
1236 local changesets: 1030
1271 local changesets: 1030
1237 common: 1000
1272 common: 1000
1238 heads: 1
1273 heads: 1
1239 roots: 1
1274 roots: 1
1240 missing: 30
1275 missing: 30
1241 heads: 1
1276 heads: 1
1242 roots: 1
1277 roots: 1
1243 first undecided set: 1030
1278 first undecided set: 1030
1244 heads: 1
1279 heads: 1
1245 roots: 1
1280 roots: 1
1246 common: 1000
1281 common: 1000
1247 missing: 30
1282 missing: 30
1248 common heads: 7ead0cba2838
1283 common heads: 7ead0cba2838
1249
1284
1250 % -- b -> a set (tip only)
1285 % -- b -> a set (tip only)
1251 comparing with a
1286 comparing with a
1252 query 1; heads
1287 query 1; heads
1253 searching for changes
1288 searching for changes
1254 taking quick initial sample
1289 taking quick initial sample
1255 searching: 2 queries
1290 searching: 2 queries
1256 query 2; still undecided: 1029, sample size is: 11
1291 query 2; still undecided: 1029, sample size is: 11
1257 sampling from both directions
1292 sampling from both directions
1258 searching: 3 queries
1293 searching: 3 queries
1259 query 3; still undecided: 15, sample size is: 15
1294 query 3; still undecided: 15, sample size is: 15
1260 3 total queries in *.????s (glob)
1295 3 total queries in *.????s (glob)
1261 elapsed time: * seconds (glob)
1296 elapsed time: * seconds (glob)
1262 round-trips: 3
1297 round-trips: 3
1298 queries: 27
1263 heads summary:
1299 heads summary:
1264 total common heads: 1
1300 total common heads: 1
1265 also local heads: 0
1301 also local heads: 0
1266 also remote heads: 0
1302 also remote heads: 0
1267 both: 0
1303 both: 0
1268 local heads: 1
1304 local heads: 1
1269 common: 0
1305 common: 0
1270 missing: 1
1306 missing: 1
1271 remote heads: 1
1307 remote heads: 1
1272 common: 0
1308 common: 0
1273 unknown: 1
1309 unknown: 1
1274 local changesets: 1030
1310 local changesets: 1030
1275 common: 1000
1311 common: 1000
1276 heads: 1
1312 heads: 1
1277 roots: 1
1313 roots: 1
1278 missing: 30
1314 missing: 30
1279 heads: 1
1315 heads: 1
1280 roots: 1
1316 roots: 1
1281 first undecided set: 1030
1317 first undecided set: 1030
1282 heads: 1
1318 heads: 1
1283 roots: 1
1319 roots: 1
1284 common: 1000
1320 common: 1000
1285 missing: 30
1321 missing: 30
1286 common heads: 7ead0cba2838
1322 common heads: 7ead0cba2838
1287
1323
1288
1324
1289 One with >200 heads. We now switch to send them all in the initial roundtrip, but still do sampling for the later request.
1325 One with >200 heads. We now switch to send them all in the initial roundtrip, but still do sampling for the later request.
1290
1326
1291 $ hg init manyheads
1327 $ hg init manyheads
1292 $ cd manyheads
1328 $ cd manyheads
1293 $ echo "+300:r @a" >dagdesc
1329 $ echo "+300:r @a" >dagdesc
1294 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1330 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1295 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1331 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1296 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1332 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1297 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1333 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1298 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1334 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1299 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1335 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1300 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1336 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1301 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1337 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1302 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1338 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1303 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1339 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1304 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1340 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1305 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1341 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1306 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1342 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1307 $ echo "@b *r+3" >>dagdesc # one more head
1343 $ echo "@b *r+3" >>dagdesc # one more head
1308 $ hg debugbuilddag <dagdesc
1344 $ hg debugbuilddag <dagdesc
1309 reading DAG from stdin
1345 reading DAG from stdin
1310
1346
1311 $ hg heads -t --template . | wc -c
1347 $ hg heads -t --template . | wc -c
1312 \s*261 (re)
1348 \s*261 (re)
1313
1349
1314 $ hg clone -b a . a
1350 $ hg clone -b a . a
1315 adding changesets
1351 adding changesets
1316 adding manifests
1352 adding manifests
1317 adding file changes
1353 adding file changes
1318 added 1340 changesets with 0 changes to 0 files (+259 heads)
1354 added 1340 changesets with 0 changes to 0 files (+259 heads)
1319 new changesets 1ea73414a91b:1c51e2c80832
1355 new changesets 1ea73414a91b:1c51e2c80832
1320 updating to branch a
1356 updating to branch a
1321 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1357 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1322 $ hg clone -b b . b
1358 $ hg clone -b b . b
1323 adding changesets
1359 adding changesets
1324 adding manifests
1360 adding manifests
1325 adding file changes
1361 adding file changes
1326 added 304 changesets with 0 changes to 0 files
1362 added 304 changesets with 0 changes to 0 files
1327 new changesets 1ea73414a91b:513314ca8b3a
1363 new changesets 1ea73414a91b:513314ca8b3a
1328 updating to branch b
1364 updating to branch b
1329 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1365 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1330
1366
1331 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --config devel.discovery.randomize=false --config devel.discovery.sample-size.initial=50
1367 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --config devel.discovery.randomize=false --config devel.discovery.sample-size.initial=50
1332 comparing with b
1368 comparing with b
1333 query 1; heads
1369 query 1; heads
1334 searching for changes
1370 searching for changes
1335 taking quick initial sample
1371 taking quick initial sample
1336 searching: 2 queries
1372 searching: 2 queries
1337 query 2; still undecided: 1080, sample size is: 50
1373 query 2; still undecided: 1080, sample size is: 50
1338 sampling from both directions
1374 sampling from both directions
1339 searching: 3 queries
1375 searching: 3 queries
1340 query 3; still undecided: 1030, sample size is: 200
1376 query 3; still undecided: 1030, sample size is: 200
1341 sampling from both directions
1377 sampling from both directions
1342 searching: 4 queries
1378 searching: 4 queries
1343 query 4; still undecided: 547, sample size is: 210
1379 query 4; still undecided: 547, sample size is: 210
1344 sampling from both directions
1380 sampling from both directions
1345 searching: 5 queries
1381 searching: 5 queries
1346 query 5; still undecided: 336, sample size is: 220
1382 query 5; still undecided: 336, sample size is: 220
1347 sampling from both directions
1383 sampling from both directions
1348 searching: 6 queries
1384 searching: 6 queries
1349 query 6; still undecided: 114, sample size is: 114
1385 query 6; still undecided: 114, sample size is: 114
1350 6 total queries in *.????s (glob)
1386 6 total queries in *.????s (glob)
1351 elapsed time: * seconds (glob)
1387 elapsed time: * seconds (glob)
1352 round-trips: 6
1388 round-trips: 6
1389 queries: 1054
1353 heads summary:
1390 heads summary:
1354 total common heads: 1
1391 total common heads: 1
1355 also local heads: 0
1392 also local heads: 0
1356 also remote heads: 0
1393 also remote heads: 0
1357 both: 0
1394 both: 0
1358 local heads: 260
1395 local heads: 260
1359 common: 0
1396 common: 0
1360 missing: 260
1397 missing: 260
1361 remote heads: 1
1398 remote heads: 1
1362 common: 0
1399 common: 0
1363 unknown: 1
1400 unknown: 1
1364 local changesets: 1340
1401 local changesets: 1340
1365 common: 300
1402 common: 300
1366 heads: 1
1403 heads: 1
1367 roots: 1
1404 roots: 1
1368 missing: 1040
1405 missing: 1040
1369 heads: 260
1406 heads: 260
1370 roots: 260
1407 roots: 260
1371 first undecided set: 1340
1408 first undecided set: 1340
1372 heads: 260
1409 heads: 260
1373 roots: 1
1410 roots: 1
1374 common: 300
1411 common: 300
1375 missing: 1040
1412 missing: 1040
1376 common heads: 3ee37d65064a
1413 common heads: 3ee37d65064a
1377 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --rev tip
1414 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --rev tip
1378 comparing with b
1415 comparing with b
1379 query 1; heads
1416 query 1; heads
1380 searching for changes
1417 searching for changes
1381 taking quick initial sample
1418 taking quick initial sample
1382 searching: 2 queries
1419 searching: 2 queries
1383 query 2; still undecided: 303, sample size is: 9
1420 query 2; still undecided: 303, sample size is: 9
1384 sampling from both directions
1421 sampling from both directions
1385 searching: 3 queries
1422 searching: 3 queries
1386 query 3; still undecided: 3, sample size is: 3
1423 query 3; still undecided: 3, sample size is: 3
1387 3 total queries in *.????s (glob)
1424 3 total queries in *.????s (glob)
1388 elapsed time: * seconds (glob)
1425 elapsed time: * seconds (glob)
1389 round-trips: 3
1426 round-trips: 3
1427 queries: 13
1390 heads summary:
1428 heads summary:
1391 total common heads: 1
1429 total common heads: 1
1392 also local heads: 0
1430 also local heads: 0
1393 also remote heads: 0
1431 also remote heads: 0
1394 both: 0
1432 both: 0
1395 local heads: 260
1433 local heads: 260
1396 common: 0
1434 common: 0
1397 missing: 260
1435 missing: 260
1398 remote heads: 1
1436 remote heads: 1
1399 common: 0
1437 common: 0
1400 unknown: 1
1438 unknown: 1
1401 local changesets: 1340
1439 local changesets: 1340
1402 common: 300
1440 common: 300
1403 heads: 1
1441 heads: 1
1404 roots: 1
1442 roots: 1
1405 missing: 1040
1443 missing: 1040
1406 heads: 260
1444 heads: 260
1407 roots: 260
1445 roots: 260
1408 first undecided set: 1340
1446 first undecided set: 1340
1409 heads: 260
1447 heads: 260
1410 roots: 1
1448 roots: 1
1411 common: 300
1449 common: 300
1412 missing: 1040
1450 missing: 1040
1413 common heads: 3ee37d65064a
1451 common heads: 3ee37d65064a
1414
1452
1415 $ hg -R a debugdiscovery b --debug --config devel.discovery.exchange-heads=false --config devel.discovery.randomize=false --config devel.discovery.grow-sample.rate=1.20 --config devel.discovery.sample-size=50
1453 $ hg -R a debugdiscovery b --debug --config devel.discovery.exchange-heads=false --config devel.discovery.randomize=false --config devel.discovery.grow-sample.rate=1.20 --config devel.discovery.sample-size=50
1416 comparing with b
1454 comparing with b
1417 searching for changes
1455 searching for changes
1418 sampling from both directions
1456 sampling from both directions
1419 query 1; still undecided: 1340, sample size is: 50
1457 query 1; still undecided: 1340, sample size is: 50
1420 sampling from both directions
1458 sampling from both directions
1421 query 2; still undecided: 995, sample size is: 60
1459 query 2; still undecided: 995, sample size is: 60
1422 sampling from both directions
1460 sampling from both directions
1423 query 3; still undecided: 913, sample size is: 72
1461 query 3; still undecided: 913, sample size is: 72
1424 sampling from both directions
1462 sampling from both directions
1425 query 4; still undecided: 816, sample size is: 204
1463 query 4; still undecided: 816, sample size is: 204
1426 sampling from both directions
1464 sampling from both directions
1427 query 5; still undecided: 612, sample size is: 153
1465 query 5; still undecided: 612, sample size is: 153
1428 sampling from both directions
1466 sampling from both directions
1429 query 6; still undecided: 456, sample size is: 123
1467 query 6; still undecided: 456, sample size is: 123
1430 sampling from both directions
1468 sampling from both directions
1431 query 7; still undecided: 332, sample size is: 147
1469 query 7; still undecided: 332, sample size is: 147
1432 sampling from both directions
1470 sampling from both directions
1433 query 8; still undecided: 184, sample size is: 176
1471 query 8; still undecided: 184, sample size is: 176
1434 sampling from both directions
1472 sampling from both directions
1435 query 9; still undecided: 8, sample size is: 8
1473 query 9; still undecided: 8, sample size is: 8
1436 9 total queries in *s (glob)
1474 9 total queries in *s (glob)
1437 elapsed time: * seconds (glob)
1475 elapsed time: * seconds (glob)
1438 round-trips: 9
1476 round-trips: 9
1477 queries: 993
1439 heads summary:
1478 heads summary:
1440 total common heads: 1
1479 total common heads: 1
1441 also local heads: 0
1480 also local heads: 0
1442 also remote heads: 0
1481 also remote heads: 0
1443 both: 0
1482 both: 0
1444 local heads: 260
1483 local heads: 260
1445 common: 0
1484 common: 0
1446 missing: 260
1485 missing: 260
1447 remote heads: 1
1486 remote heads: 1
1448 common: 0
1487 common: 0
1449 unknown: 1
1488 unknown: 1
1450 local changesets: 1340
1489 local changesets: 1340
1451 common: 300
1490 common: 300
1452 heads: 1
1491 heads: 1
1453 roots: 1
1492 roots: 1
1454 missing: 1040
1493 missing: 1040
1455 heads: 260
1494 heads: 260
1456 roots: 260
1495 roots: 260
1457 first undecided set: 1340
1496 first undecided set: 1340
1458 heads: 260
1497 heads: 260
1459 roots: 1
1498 roots: 1
1460 common: 300
1499 common: 300
1461 missing: 1040
1500 missing: 1040
1462 common heads: 3ee37d65064a
1501 common heads: 3ee37d65064a
1463
1502
1464 Test actual protocol when pulling one new head in addition to common heads
1503 Test actual protocol when pulling one new head in addition to common heads
1465
1504
1466 $ hg clone -U b c
1505 $ hg clone -U b c
1467 $ hg -R c id -ir tip
1506 $ hg -R c id -ir tip
1468 513314ca8b3a
1507 513314ca8b3a
1469 $ hg -R c up -qr default
1508 $ hg -R c up -qr default
1470 $ touch c/f
1509 $ touch c/f
1471 $ hg -R c ci -Aqm "extra head"
1510 $ hg -R c ci -Aqm "extra head"
1472 $ hg -R c id -i
1511 $ hg -R c id -i
1473 e64a39e7da8b
1512 e64a39e7da8b
1474
1513
1475 $ hg serve -R c -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
1514 $ hg serve -R c -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
1476 $ cat hg.pid >> $DAEMON_PIDS
1515 $ cat hg.pid >> $DAEMON_PIDS
1477
1516
1478 $ hg -R b incoming http://localhost:$HGPORT/ -T '{node|short}\n'
1517 $ hg -R b incoming http://localhost:$HGPORT/ -T '{node|short}\n'
1479 comparing with http://localhost:$HGPORT/
1518 comparing with http://localhost:$HGPORT/
1480 searching for changes
1519 searching for changes
1481 e64a39e7da8b
1520 e64a39e7da8b
1482
1521
1483 $ killdaemons.py
1522 $ killdaemons.py
1484 $ cut -d' ' -f6- access.log | grep -v cmd=known # cmd=known uses random sampling
1523 $ cut -d' ' -f6- access.log | grep -v cmd=known # cmd=known uses random sampling
1485 "GET /?cmd=capabilities HTTP/1.1" 200 -
1524 "GET /?cmd=capabilities HTTP/1.1" 200 -
1486 "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D513314ca8b3ae4dac8eec56966265b00fcf866db x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1525 "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D513314ca8b3ae4dac8eec56966265b00fcf866db x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1487 "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:$USUAL_BUNDLE_CAPS$&cg=1&common=513314ca8b3ae4dac8eec56966265b00fcf866db&heads=e64a39e7da8b0d54bc63e81169aff001c13b3477 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1526 "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:$USUAL_BUNDLE_CAPS$&cg=1&common=513314ca8b3ae4dac8eec56966265b00fcf866db&heads=e64a39e7da8b0d54bc63e81169aff001c13b3477 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1488 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1527 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1489 $ cat errors.log
1528 $ cat errors.log
1490
1529
1491 $ cd ..
1530 $ cd ..
1492
1531
1493
1532
1494 Issue 4438 - test coverage for 3ef893520a85 issues.
1533 Issue 4438 - test coverage for 3ef893520a85 issues.
1495
1534
1496 $ mkdir issue4438
1535 $ mkdir issue4438
1497 $ cd issue4438
1536 $ cd issue4438
1498 #if false
1537 #if false
1499 generate new bundles:
1538 generate new bundles:
1500 $ hg init r1
1539 $ hg init r1
1501 $ for i in `"$PYTHON" $TESTDIR/seq.py 101`; do hg -R r1 up -qr null && hg -R r1 branch -q b$i && hg -R r1 ci -qmb$i; done
1540 $ for i in `"$PYTHON" $TESTDIR/seq.py 101`; do hg -R r1 up -qr null && hg -R r1 branch -q b$i && hg -R r1 ci -qmb$i; done
1502 $ hg clone -q r1 r2
1541 $ hg clone -q r1 r2
1503 $ for i in `"$PYTHON" $TESTDIR/seq.py 10`; do hg -R r1 up -qr null && hg -R r1 branch -q c$i && hg -R r1 ci -qmc$i; done
1542 $ for i in `"$PYTHON" $TESTDIR/seq.py 10`; do hg -R r1 up -qr null && hg -R r1 branch -q c$i && hg -R r1 ci -qmc$i; done
1504 $ hg -R r2 branch -q r2change && hg -R r2 ci -qmr2change
1543 $ hg -R r2 branch -q r2change && hg -R r2 ci -qmr2change
1505 $ hg -R r1 bundle -qa $TESTDIR/bundles/issue4438-r1.hg
1544 $ hg -R r1 bundle -qa $TESTDIR/bundles/issue4438-r1.hg
1506 $ hg -R r2 bundle -qa $TESTDIR/bundles/issue4438-r2.hg
1545 $ hg -R r2 bundle -qa $TESTDIR/bundles/issue4438-r2.hg
1507 #else
1546 #else
1508 use existing bundles:
1547 use existing bundles:
1509 $ hg init r1
1548 $ hg init r1
1510 $ hg -R r1 -q unbundle $TESTDIR/bundles/issue4438-r1.hg
1549 $ hg -R r1 -q unbundle $TESTDIR/bundles/issue4438-r1.hg
1511 $ hg -R r1 -q up
1550 $ hg -R r1 -q up
1512 $ hg init r2
1551 $ hg init r2
1513 $ hg -R r2 -q unbundle $TESTDIR/bundles/issue4438-r2.hg
1552 $ hg -R r2 -q unbundle $TESTDIR/bundles/issue4438-r2.hg
1514 $ hg -R r2 -q up
1553 $ hg -R r2 -q up
1515 #endif
1554 #endif
1516
1555
1517 Set iteration order could cause wrong and unstable results - fixed in 73cfaa348650:
1556 Set iteration order could cause wrong and unstable results - fixed in 73cfaa348650:
1518
1557
1519 $ hg -R r1 outgoing r2 -T'{rev} '
1558 $ hg -R r1 outgoing r2 -T'{rev} '
1520 comparing with r2
1559 comparing with r2
1521 searching for changes
1560 searching for changes
1522 101 102 103 104 105 106 107 108 109 110 (no-eol)
1561 101 102 103 104 105 106 107 108 109 110 (no-eol)
1523
1562
1524 The case where all the 'initialsamplesize' samples already were common would
1563 The case where all the 'initialsamplesize' samples already were common would
1525 give 'all remote heads known locally' without checking the remaining heads -
1564 give 'all remote heads known locally' without checking the remaining heads -
1526 fixed in 86c35b7ae300:
1565 fixed in 86c35b7ae300:
1527
1566
1528 $ cat >> r1/.hg/hgrc << EOF
1567 $ cat >> r1/.hg/hgrc << EOF
1529 > [devel]
1568 > [devel]
1530 > discovery.randomize = False
1569 > discovery.randomize = False
1531 > EOF
1570 > EOF
1532
1571
1533 $ hg -R r1 outgoing r2 -T'{rev} ' --config extensions.blackbox= \
1572 $ hg -R r1 outgoing r2 -T'{rev} ' --config extensions.blackbox= \
1534 > --config blackbox.track='command commandfinish discovery'
1573 > --config blackbox.track='command commandfinish discovery'
1535 comparing with r2
1574 comparing with r2
1536 searching for changes
1575 searching for changes
1537 101 102 103 104 105 106 107 108 109 110 (no-eol)
1576 101 102 103 104 105 106 107 108 109 110 (no-eol)
1538 $ hg -R r1 --config extensions.blackbox= blackbox --config blackbox.track=
1577 $ hg -R r1 --config extensions.blackbox= blackbox --config blackbox.track=
1539 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> serve --no-profile --cmdserver chgunix * (glob) (chg !)
1578 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> serve --no-profile --cmdserver chgunix * (glob) (chg !)
1540 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* (glob)
1579 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* (glob)
1541 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> found 101 common and 1 unknown server heads, 1 roundtrips in *.????s (glob)
1580 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> found 101 common and 1 unknown server heads, 1 roundtrips in *.????s (glob)
1542 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* exited 0 after *.?? seconds (glob)
1581 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* exited 0 after *.?? seconds (glob)
1543 $ cd ..
1582 $ cd ..
1544
1583
1545 Even if the set of revs to discover is restricted, unrelated revs may be
1584 Even if the set of revs to discover is restricted, unrelated revs may be
1546 returned as common heads.
1585 returned as common heads.
1547
1586
1548 $ mkdir ancestorsof
1587 $ mkdir ancestorsof
1549 $ cd ancestorsof
1588 $ cd ancestorsof
1550 $ hg init a
1589 $ hg init a
1551 $ hg clone a b -q
1590 $ hg clone a b -q
1552 $ cd b
1591 $ cd b
1553 $ hg debugbuilddag '.:root *root *root'
1592 $ hg debugbuilddag '.:root *root *root'
1554 $ hg log -G -T '{node|short}'
1593 $ hg log -G -T '{node|short}'
1555 o fa942426a6fd
1594 o fa942426a6fd
1556 |
1595 |
1557 | o 66f7d451a68b
1596 | o 66f7d451a68b
1558 |/
1597 |/
1559 o 1ea73414a91b
1598 o 1ea73414a91b
1560
1599
1561 $ hg push -r 66f7d451a68b -q
1600 $ hg push -r 66f7d451a68b -q
1562 $ hg debugdiscovery --verbose --rev fa942426a6fd
1601 $ hg debugdiscovery --verbose --rev fa942426a6fd
1563 comparing with $TESTTMP/ancestorsof/a
1602 comparing with $TESTTMP/ancestorsof/a
1564 searching for changes
1603 searching for changes
1565 elapsed time: * seconds (glob)
1604 elapsed time: * seconds (glob)
1566 round-trips: 1
1605 round-trips: 1
1606 queries: 1
1567 heads summary:
1607 heads summary:
1568 total common heads: 1
1608 total common heads: 1
1569 also local heads: 1
1609 also local heads: 1
1570 also remote heads: 1
1610 also remote heads: 1
1571 both: 1
1611 both: 1
1572 local heads: 2
1612 local heads: 2
1573 common: 1
1613 common: 1
1574 missing: 1
1614 missing: 1
1575 remote heads: 1
1615 remote heads: 1
1576 common: 1
1616 common: 1
1577 unknown: 0
1617 unknown: 0
1578 local changesets: 3
1618 local changesets: 3
1579 common: 2
1619 common: 2
1580 heads: 1
1620 heads: 1
1581 roots: 1
1621 roots: 1
1582 missing: 1
1622 missing: 1
1583 heads: 1
1623 heads: 1
1584 roots: 1
1624 roots: 1
1585 first undecided set: 1
1625 first undecided set: 1
1586 heads: 1
1626 heads: 1
1587 roots: 1
1627 roots: 1
1588 common: 0
1628 common: 0
1589 missing: 1
1629 missing: 1
1590 common heads: 66f7d451a68b
1630 common heads: 66f7d451a68b
1591
1631
1592 $ cd ..
1632 $ cd ..
1593
1633
1594
1634
1595 Test debuging discovery using different subset of the same repository
1635 Test debuging discovery using different subset of the same repository
1596 =====================================================================
1636 =====================================================================
1597
1637
1598 remote is a local subset
1638 remote is a local subset
1599 ------------------------
1639 ------------------------
1600
1640
1601 remote will be last 25 heads of the local graph
1641 remote will be last 25 heads of the local graph
1602
1642
1603 $ cd $TESTTMP/manyheads
1643 $ cd $TESTTMP/manyheads
1604 $ hg -R a debugdiscovery \
1644 $ hg -R a debugdiscovery \
1605 > --debug \
1645 > --debug \
1606 > --remote-as-revs 'last(heads(all()), 25)' \
1646 > --remote-as-revs 'last(heads(all()), 25)' \
1607 > --config devel.discovery.randomize=false
1647 > --config devel.discovery.randomize=false
1608 query 1; heads
1648 query 1; heads
1609 searching for changes
1649 searching for changes
1610 all remote heads known locally
1650 all remote heads known locally
1611 elapsed time: * seconds (glob)
1651 elapsed time: * seconds (glob)
1612 round-trips: 1
1652 round-trips: 1
1653 queries: 260
1613 heads summary:
1654 heads summary:
1614 total common heads: 25
1655 total common heads: 25
1615 also local heads: 25
1656 also local heads: 25
1616 also remote heads: 25
1657 also remote heads: 25
1617 both: 25
1658 both: 25
1618 local heads: 260
1659 local heads: 260
1619 common: 25
1660 common: 25
1620 missing: 235
1661 missing: 235
1621 remote heads: 25
1662 remote heads: 25
1622 common: 25
1663 common: 25
1623 unknown: 0
1664 unknown: 0
1624 local changesets: 1340
1665 local changesets: 1340
1625 common: 400
1666 common: 400
1626 heads: 25
1667 heads: 25
1627 roots: 1
1668 roots: 1
1628 missing: 940
1669 missing: 940
1629 heads: 235
1670 heads: 235
1630 roots: 235
1671 roots: 235
1631 first undecided set: 940
1672 first undecided set: 940
1632 heads: 235
1673 heads: 235
1633 roots: 235
1674 roots: 235
1634 common: 0
1675 common: 0
1635 missing: 940
1676 missing: 940
1636 common heads: 0dfd965d91c6 0fe09b60448d 14a17233ce9d 175c0a3072cf 1c51e2c80832 1e51600e0698 24eb5f9bdbab 25ce09526613 36bd00abde57 426989fdefa0 596d87362679 5dd1039ea5c0 5ef24f022278 5f230dc19419 80b39998accb 88f40688ffb5 9e37ddf8c632 abf4d55b075e b2ce801fddfe b368b6ac3ce3 c959bf2e869c c9fba6ba4e2e d783207cf649 d9a51e256f21 e3717a4e3753
1677 common heads: 0dfd965d91c6 0fe09b60448d 14a17233ce9d 175c0a3072cf 1c51e2c80832 1e51600e0698 24eb5f9bdbab 25ce09526613 36bd00abde57 426989fdefa0 596d87362679 5dd1039ea5c0 5ef24f022278 5f230dc19419 80b39998accb 88f40688ffb5 9e37ddf8c632 abf4d55b075e b2ce801fddfe b368b6ac3ce3 c959bf2e869c c9fba6ba4e2e d783207cf649 d9a51e256f21 e3717a4e3753
1637
1678
1638 local is a local subset
1679 local is a local subset
1639 ------------------------
1680 ------------------------
1640
1681
1641 remote will be last 25 heads of the local graph
1682 remote will be last 25 heads of the local graph
1642
1683
1643 $ cd $TESTTMP/manyheads
1684 $ cd $TESTTMP/manyheads
1644 $ hg -R a debugdiscovery b \
1685 $ hg -R a debugdiscovery b \
1645 > --debug \
1686 > --debug \
1646 > --local-as-revs 'first(heads(all()), 25)' \
1687 > --local-as-revs 'first(heads(all()), 25)' \
1647 > --config devel.discovery.randomize=false
1688 > --config devel.discovery.randomize=false
1648 comparing with b
1689 comparing with b
1649 query 1; heads
1690 query 1; heads
1650 searching for changes
1691 searching for changes
1651 taking quick initial sample
1692 taking quick initial sample
1652 query 2; still undecided: 375, sample size is: 81
1693 query 2; still undecided: 375, sample size is: 81
1653 sampling from both directions
1694 sampling from both directions
1654 query 3; still undecided: 3, sample size is: 3
1695 query 3; still undecided: 3, sample size is: 3
1655 3 total queries *s (glob)
1696 3 total queries *s (glob)
1656 elapsed time: * seconds (glob)
1697 elapsed time: * seconds (glob)
1657 round-trips: 3
1698 round-trips: 3
1699 queries: 109
1658 heads summary:
1700 heads summary:
1659 total common heads: 1
1701 total common heads: 1
1660 also local heads: 0
1702 also local heads: 0
1661 also remote heads: 0
1703 also remote heads: 0
1662 both: 0
1704 both: 0
1663 local heads: 25
1705 local heads: 25
1664 common: 0
1706 common: 0
1665 missing: 25
1707 missing: 25
1666 remote heads: 1
1708 remote heads: 1
1667 common: 0
1709 common: 0
1668 unknown: 1
1710 unknown: 1
1669 local changesets: 400
1711 local changesets: 400
1670 common: 300
1712 common: 300
1671 heads: 1
1713 heads: 1
1672 roots: 1
1714 roots: 1
1673 missing: 100
1715 missing: 100
1674 heads: 25
1716 heads: 25
1675 roots: 25
1717 roots: 25
1676 first undecided set: 400
1718 first undecided set: 400
1677 heads: 25
1719 heads: 25
1678 roots: 1
1720 roots: 1
1679 common: 300
1721 common: 300
1680 missing: 100
1722 missing: 100
1681 common heads: 3ee37d65064a
1723 common heads: 3ee37d65064a
1682
1724
1683 both local and remove are subset
1725 both local and remove are subset
1684 ------------------------
1726 ------------------------
1685
1727
1686 remote will be last 25 heads of the local graph
1728 remote will be last 25 heads of the local graph
1687
1729
1688 $ cd $TESTTMP/manyheads
1730 $ cd $TESTTMP/manyheads
1689 $ hg -R a debugdiscovery \
1731 $ hg -R a debugdiscovery \
1690 > --debug \
1732 > --debug \
1691 > --local-as-revs 'first(heads(all()), 25)' \
1733 > --local-as-revs 'first(heads(all()), 25)' \
1692 > --remote-as-revs 'last(heads(all()), 25)' \
1734 > --remote-as-revs 'last(heads(all()), 25)' \
1693 > --config devel.discovery.randomize=false
1735 > --config devel.discovery.randomize=false
1694 query 1; heads
1736 query 1; heads
1695 searching for changes
1737 searching for changes
1696 taking quick initial sample
1738 taking quick initial sample
1697 query 2; still undecided: 375, sample size is: 81
1739 query 2; still undecided: 375, sample size is: 81
1698 sampling from both directions
1740 sampling from both directions
1699 query 3; still undecided: 3, sample size is: 3
1741 query 3; still undecided: 3, sample size is: 3
1700 3 total queries in *s (glob)
1742 3 total queries in *s (glob)
1701 elapsed time: * seconds (glob)
1743 elapsed time: * seconds (glob)
1702 round-trips: 3
1744 round-trips: 3
1745 queries: 109
1703 heads summary:
1746 heads summary:
1704 total common heads: 1
1747 total common heads: 1
1705 also local heads: 0
1748 also local heads: 0
1706 also remote heads: 0
1749 also remote heads: 0
1707 both: 0
1750 both: 0
1708 local heads: 25
1751 local heads: 25
1709 common: 0
1752 common: 0
1710 missing: 25
1753 missing: 25
1711 remote heads: 25
1754 remote heads: 25
1712 common: 0
1755 common: 0
1713 unknown: 25
1756 unknown: 25
1714 local changesets: 400
1757 local changesets: 400
1715 common: 300
1758 common: 300
1716 heads: 1
1759 heads: 1
1717 roots: 1
1760 roots: 1
1718 missing: 100
1761 missing: 100
1719 heads: 25
1762 heads: 25
1720 roots: 25
1763 roots: 25
1721 first undecided set: 400
1764 first undecided set: 400
1722 heads: 25
1765 heads: 25
1723 roots: 1
1766 roots: 1
1724 common: 300
1767 common: 300
1725 missing: 100
1768 missing: 100
1726 common heads: 3ee37d65064a
1769 common heads: 3ee37d65064a
1727
1770
1728 Test -T json output
1771 Test -T json output
1729 -------------------
1772 -------------------
1730
1773
1731 $ hg -R a debugdiscovery \
1774 $ hg -R a debugdiscovery \
1732 > -T json \
1775 > -T json \
1733 > --debug \
1776 > --debug \
1734 > --local-as-revs 'first(heads(all()), 25)' \
1777 > --local-as-revs 'first(heads(all()), 25)' \
1735 > --remote-as-revs 'last(heads(all()), 25)' \
1778 > --remote-as-revs 'last(heads(all()), 25)' \
1736 > --config devel.discovery.randomize=false
1779 > --config devel.discovery.randomize=false
1737 [
1780 [
1738 {
1781 {
1739 "elapsed": *, (glob)
1782 "elapsed": *, (glob)
1740 "nb-common-heads": 1,
1783 "nb-common-heads": 1,
1741 "nb-common-heads-both": 0,
1784 "nb-common-heads-both": 0,
1742 "nb-common-heads-local": 0,
1785 "nb-common-heads-local": 0,
1743 "nb-common-heads-remote": 0,
1786 "nb-common-heads-remote": 0,
1744 "nb-common-roots": 1,
1787 "nb-common-roots": 1,
1745 "nb-head-local": 25,
1788 "nb-head-local": 25,
1746 "nb-head-local-missing": 25,
1789 "nb-head-local-missing": 25,
1747 "nb-head-remote": 25,
1790 "nb-head-remote": 25,
1748 "nb-head-remote-unknown": 25,
1791 "nb-head-remote-unknown": 25,
1749 "nb-ini_und": 400,
1792 "nb-ini_und": 400,
1750 "nb-ini_und-common": 300,
1793 "nb-ini_und-common": 300,
1751 "nb-ini_und-heads": 25,
1794 "nb-ini_und-heads": 25,
1752 "nb-ini_und-missing": 100,
1795 "nb-ini_und-missing": 100,
1753 "nb-ini_und-roots": 1,
1796 "nb-ini_und-roots": 1,
1754 "nb-missing-heads": 25,
1797 "nb-missing-heads": 25,
1755 "nb-missing-roots": 25,
1798 "nb-missing-roots": 25,
1756 "nb-revs": 400,
1799 "nb-revs": 400,
1757 "nb-revs-common": 300,
1800 "nb-revs-common": 300,
1758 "nb-revs-missing": 100,
1801 "nb-revs-missing": 100,
1759 "output": "query 1; heads\nsearching for changes\ntaking quick initial sample\nquery 2; still undecided: 375, sample size is: 81\nsampling from both directions\nquery 3; still undecided: 3, sample size is: 3\n3 total queries in *s\n", (glob)
1802 "output": "query 1; heads\nsearching for changes\ntaking quick initial sample\nquery 2; still undecided: 375, sample size is: 81\nsampling from both directions\nquery 3; still undecided: 3, sample size is: 3\n3 total queries in *s\n", (glob)
1803 "total-queries": 109,
1760 "total-roundtrips": 3
1804 "total-roundtrips": 3
1761 }
1805 }
1762 ]
1806 ]
General Comments 0
You need to be logged in to leave comments. Login now