##// END OF EJS Templates
debugdiscovery: display the number of roundtrip used...
marmoute -
r46726:d90f439f default
parent child Browse files
Show More
@@ -1,4652 +1,4653 b''
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import difflib
12 import difflib
13 import errno
13 import errno
14 import glob
14 import glob
15 import operator
15 import operator
16 import os
16 import os
17 import platform
17 import platform
18 import random
18 import random
19 import re
19 import re
20 import socket
20 import socket
21 import ssl
21 import ssl
22 import stat
22 import stat
23 import string
23 import string
24 import subprocess
24 import subprocess
25 import sys
25 import sys
26 import time
26 import time
27
27
28 from .i18n import _
28 from .i18n import _
29 from .node import (
29 from .node import (
30 bin,
30 bin,
31 hex,
31 hex,
32 nullid,
32 nullid,
33 nullrev,
33 nullrev,
34 short,
34 short,
35 )
35 )
36 from .pycompat import (
36 from .pycompat import (
37 getattr,
37 getattr,
38 open,
38 open,
39 )
39 )
40 from . import (
40 from . import (
41 bundle2,
41 bundle2,
42 bundlerepo,
42 bundlerepo,
43 changegroup,
43 changegroup,
44 cmdutil,
44 cmdutil,
45 color,
45 color,
46 context,
46 context,
47 copies,
47 copies,
48 dagparser,
48 dagparser,
49 encoding,
49 encoding,
50 error,
50 error,
51 exchange,
51 exchange,
52 extensions,
52 extensions,
53 filemerge,
53 filemerge,
54 filesetlang,
54 filesetlang,
55 formatter,
55 formatter,
56 hg,
56 hg,
57 httppeer,
57 httppeer,
58 localrepo,
58 localrepo,
59 lock as lockmod,
59 lock as lockmod,
60 logcmdutil,
60 logcmdutil,
61 mergestate as mergestatemod,
61 mergestate as mergestatemod,
62 metadata,
62 metadata,
63 obsolete,
63 obsolete,
64 obsutil,
64 obsutil,
65 pathutil,
65 pathutil,
66 phases,
66 phases,
67 policy,
67 policy,
68 pvec,
68 pvec,
69 pycompat,
69 pycompat,
70 registrar,
70 registrar,
71 repair,
71 repair,
72 revlog,
72 revlog,
73 revset,
73 revset,
74 revsetlang,
74 revsetlang,
75 scmutil,
75 scmutil,
76 setdiscovery,
76 setdiscovery,
77 simplemerge,
77 simplemerge,
78 sshpeer,
78 sshpeer,
79 sslutil,
79 sslutil,
80 streamclone,
80 streamclone,
81 strip,
81 strip,
82 tags as tagsmod,
82 tags as tagsmod,
83 templater,
83 templater,
84 treediscovery,
84 treediscovery,
85 upgrade,
85 upgrade,
86 url as urlmod,
86 url as urlmod,
87 util,
87 util,
88 vfs as vfsmod,
88 vfs as vfsmod,
89 wireprotoframing,
89 wireprotoframing,
90 wireprotoserver,
90 wireprotoserver,
91 wireprotov2peer,
91 wireprotov2peer,
92 )
92 )
93 from .utils import (
93 from .utils import (
94 cborutil,
94 cborutil,
95 compression,
95 compression,
96 dateutil,
96 dateutil,
97 procutil,
97 procutil,
98 stringutil,
98 stringutil,
99 )
99 )
100
100
101 from .revlogutils import (
101 from .revlogutils import (
102 deltas as deltautil,
102 deltas as deltautil,
103 nodemap,
103 nodemap,
104 sidedata,
104 sidedata,
105 )
105 )
106
106
# Convenience alias used by the lock-releasing code paths below.
release = lockmod.release

# Command table for all debug* commands.  It is seeded with the strip
# extension's commands and then populated by the @command decorator;
# order matters: `table` must exist before `command` is created from it.
table = {}
table.update(strip.command._table)
command = registrar.command(table)
112
112
113
113
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # An explicit revlog index file was given; open it relative to the
        # current working directory, without path auditing.
        index, rev1, rev2 = args
        store = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = store.lookup
    elif nargs == 2:
        # No index file: fall back to the changelog of the local repository.
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        store = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    anc = store.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (store.rev(anc), hex(anc)))
133
133
134
134
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # Mercurial vfs paths are bytes throughout; the previous code passed a
    # native str literal here, which breaks on Python 3 where bytes and str
    # do not mix.  Use b'' literals for the file name in both calls.
    with repo.cachevfs.open(b'eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join(b'eicar-test-file.com'))
150
150
151
151
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the bundle path (local file or URL), parse the bundle header,
    # and apply the stream payload to the repository.
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
158
158
159
159
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    # The command only works on a completely empty repository.
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_(b'repository is not empty'))

    # First pass over the DAG text: count node elements so the progress
    # bar below has an accurate total.
    # determine number of revs in DAG
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # Second pass: actually create the commits, under both repo locks and
    # a single transaction so a failure leaves no partial state.
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1  # id of the last node committed (-1: none yet)
        atbranch = b'default'  # branch applied to subsequently created nodes
        nodeids = []  # commit node ids, indexed by DAG node id
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    # "mf" gets a 3-way merged body so merges produce
                    # realistic, conflict-free merged content.
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # Tag this revision's dedicated line so every rev
                    # modifies a distinct part of the file.
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # "of" is fully rewritten by every revision.
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    # One brand-new file per revision; on merges, carry
                    # over the second parent's "nf*" files as well.
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: return content for paths we set
                    # above, None (== removed/absent) otherwise.
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                # Map DAG parent ids to previously committed node ids.
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # Local tag element: remember it, written out at the end.
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # Branch element: affects subsequent commits only.
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
335
335
336
336
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    # Dump the contents of a changegroup unbundler `gen`.  With all=True,
    # print full delta metadata for the changelog, the manifest and every
    # filelog section; otherwise print only the changelog node hashes.
    # `indent` prefixes every line (used when nested inside bundle2 output).
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # Print one section header, then one line per delta in it.
            # Consumes gen.deltaiter() up to the next section boundary.
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        # Sections must be consumed in stream order: changelog, manifest,
        # then one section per filelog.
        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        # iter(callable, sentinel): stop when filelogheader() returns an
        # empty dict, i.e. at the end of the filelog sections.
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
376
376
377
377
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    prefix = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # Unknown on-disk format: report it and bail out early.
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= prefix, exc.version, len(data)
        ui.write(msg)
        return

    msg = b"%sversion: %d (%d bytes)\n"
    msg %= prefix, version, len(data)
    ui.write(msg)
    # Render each marker through the standard formatter machinery so
    # template/json output modes keep working.
    fm = ui.formatter(b'debugobsolete', opts)
    for rawmarker in sorted(markers):
        marker = obsutil.marker(None, rawmarker)
        fm.startitem()
        fm.plain(prefix)
        cmdutil.showmarker(fm, marker)
    fm.end()
400
400
401
401
def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    prefix = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(prefix)
            ui.write(b'%s %s\n' % (hex(head), phasename))
410
410
411
411
def _quasirepr(thing):
    # Produce a deterministic, repr-like bytes rendering; dict-like values
    # are emitted with sorted keys so output is stable across runs.
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
        return b'{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
418
418
419
419
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    # Optional filter: only show parts whose type was requested.
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        if ui.quiet:
            continue
        # Expand the payload of the part types we know how to decode.
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        elif part.type == b'obsmarkers':
            _debugobsmarkers(ui, part, indent=4, **opts)
        elif part.type == b'phase-heads':
            _debugphaseheads(ui, part, indent=4)
442
442
443
443
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec: only report the bundlespec, do not inspect contents.
            ui.write(b'%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        # bundle2 files have their own, richer dump routine.
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
466
466
467
467
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    ui.writenoi18n(b'Main capabilities:\n')
    for cap in sorted(peer.capabilities()):
        ui.write(b' %s\n' % cap)
    # Bundle2 capabilities are advertised as key -> list-of-values.
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.writenoi18n(b'Bundle2 capabilities:\n')
        for key, values in sorted(pycompat.iteritems(b2caps)):
            ui.write(b' %s\n' % key)
            for value in values:
                ui.write(b' %s\n' % value)
484
484
485
485
@command(b'debugchangedfiles', [], b'REV')
def debugchangedfiles(ui, repo, rev):
    """list the stored files changes for a revision"""
    ctx = scmutil.revsingle(repo, rev, None)
    sd = repo.changelog.sidedata(ctx.rev())
    files_block = sd.get(sidedata.SD_FILES)
    if files_block is None:
        # No files sidedata recorded for this revision: nothing to show.
        return
    files = metadata.decode_files_sidedata(sd)
    template = b"%-8s %2s: %s, %s;\n"
    for f in sorted(files.touched):
        # Classify the change; first matching category wins, falling
        # back to the generic "touched".
        for members, label in (
            (files.added, b"added"),
            (files.removed, b"removed"),
            (files.merged, b"merged"),
            (files.salvaged, b"salvaged"),
        ):
            if f in members:
                action = label
                break
        else:
            action = b"touched"

        # Copy information, if any, with the parent it was copied from.
        copy_parent = b""
        copy_source = b""
        if f in files.copied_from_p1:
            copy_parent = b"p1"
            copy_source = files.copied_from_p1[f]
        elif f in files.copied_from_p2:
            copy_parent = b"p2"
            copy_source = files.copied_from_p2[f]

        ui.write(template % (action, copy_parent, f, copy_source))
518
518
519
519
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    problems = 0
    # Check every tracked path against the first-parent manifest.
    for path in repo.dirstate:
        state = repo.dirstate[path]
        if state in b"nr" and path not in m1:
            ui.warn(
                _(b"%s in state %s, but not in manifest1\n") % (path, state)
            )
            problems += 1
        if state in b"a" and path in m1:
            ui.warn(
                _(b"%s in state %s, but also in manifest1\n") % (path, state)
            )
            problems += 1
        if state in b"m" and path not in m1 and path not in m2:
            ui.warn(
                _(b"%s in state %s, but not in either manifest\n")
                % (path, state)
            )
            problems += 1
    # Reverse direction: every manifest entry must be tracked sensibly.
    for path in m1:
        state = repo.dirstate[path]
        if state not in b"nrm":
            ui.warn(
                _(b"%s in manifest1, but listed as state %s") % (path, state)
            )
            problems += 1
    if problems:
        errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
548
548
549
549
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
562
562
563
563
def _debugdisplaycolor(ui):
    # Work on a private copy so we can replace the style table with one
    # entry per known effect without disturbing the caller's ui.
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for name, value in ui.configitems(b'color'):
            if name.startswith(b'color.'):
                ui._styles[name] = name[6:]
            elif name.startswith(b'terminfo.'):
                ui._styles[name] = name[9:]
    ui.write(_(b'available colors:\n'))

    # sort label with a '_' after the other to group '_background' entry.
    def sortkey(item):
        return (b'_' in item[0], item[0], item[1])

    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
580
580
581
581
def _debugdisplaystyle(ui):
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # Pad so the effect lists line up in a column past the longest label.
    width = max(len(name) for name in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            ui.write(b': ')
            ui.write(b' ' * (max(0, width - len(label))))
            ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
        ui.write(b'\n')
595
595
596
596
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    requirements, chunks = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, chunks, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
618
618
619
619
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # Explicit revlog index file: emit that revlog's DAG.
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # 'n' events are (rev, [parents]); 'l' attaches an "rN" label
            # to every revision explicitly listed on the command line.
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            # 'a' events announce a branch change when --branches is set;
            # 'l' events attach tag names when --tags is set.
            b = b"default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
689
689
690
690
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        # With -c/-m/--dir the positional argument is the revision, not a
        # file, so shift it over; a second positional is then an error.
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(r.rawdata(r.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
706
706
707
707
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # --extended enables the additional, more permissive date formats.
    if opts["extended"]:
        d = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        d = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % d)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
    if range:
        # When a range is given, report whether the parsed date matches it.
        m = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % m(d[0]))
726
726
727
727
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # Classify how this revision's delta was computed and measure its
        # chain. Index tuple fields used: e[1] compressed size, e[2]
        # uncompressed size, e[3] delta base, e[5]/e[6] parents.
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            # Without generaldelta a delta is always against the previous
            # revision unless this revision is itself a full snapshot.
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    # NOTE(review): column padding inside these header/format literals may
    # have been collapsed by the diff rendering this file was recovered
    # from -- verify alignment against upstream before relying on it.
    fm.plain(
        b' rev chain# chainlen prev delta '
        b'size rawsize chainsize ratio lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b' readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        # Number chains by first appearance of their base revision.
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        # Guard the ratios against zero-sized denominators (e.g. empty
        # revisions or empty chains).
        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            # Simulate a sparse read of the whole chain and accumulate the
            # size of every block that would be fetched.
            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
908
908
909
909
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --no-dates and the deprecated --nodates both suppress mtimes.
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:
        keyfunc = lambda x: (x[1][3], x[0])  # sort by mtime, then by filename
    else:
        keyfunc = None  # sort by filename
    # Each dirstate entry is (state, mode, size, mtime).
    # NOTE(review): the padding inside the b'unset '/b'set ' literals may
    # have been collapsed by the diff rendering this file was recovered
    # from -- verify column alignment against upstream.
    for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
        if ent[3] == -1:
            timestr = b'unset '
        elif nodates:
            timestr = b'set '
        else:
            timestr = time.strftime(
                "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
            )
            timestr = encoding.strtolocal(timestr)
        # 0o20000 in the mode field marks a symlink.
        if ent[1] & 0o20000:
            mode = b'lnk'
        else:
            mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
953
953
954
954
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
    ]
    + cmdutil.remoteopts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    # `data` is filled by the discovery run (via `audit=`) and then
    # extended with the statistics computed below.
    data = {}
    if opts.get(b'old'):

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            # `anyinc` (whether an incoming changeset exists) is unused here.
            common, anyinc, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes, audit=data
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']
    with util.timedcm('debug-discovery') as t:
        common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    assert len(common) + len(missing) == len(all)

    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    # FIX: these two assignments were swapped -- the *-heads key was fed
    # len(roots_missing) and the *-roots key len(heads_missing).
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    # display discovery summary
    # NOTE(review): alignment padding inside these literals may have been
    # collapsed by the diff rendering this file was recovered from --
    # verify column alignment against upstream.
    ui.writenoi18n(b"elapsed time: %(elapsed)f seconds\n" % data)
    ui.writenoi18n(b"round-trips: %(total-roundtrips)9d\n" % data)
    ui.writenoi18n(b"heads summary:\n")
    ui.writenoi18n(b" total common heads: %(nb-common-heads)9d\n" % data)
    ui.writenoi18n(
        b" also local heads: %(nb-common-heads-local)9d\n" % data
    )
    ui.writenoi18n(
        b" also remote heads: %(nb-common-heads-remote)9d\n" % data
    )
    ui.writenoi18n(b" both: %(nb-common-heads-both)9d\n" % data)
    ui.writenoi18n(b" local heads: %(nb-head-local)9d\n" % data)
    ui.writenoi18n(
        b" common: %(nb-common-heads-local)9d\n" % data
    )
    ui.writenoi18n(
        b" missing: %(nb-head-local-missing)9d\n" % data
    )
    ui.writenoi18n(b" remote heads: %(nb-head-remote)9d\n" % data)
    ui.writenoi18n(
        b" common: %(nb-common-heads-remote)9d\n" % data
    )
    ui.writenoi18n(
        b" unknown: %(nb-head-remote-unknown)9d\n" % data
    )
    ui.writenoi18n(b"local changesets: %(nb-revs)9d\n" % data)
    ui.writenoi18n(b" common: %(nb-revs-common)9d\n" % data)
    ui.writenoi18n(b" heads: %(nb-common-heads)9d\n" % data)
    ui.writenoi18n(b" roots: %(nb-common-roots)9d\n" % data)
    ui.writenoi18n(b" missing: %(nb-revs-missing)9d\n" % data)
    ui.writenoi18n(b" heads: %(nb-missing-heads)9d\n" % data)
    ui.writenoi18n(b" roots: %(nb-missing-roots)9d\n" % data)
    ui.writenoi18n(b" first undecided set: %(nb-ini_und)9d\n" % data)
    ui.writenoi18n(b" heads: %(nb-ini_und-heads)9d\n" % data)
    ui.writenoi18n(b" roots: %(nb-ini_und-roots)9d\n" % data)
    ui.writenoi18n(b" common: %(nb-ini_und-common)9d\n" % data)
    ui.writenoi18n(b" missing: %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        ui.writenoi18n(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
1113
1113
1114
_chunksize = 4 << 10


@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config"""
    # urlmod.open applies Mercurial's URL handling (auth, proxies, ...)
    # and returns a file-like response object.
    fh = urlmod.open(ui, url, output)

    try:
        # default destination is the ui; --output redirects to a file
        dest = ui
        if output:
            dest = open(output, b"wb", _chunksize)
        try:
            # copy in fixed-size chunks so large downloads stay bounded
            data = fh.read(_chunksize)
            while data:
                dest.write(data)
                data = fh.read(_chunksize)
        finally:
            if output:
                dest.close()
    finally:
        # the original code leaked the source handle; always close it,
        # even when opening the output file or the copy loop fails
        fh.close()
1139
1140
1140
1141
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    # Emits one formatter item per loaded extension, sorted by name.
    # Plain mode shows the name with a "tested with" marker; --verbose
    # adds location, bundled flag, tested versions and the bug link.
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            # frozen (PyOxidizer) builds have no __file__; report the
            # executable path instead
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            # quiet/verbose: bare name, details (if any) follow separately
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                # warn with the newest version the extension declares
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b'  location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b'  bundled: %s\n') % [b'no', b'yes'][isinternal])
        # bundled flag is always recorded for machine-readable output
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b'  tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b'  bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1202
1203
1203
1204
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    # Local import keeps fileset out of the module import cycle.
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)

    # The expression goes through these stages in order; each stage name
    # can be requested with --show-stage to dump the intermediate tree.
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            # the stage header is omitted in the legacy --verbose-only path
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # Build the candidate file set the matcher will be applied to.
    files = set()
    if opts[b'all_files']:
        # every file touched by any revision, plus subrepo paths
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        # include the working directory contents (unknown/ignored too)
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1299
1300
1300
1301
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)

    # column width: widest variant name, never narrower than the header
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(maxvariantlength, len(b'format-variant'))

    def makeformatname(name):
        # '%s:' padded so every value column lines up
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():
        # plain output renders booleans as yes/no; byte strings untouched
        def formatvalue(value):
            if util.safehasattr(value, b'startswith'):
                return value
            return b'yes' if value else b'no'

    else:
        formatvalue = pycompat.identity

    # header row
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')

    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # classify how the repo value relates to config and default
        if repovalue != configvalue:
            state = b'mismatchconfig'
        elif repovalue != fv.default:
            state = b'mismatchdefault'
        else:
            state = b'uptodate'
        namelabel = b'formatvariant.name.' + state
        repolabel = b'formatvariant.repo.' + state

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        configlabel = (
            b'formatvariant.config.special'
            if fv.default != configvalue
            else b'formatvariant.config.default'
        )
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1371
1372
1372
1373
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # render a filesystem capability probe as yes/no
        return b'yes' if flag else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(
        b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
    )
    ui.writenoi18n(b'exec: %s\n' % yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    # probing case sensitivity needs a scratch file; any OS failure
    # (e.g. unwritable directory) leaves the answer as "(unknown)"
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1395
1396
1396
1397
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    # translate hex node arguments into binary nodes for the wire call
    kwargs = {}
    if common:
        kwargs['common'] = [bin(s) for s in common]
    if head:
        kwargs['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    kwargs['bundlecaps'] = None
    bundle = peer.getbundle(b'debug', **kwargs)

    # map user-facing compression names onto on-disk bundle type markers
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(opts.get(b'type', b'bzip2').lower())
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1443
1444
1444
1445
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            # `ignored` is the path whose rule matched (the file itself or
            # one of its parent directories); `ignoredata` is the matching
            # rule's (file, line number, line) triple
            ignored = None
            ignoredata = None
            if nf != b'.':
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    # not directly ignored: check each containing directory
                    # and stop at the first one that matches
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1493
1494
1494
1495
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    # full hashes in debug mode, abbreviated ones otherwise
    shortfn = hex if ui.debugflag else short

    # size the node-id columns from one rendered hash; an empty store
    # keeps the 12-character (short hash) fallback
    idlen = 12
    for first in store:
        idlen = len(shortfn(store.node(first)))
        break

    fm = ui.formatter(b'debugindex', opts)
    fm.plain(
        b'   rev linkrev %s %s p2\n'
        % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
    )

    for rev in store:
        node = store.node(rev)
        p1, p2 = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(p1))
        fm.write(b'p2', b'%s', shortfn(p2))
        fm.plain(b'\n')

    fm.end()
1534
1535
1535
1536
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in store:
        p1, p2 = store.parents(store.node(rev))
        # one edge per real parent; the second parent is nullid when absent
        ui.write(b"\t%d -> %d\n" % (store.rev(p1), rev))
        if p2 != nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(p2), rev))
    ui.write(b"}\n")
1554
1555
1555
1556
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # NOTE(review): presumably this call exercises the index so the native
    # stats are populated — confirm against the index implementation
    repo.changelog.shortest(nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    for name, value in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (name, value))
1565
1566
1566
1567
1567 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1568 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1568 def debuginstall(ui, **opts):
1569 def debuginstall(ui, **opts):
1569 """test Mercurial installation
1570 """test Mercurial installation
1570
1571
1571 Returns 0 on success.
1572 Returns 0 on success.
1572 """
1573 """
1573 opts = pycompat.byteskwargs(opts)
1574 opts = pycompat.byteskwargs(opts)
1574
1575
1575 problems = 0
1576 problems = 0
1576
1577
1577 fm = ui.formatter(b'debuginstall', opts)
1578 fm = ui.formatter(b'debuginstall', opts)
1578 fm.startitem()
1579 fm.startitem()
1579
1580
1580 # encoding might be unknown or wrong. don't translate these messages.
1581 # encoding might be unknown or wrong. don't translate these messages.
1581 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1582 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1582 err = None
1583 err = None
1583 try:
1584 try:
1584 codecs.lookup(pycompat.sysstr(encoding.encoding))
1585 codecs.lookup(pycompat.sysstr(encoding.encoding))
1585 except LookupError as inst:
1586 except LookupError as inst:
1586 err = stringutil.forcebytestr(inst)
1587 err = stringutil.forcebytestr(inst)
1587 problems += 1
1588 problems += 1
1588 fm.condwrite(
1589 fm.condwrite(
1589 err,
1590 err,
1590 b'encodingerror',
1591 b'encodingerror',
1591 b" %s\n (check that your locale is properly set)\n",
1592 b" %s\n (check that your locale is properly set)\n",
1592 err,
1593 err,
1593 )
1594 )
1594
1595
1595 # Python
1596 # Python
1596 pythonlib = None
1597 pythonlib = None
1597 if util.safehasattr(os, '__file__'):
1598 if util.safehasattr(os, '__file__'):
1598 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1599 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1599 elif getattr(sys, 'oxidized', False):
1600 elif getattr(sys, 'oxidized', False):
1600 pythonlib = pycompat.sysexecutable
1601 pythonlib = pycompat.sysexecutable
1601
1602
1602 fm.write(
1603 fm.write(
1603 b'pythonexe',
1604 b'pythonexe',
1604 _(b"checking Python executable (%s)\n"),
1605 _(b"checking Python executable (%s)\n"),
1605 pycompat.sysexecutable or _(b"unknown"),
1606 pycompat.sysexecutable or _(b"unknown"),
1606 )
1607 )
1607 fm.write(
1608 fm.write(
1608 b'pythonimplementation',
1609 b'pythonimplementation',
1609 _(b"checking Python implementation (%s)\n"),
1610 _(b"checking Python implementation (%s)\n"),
1610 pycompat.sysbytes(platform.python_implementation()),
1611 pycompat.sysbytes(platform.python_implementation()),
1611 )
1612 )
1612 fm.write(
1613 fm.write(
1613 b'pythonver',
1614 b'pythonver',
1614 _(b"checking Python version (%s)\n"),
1615 _(b"checking Python version (%s)\n"),
1615 (b"%d.%d.%d" % sys.version_info[:3]),
1616 (b"%d.%d.%d" % sys.version_info[:3]),
1616 )
1617 )
1617 fm.write(
1618 fm.write(
1618 b'pythonlib',
1619 b'pythonlib',
1619 _(b"checking Python lib (%s)...\n"),
1620 _(b"checking Python lib (%s)...\n"),
1620 pythonlib or _(b"unknown"),
1621 pythonlib or _(b"unknown"),
1621 )
1622 )
1622
1623
1623 try:
1624 try:
1624 from . import rustext
1625 from . import rustext
1625
1626
1626 rustext.__doc__ # trigger lazy import
1627 rustext.__doc__ # trigger lazy import
1627 except ImportError:
1628 except ImportError:
1628 rustext = None
1629 rustext = None
1629
1630
1630 security = set(sslutil.supportedprotocols)
1631 security = set(sslutil.supportedprotocols)
1631 if sslutil.hassni:
1632 if sslutil.hassni:
1632 security.add(b'sni')
1633 security.add(b'sni')
1633
1634
1634 fm.write(
1635 fm.write(
1635 b'pythonsecurity',
1636 b'pythonsecurity',
1636 _(b"checking Python security support (%s)\n"),
1637 _(b"checking Python security support (%s)\n"),
1637 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1638 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1638 )
1639 )
1639
1640
1640 # These are warnings, not errors. So don't increment problem count. This
1641 # These are warnings, not errors. So don't increment problem count. This
1641 # may change in the future.
1642 # may change in the future.
1642 if b'tls1.2' not in security:
1643 if b'tls1.2' not in security:
1643 fm.plain(
1644 fm.plain(
1644 _(
1645 _(
1645 b' TLS 1.2 not supported by Python install; '
1646 b' TLS 1.2 not supported by Python install; '
1646 b'network connections lack modern security\n'
1647 b'network connections lack modern security\n'
1647 )
1648 )
1648 )
1649 )
1649 if b'sni' not in security:
1650 if b'sni' not in security:
1650 fm.plain(
1651 fm.plain(
1651 _(
1652 _(
1652 b' SNI not supported by Python install; may have '
1653 b' SNI not supported by Python install; may have '
1653 b'connectivity issues with some servers\n'
1654 b'connectivity issues with some servers\n'
1654 )
1655 )
1655 )
1656 )
1656
1657
1657 fm.plain(
1658 fm.plain(
1658 _(
1659 _(
1659 b"checking Rust extensions (%s)\n"
1660 b"checking Rust extensions (%s)\n"
1660 % (b'missing' if rustext is None else b'installed')
1661 % (b'missing' if rustext is None else b'installed')
1661 ),
1662 ),
1662 )
1663 )
1663
1664
1664 # TODO print CA cert info
1665 # TODO print CA cert info
1665
1666
1666 # hg version
1667 # hg version
1667 hgver = util.version()
1668 hgver = util.version()
1668 fm.write(
1669 fm.write(
1669 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1670 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1670 )
1671 )
1671 fm.write(
1672 fm.write(
1672 b'hgverextra',
1673 b'hgverextra',
1673 _(b"checking Mercurial custom build (%s)\n"),
1674 _(b"checking Mercurial custom build (%s)\n"),
1674 b'+'.join(hgver.split(b'+')[1:]),
1675 b'+'.join(hgver.split(b'+')[1:]),
1675 )
1676 )
1676
1677
1677 # compiled modules
1678 # compiled modules
1678 hgmodules = None
1679 hgmodules = None
1679 if util.safehasattr(sys.modules[__name__], '__file__'):
1680 if util.safehasattr(sys.modules[__name__], '__file__'):
1680 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1681 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1681 elif getattr(sys, 'oxidized', False):
1682 elif getattr(sys, 'oxidized', False):
1682 hgmodules = pycompat.sysexecutable
1683 hgmodules = pycompat.sysexecutable
1683
1684
1684 fm.write(
1685 fm.write(
1685 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1686 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1686 )
1687 )
1687 fm.write(
1688 fm.write(
1688 b'hgmodules',
1689 b'hgmodules',
1689 _(b"checking installed modules (%s)...\n"),
1690 _(b"checking installed modules (%s)...\n"),
1690 hgmodules or _(b"unknown"),
1691 hgmodules or _(b"unknown"),
1691 )
1692 )
1692
1693
1693 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1694 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1694 rustext = rustandc # for now, that's the only case
1695 rustext = rustandc # for now, that's the only case
1695 cext = policy.policy in (b'c', b'allow') or rustandc
1696 cext = policy.policy in (b'c', b'allow') or rustandc
1696 nopure = cext or rustext
1697 nopure = cext or rustext
1697 if nopure:
1698 if nopure:
1698 err = None
1699 err = None
1699 try:
1700 try:
1700 if cext:
1701 if cext:
1701 from .cext import ( # pytype: disable=import-error
1702 from .cext import ( # pytype: disable=import-error
1702 base85,
1703 base85,
1703 bdiff,
1704 bdiff,
1704 mpatch,
1705 mpatch,
1705 osutil,
1706 osutil,
1706 )
1707 )
1707
1708
1708 # quiet pyflakes
1709 # quiet pyflakes
1709 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1710 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1710 if rustext:
1711 if rustext:
1711 from .rustext import ( # pytype: disable=import-error
1712 from .rustext import ( # pytype: disable=import-error
1712 ancestor,
1713 ancestor,
1713 dirstate,
1714 dirstate,
1714 )
1715 )
1715
1716
1716 dir(ancestor), dir(dirstate) # quiet pyflakes
1717 dir(ancestor), dir(dirstate) # quiet pyflakes
1717 except Exception as inst:
1718 except Exception as inst:
1718 err = stringutil.forcebytestr(inst)
1719 err = stringutil.forcebytestr(inst)
1719 problems += 1
1720 problems += 1
1720 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1721 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1721
1722
1722 compengines = util.compengines._engines.values()
1723 compengines = util.compengines._engines.values()
1723 fm.write(
1724 fm.write(
1724 b'compengines',
1725 b'compengines',
1725 _(b'checking registered compression engines (%s)\n'),
1726 _(b'checking registered compression engines (%s)\n'),
1726 fm.formatlist(
1727 fm.formatlist(
1727 sorted(e.name() for e in compengines),
1728 sorted(e.name() for e in compengines),
1728 name=b'compengine',
1729 name=b'compengine',
1729 fmt=b'%s',
1730 fmt=b'%s',
1730 sep=b', ',
1731 sep=b', ',
1731 ),
1732 ),
1732 )
1733 )
1733 fm.write(
1734 fm.write(
1734 b'compenginesavail',
1735 b'compenginesavail',
1735 _(b'checking available compression engines (%s)\n'),
1736 _(b'checking available compression engines (%s)\n'),
1736 fm.formatlist(
1737 fm.formatlist(
1737 sorted(e.name() for e in compengines if e.available()),
1738 sorted(e.name() for e in compengines if e.available()),
1738 name=b'compengine',
1739 name=b'compengine',
1739 fmt=b'%s',
1740 fmt=b'%s',
1740 sep=b', ',
1741 sep=b', ',
1741 ),
1742 ),
1742 )
1743 )
1743 wirecompengines = compression.compengines.supportedwireengines(
1744 wirecompengines = compression.compengines.supportedwireengines(
1744 compression.SERVERROLE
1745 compression.SERVERROLE
1745 )
1746 )
1746 fm.write(
1747 fm.write(
1747 b'compenginesserver',
1748 b'compenginesserver',
1748 _(
1749 _(
1749 b'checking available compression engines '
1750 b'checking available compression engines '
1750 b'for wire protocol (%s)\n'
1751 b'for wire protocol (%s)\n'
1751 ),
1752 ),
1752 fm.formatlist(
1753 fm.formatlist(
1753 [e.name() for e in wirecompengines if e.wireprotosupport()],
1754 [e.name() for e in wirecompengines if e.wireprotosupport()],
1754 name=b'compengine',
1755 name=b'compengine',
1755 fmt=b'%s',
1756 fmt=b'%s',
1756 sep=b', ',
1757 sep=b', ',
1757 ),
1758 ),
1758 )
1759 )
1759 re2 = b'missing'
1760 re2 = b'missing'
1760 if util._re2:
1761 if util._re2:
1761 re2 = b'available'
1762 re2 = b'available'
1762 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1763 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1763 fm.data(re2=bool(util._re2))
1764 fm.data(re2=bool(util._re2))
1764
1765
1765 # templates
1766 # templates
1766 p = templater.templatedir()
1767 p = templater.templatedir()
1767 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
1768 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
1768 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1769 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1769 if p:
1770 if p:
1770 (m, fp) = templater.try_open_template(b"map-cmdline.default")
1771 (m, fp) = templater.try_open_template(b"map-cmdline.default")
1771 if m:
1772 if m:
1772 # template found, check if it is working
1773 # template found, check if it is working
1773 err = None
1774 err = None
1774 try:
1775 try:
1775 templater.templater.frommapfile(m)
1776 templater.templater.frommapfile(m)
1776 except Exception as inst:
1777 except Exception as inst:
1777 err = stringutil.forcebytestr(inst)
1778 err = stringutil.forcebytestr(inst)
1778 p = None
1779 p = None
1779 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1780 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1780 else:
1781 else:
1781 p = None
1782 p = None
1782 fm.condwrite(
1783 fm.condwrite(
1783 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1784 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1784 )
1785 )
1785 fm.condwrite(
1786 fm.condwrite(
1786 not m,
1787 not m,
1787 b'defaulttemplatenotfound',
1788 b'defaulttemplatenotfound',
1788 _(b" template '%s' not found\n"),
1789 _(b" template '%s' not found\n"),
1789 b"default",
1790 b"default",
1790 )
1791 )
1791 if not p:
1792 if not p:
1792 problems += 1
1793 problems += 1
1793 fm.condwrite(
1794 fm.condwrite(
1794 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1795 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1795 )
1796 )
1796
1797
1797 # editor
1798 # editor
1798 editor = ui.geteditor()
1799 editor = ui.geteditor()
1799 editor = util.expandpath(editor)
1800 editor = util.expandpath(editor)
1800 editorbin = procutil.shellsplit(editor)[0]
1801 editorbin = procutil.shellsplit(editor)[0]
1801 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1802 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1802 cmdpath = procutil.findexe(editorbin)
1803 cmdpath = procutil.findexe(editorbin)
1803 fm.condwrite(
1804 fm.condwrite(
1804 not cmdpath and editor == b'vi',
1805 not cmdpath and editor == b'vi',
1805 b'vinotfound',
1806 b'vinotfound',
1806 _(
1807 _(
1807 b" No commit editor set and can't find %s in PATH\n"
1808 b" No commit editor set and can't find %s in PATH\n"
1808 b" (specify a commit editor in your configuration"
1809 b" (specify a commit editor in your configuration"
1809 b" file)\n"
1810 b" file)\n"
1810 ),
1811 ),
1811 not cmdpath and editor == b'vi' and editorbin,
1812 not cmdpath and editor == b'vi' and editorbin,
1812 )
1813 )
1813 fm.condwrite(
1814 fm.condwrite(
1814 not cmdpath and editor != b'vi',
1815 not cmdpath and editor != b'vi',
1815 b'editornotfound',
1816 b'editornotfound',
1816 _(
1817 _(
1817 b" Can't find editor '%s' in PATH\n"
1818 b" Can't find editor '%s' in PATH\n"
1818 b" (specify a commit editor in your configuration"
1819 b" (specify a commit editor in your configuration"
1819 b" file)\n"
1820 b" file)\n"
1820 ),
1821 ),
1821 not cmdpath and editorbin,
1822 not cmdpath and editorbin,
1822 )
1823 )
1823 if not cmdpath and editor != b'vi':
1824 if not cmdpath and editor != b'vi':
1824 problems += 1
1825 problems += 1
1825
1826
1826 # check username
1827 # check username
1827 username = None
1828 username = None
1828 err = None
1829 err = None
1829 try:
1830 try:
1830 username = ui.username()
1831 username = ui.username()
1831 except error.Abort as e:
1832 except error.Abort as e:
1832 err = e.message
1833 err = e.message
1833 problems += 1
1834 problems += 1
1834
1835
1835 fm.condwrite(
1836 fm.condwrite(
1836 username, b'username', _(b"checking username (%s)\n"), username
1837 username, b'username', _(b"checking username (%s)\n"), username
1837 )
1838 )
1838 fm.condwrite(
1839 fm.condwrite(
1839 err,
1840 err,
1840 b'usernameerror',
1841 b'usernameerror',
1841 _(
1842 _(
1842 b"checking username...\n %s\n"
1843 b"checking username...\n %s\n"
1843 b" (specify a username in your configuration file)\n"
1844 b" (specify a username in your configuration file)\n"
1844 ),
1845 ),
1845 err,
1846 err,
1846 )
1847 )
1847
1848
1848 for name, mod in extensions.extensions():
1849 for name, mod in extensions.extensions():
1849 handler = getattr(mod, 'debuginstall', None)
1850 handler = getattr(mod, 'debuginstall', None)
1850 if handler is not None:
1851 if handler is not None:
1851 problems += handler(ui, fm)
1852 problems += handler(ui, fm)
1852
1853
1853 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1854 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1854 if not problems:
1855 if not problems:
1855 fm.data(problems=problems)
1856 fm.data(problems=problems)
1856 fm.condwrite(
1857 fm.condwrite(
1857 problems,
1858 problems,
1858 b'problems',
1859 b'problems',
1859 _(b"%d problems detected, please check your install!\n"),
1860 _(b"%d problems detected, please check your install!\n"),
1860 problems,
1861 problems,
1861 )
1862 )
1862 fm.end()
1863 fm.end()
1863
1864
1864 return problems
1865 return problems
1865
1866
1866
1867
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    # Talk to the target over the peer interface so this also works for
    # remote repositories.
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    flags = peer.known([bin(s) for s in ids])
    # One '0'/'1' character per queried node, in input order.
    ui.write(b"%s\n" % b"".join(b"1" if known else b"0" for known in flags))
1880
1881
1881
1882
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Thin shim kept only so old completion scripts keep working; the real
    # implementation lives in debugnamecomplete.
    debugnamecomplete(ui, repo, *args)
1886
1887
1887
1888
@command(
    b'debuglocks',
    [
        (b'L', b'force-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # Forced removal: unconditionally delete the lock file(s) and stop.
    force_lock = opts.get('force_lock')
    force_wlock = opts.get('force_wlock')
    if force_lock:
        repo.svfs.unlink(b'lock')
    if force_wlock:
        repo.vfs.unlink(b'wlock')
    if force_lock or force_wlock:
        return 0

    acquired = []
    try:
        # Grab the requested lock(s) without blocking, then hold them
        # until the user acknowledges the prompt (or interrupts us).
        # wlock is taken before lock, matching the normal lock ordering.
        if opts.get('set_wlock'):
            try:
                acquired.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                acquired.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if acquired:
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*acquired)

    now = time.time()

    def report(vfs, name, method):
        # Trying to take the lock ourselves causes stale locks to get
        # reaped, which makes the report more accurate.
        try:
            lk = method(False)
        except error.LockHeld:
            lk = None

        if lk:
            # We could acquire it, so nobody holds it: it is free.
            lk.release()
            ui.writenoi18n(b"%-6s free\n" % (name + b":"))
            return 0

        # Somebody else holds the lock; try to describe the holder.
        try:
            st = vfs.lstat(name)
            age = now - st[stat.ST_MTIME]
            user = util.username(st.st_uid)
            locker = vfs.readlock(name)
            if b":" in locker:
                host, pid = locker.split(b':')
                if host == socket.gethostname():
                    locker = b'user %s, process %s' % (user or b'None', pid)
                else:
                    locker = b'user %s, process %s, host %s' % (
                        user or b'None',
                        pid,
                        host,
                    )
            ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
            return 1
        except OSError as e:
            # The lock vanished between the probe and the stat: treat as
            # free. Anything else is a real error.
            if e.errno != errno.ENOENT:
                raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held = report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
1999
2000
2000
2001
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def fulltextcache():
        # Not every revlog implementation carries a fulltext cache; abort
        # with a clear message when this one does not.
        storage = repo.manifestlog.getstorage(b'')
        try:
            return storage._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            fulltextcache().clear(clear_persisted_data=True)
            return

    if add:
        with repo.wlock():
            manifestlog = repo.manifestlog
            store = manifestlog.getstorage(b'')
            for node in add:
                try:
                    manifest = manifestlog[store.lookup(node)]
                except error.LookupError as e:
                    raise error.Abort(e, hint=b"Check your manifest node id")
                # Reading the manifest populates the cache as a side effect.
                manifest.read()
            return

    # No mutation requested: dump the cache contents, most recent first.
    cache = fulltextcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
        return

    ui.write(
        _(
            b'cache contains %d manifest entries, in order of most to '
            b'least recent:\n'
        )
        % (len(cache),)
    )
    totalsize = 0
    for nodeid in cache:
        # peek() avoids disturbing the LRU ordering while we report.
        entry = cache.peek(nodeid)
        size = len(entry)
        totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
        ui.write(
            _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
        )
    ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
    ui.write(
        _(b'total cache data size %s, on-disk %s\n')
        % (util.bytecount(totalsize), util.bytecount(ondisk))
    )
2072
2073
2073
2074
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        # Report which on-disk format will actually be used.
        if v1records or v2records:
            if not v2records:
                ui.writenoi18n(b'no version 2 merge state\n')
            elif ms._v1v2match(v1records, v2records):
                ui.writenoi18n(b'v1 and v2 states match: using v2\n')
            else:
                ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # Default human-readable rendering of the merge state.
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # The two parents of the merge, with optional conflict-marker labels.
    commits_fm = fm.nested(b'commits')
    if ms.active():
        sides = (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        )
        for side_name, side_node, label_index in sides:
            commits_fm.startitem()
            commits_fm.data(name=side_name)
            commits_fm.data(node=hex(side_node))
            if ms._labels and len(ms._labels) > label_index:
                commits_fm.data(label=ms._labels[label_index])
    commits_fm.end()

    # Per-file conflict records.
    files_fm = fm.nested(b'files')
    if ms.active():
        for path in ms:
            files_fm.startitem()
            files_fm.data(path=path)
            record = ms._state[path]
            files_fm.data(state=record[0])
            if record[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                # Content conflict: local/ancestor/other versions involved.
                files_fm.data(local_key=record[1])
                files_fm.data(local_path=record[2])
                files_fm.data(ancestor_path=record[3])
                files_fm.data(ancestor_node=record[4])
                files_fm.data(other_path=record[5])
                files_fm.data(other_node=record[6])
                files_fm.data(local_flags=record[7])
            elif record[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                # Path conflict (rename on one side).
                files_fm.data(renamed_path=record[1])
                files_fm.data(rename_side=record[2])
            extras_fm = files_fm.nested(b'extras')
            for key, value in sorted(ms.extras(path).items()):
                extras_fm.startitem()
                extras_fm.data(key=key)
                extras_fm.data(value=value)
            extras_fm.end()

    files_fm.end()

    # Extras recorded for files that have no conflict record of their own.
    extras_fm = fm.nested(b'extras')
    for path, extras in sorted(pycompat.iteritems(ms.allextras())):
        if path in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for key, value in pycompat.iteritems(extras):
            extras_fm.startitem()
            extras_fm.data(file=path)
            extras_fm.data(key=key)
            extras_fm.data(value=value)
    extras_fm.end()

    fm.end()
2181
2182
2182
2183
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # since we previously only listed open branches, we will handle that
    # specially (after this for loop)
    for ns_name, ns in pycompat.iteritems(repo.names):
        if ns_name != b'branches':
            candidates.update(ns.listnames(repo))
    # Add only the branches that are still open.
    candidates.update(
        branch
        for (branch, heads, tip, closed) in repo.branchmap().iterbranches()
        if not closed
    )
    # With no arguments, complete against the empty prefix (i.e. everything).
    prefixes = args if args else [b'']
    completions = {
        name
        for prefix in prefixes
        for name in candidates
        if name.startswith(prefix)
    }
    ui.write(b'\n'.join(sorted(completions)))
    ui.write(b'\n')
2205
2206
2206
2207
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdin'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdin')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    # Every mode operates on the unfiltered changelog: the persistent
    # nodemap covers all revisions, filtered or not. Hoisted here to
    # avoid repeating the same two-line setup in each branch.
    cl = repo.unfiltered().changelog
    if opts['dump_new']:
        if util.safehasattr(cl.index, "nodemap_data_all"):
            # native (rust/C) index knows how to serialize itself
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk']:
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            # guard against a zero-length data file so the percentage
            # computation cannot raise ZeroDivisionError
            if docket.data_length:
                unused_perc = docket.data_unused * 100.0 / docket.data_length
            else:
                unused_perc = 0.0
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2268
2269
2269
2270
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers.

    With --delete, removes the markers at the given indices instead.
    With a precursor (and optional successors), creates a new marker.
    """

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Parse ``s`` as a full binary node id, or raise InputError.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    if opts.get(b'delete'):
        # deletion mode: validate the indices first, before taking locks
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        # stripping markers out from under a live transaction would
        # corrupt its view of the obsstore
        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # creation mode
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        # lock is taken before the transaction and released after it;
        # the nested try/finally pairs keep that ordering explicit
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    # parents can only be recorded for changesets we know
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                # surface malformed marker data as a user-facing abort
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # display mode
        if opts[b'rev']:
            revs = scmutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            # indices are positions in the *full* marker list, so we must
            # walk all markers even though only some will be displayed
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2419
2420
2420
2421
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    # no --rev means the working-directory context (default=None)
    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'), default=None)
    copymap = ctx.p1copies()
    for dst in copymap:
        ui.write(b'%s -> %s\n' % (copymap[dst], dst))
2433
2434
2434
2435
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""
    # Fix: this function was previously (mis)named ``debugp1copies``,
    # which rebound the module-level name and shadowed the real p1
    # variant defined just above. The registered command name
    # (b'debugp2copies') was unaffected because the @command decorator
    # records the function in the command table, so behavior of the
    # command itself is unchanged.
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2447
2448
2448
2449
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Return (files, dirs): dirstate entries under ``path`` whose
        # state character appears in ``acceptable``.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            # the spec points outside the repository: nothing can match
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        # make the spec relative to the repository root
        spec = spec[len(rootdir) :]
        # dirstate paths always use b'/'; on platforms with a different
        # OS separator (e.g. Windows) convert back and forth
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        # bound methods hoisted out of the per-file loop
        adddir, addfile = dirs.add, files.add
        for f, st in pycompat.iteritems(dirstate):
            # st[0] is the single-character dirstate status
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    # without --full, complete only to the next segment
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # build the set of acceptable states from the flags; when no flag is
    # given, the call below falls back to all of b'nmar'
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    # files and directory prefixes are printed in one sorted listing
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2517
2518
2518
2519
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # resolve both endpoints; the matcher is built against the first one
    source_ctx = scmutil.revsingle(repo, rev1)
    dest_ctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(source_ctx, pats, opts)
    copymap = copies.pathcopies(source_ctx, dest_ctx, matcher)
    for dst in sorted(copymap):
        ui.write(b'%s -> %s\n' % (copymap[dst], dst))
2532
2533
2533
2534
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""

    def yesno(flag):
        # localized boolean marker for the report below
        return _(b'yes') if flag else _(b'no')

    # Always enable peer request logging. Requires --debug to display
    # though.
    with ui.configoverride({(b'devel', b'debug.peer-request'): True}):
        peer = hg.peer(ui, {}, path)

        # query the peer before writing anything, matching the order in
        # which any peer-request debug output is produced
        is_local = peer.local() is not None
        pushable = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % yesno(is_local))
        ui.write(_(b'pushable: %s\n') % yesno(pushable))
2552
2553
2553
2554
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts[b'tool']:
        # --tool takes priority over everything: apply it via the same
        # ui.forcemerge override the real merge machinery honours
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        # report (with -v) the other high-priority sources, if set
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                if not ui.debugflag:
                    # suppress _picktool's chatter unless --debug; the
                    # matching popbuffer is in the finally clause so the
                    # buffer is always popped, even on error
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(b'%s = %s\n' % (path, tool))
2641
2642
2642
2643
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    target = hg.peer(ui, {}, repopath)
    if not keyinfo:
        # listing mode: dump every key/value pair in the namespace
        listing = target.listkeys(namespace)
        for key in sorted(listing):
            escaped_key = stringutil.escapestr(key)
            escaped_val = stringutil.escapestr(listing[key])
            ui.write(b"%s\t%s\n" % (escaped_key, escaped_val))
        return

    # update mode: conditionally set KEY to NEW if it is currently OLD
    key, old, new = keyinfo
    request = {
        b'namespace': namespace,
        b'key': key,
        b'old': old,
        b'new': new,
    }
    with target.commandexecutor() as executor:
        outcome = executor.callcommand(b'pushkey', request).result()

    ui.status(pycompat.bytestr(outcome) + b'\n')
    # exit status: 0 on success (truthy outcome), 1 otherwise
    return not outcome
2674
2675
2675
2676
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """display the relation between the pvecs of two revisions

    Prints both parent vectors, their depths, and a relation marker:
    ``=`` equal, ``>``/``<`` ordering, ``|`` unrelated.
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    else:
        # Fix: previously ``rel`` was only bound inside the branches
        # above, so a comparison falling through every case would raise
        # UnboundLocalError at the final ui.write below. Use an explicit
        # fallback marker instead.
        rel = b"?"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
2702
2703
2703
2704
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # changedfiles=None asks dirstate.rebuild() for a full rebuild;
        # --minimal narrows it to the inconsistent files computed below
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            # in the manifest but unknown to the dirstate
            manifestonly = manifestfiles - dirstatefiles
            # in the dirstate but absent from the manifest
            dsonly = dirstatefiles - manifestfiles
            # files marked added (b'a') are deliberately left untouched
            dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2751
2752
2752
2753
@command(b'debugrebuildfncache', [], b'')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # Thin wrapper: the scan-and-rewrite work lives in the repair module.
    repair.rebuildfncache(ui, repo)
2757
2758
2758
2759
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    # Resolve the revision to inspect (defaults per revsingle when -r is
    # not given).
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    m = scmutil.match(ctx, pats, opts)
    # `path` was previously named `abs`, shadowing the builtin abs().
    for path in ctx.walk(m):
        fctx = ctx[path]
        # Falsy when the file was not renamed; otherwise indexable as
        # (copy source, source filenode).
        o = fctx.filelog().renamed(fctx.filenode())
        rel = repo.pathto(path)
        if o:
            ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
        else:
            ui.write(_(b"%s not renamed\n") % rel)
2778
2779
2779
2780
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """ print the current repo requirements """
    # One requirement per line, sorted for stable output.
    for requirement in sorted(repo.requirements):
        ui.write(b"%s\n" % requirement)
2785
2786
2786
2787
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    # --dump: emit one raw table row per revision and return, skipping the
    # aggregate statistics below.
    if opts.get(b"dump"):
        numrevs = len(r)
        ui.write(
            (
                b"# rev p1rev p2rev start   end deltastart base   p1   p2"
                b" rawsize totalsize compression heads chainlen\n"
            )
        )
        # running total of raw (uncompressed) sizes, used for the
        # cumulative compression column
        ts = 0
        # revisions with no child seen so far
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            # deltaparent() reports -1 for full snapshots; normalize to the
            # revision itself so start(dbase) points at this rev's data
            if dbase == -1:
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # this rev's parents are no longer heads; this rev now is
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                # cumulative rawsize over cumulative stored size so far
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write(
                b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                b"%11d %5d %8d\n"
                % (
                    rev,
                    p1,
                    p2,
                    r.start(rev),
                    r.end(rev),
                    r.start(dbase),
                    r.start(cbase),
                    r.start(p1),
                    r.start(p2),
                    rs,
                    ts,
                    compression,
                    len(heads),
                    clen,
                )
            )
        return 0

    # Decode the revlog version word: low 16 bits are the format number,
    # the rest are feature flags.
    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append(b'inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append(b'generaldelta')
    if not flags:
        flags = [b'(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision; each 3-element list is [min, max, total]
    # (the total slot is converted to an average in place further down)
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # Fold `size` into the [min, max, total] accumulator `l`.
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # no delta parent: this revision stores full text (or nothing)
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # delta: extend the parent's chain bookkeeping
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                # intermediate snapshot (delta against another snapshot)
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                # plain delta: classify its base relative to prev/p1/p2
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, b'_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            # first byte of the stored chunk identifies the compression type
            chunktype = bytes(segment[0:1])
        else:
            chunktype = b'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    # From here on, the [2] (total) slots are converted to averages in place.
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    if numfull == 0:
        fullsize[2] = 0
    else:
        fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    # Format-string templates: dfmtstr pads plain counts, pcfmtstr pads a
    # count plus a percentage column.
    basedfmtstr = b'%%%dd\n'
    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # NOTE(review): parameter shadows the builtin ``max``; harmless here.
        return basedfmtstr % len(str(max))

    def pcfmtstr(max, padding=0):
        return basepcfmtstr % (len(str(max)), b' ' * padding)

    def pcfmt(value, total):
        # Return (value, percent-of-total); 100% when total is zero/falsy.
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.writenoi18n(b'format : %d\n' % format)
    ui.writenoi18n(b'flags  : %s\n' % b', '.join(flags))

    ui.write(b'\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b'    merges    : ' + fmt % pcfmt(nummerges, numrevs))
    ui.writenoi18n(
        b'    normal    : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
    )
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b'    empty     : ' + fmt % pcfmt(numempty, numrevs))
    ui.writenoi18n(
        b'                   text  : '
        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b'                   delta : '
        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b'    snapshot  : ' + fmt % pcfmt(numfull + numsemi, numrevs)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b'      lvl-%-3d :       ' % depth)
            + fmt % pcfmt(numsnapdepth[depth], numrevs)
        )
    ui.writenoi18n(b'    deltas    : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
    ui.writenoi18n(
        b'    snapshot  : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b'      lvl-%-3d :       ' % depth)
            + fmt % pcfmt(snaptotal[depth], totalsize)
        )
    ui.writenoi18n(b'    deltas    : ' + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # Render a chunk-type label: literal for b'empty', hex+char for
        # printable ASCII, bare hex otherwise.
        if chunktype == b'empty':
            return b'       %s     : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return b'       0x%s (%s)  : ' % (hex(chunktype), chunktype)
        else:
            return b'       0x%s      : ' % hex(chunktype)

    ui.write(b'\n')
    ui.writenoi18n(b'chunks        : ' + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.writenoi18n(b'chunks size   : ' + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write(b'\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.writenoi18n(b'avg chain length  : ' + fmt % avgchainlen)
    ui.writenoi18n(b'max chain length  : ' + fmt % maxchainlen)
    ui.writenoi18n(b'max chain reach   : ' + fmt % maxchainspan)
    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)

    # Size breakdowns are only meaningful for format > 0 (datasize was only
    # collected in that case).
    if format > 0:
        ui.write(b'\n')
        ui.writenoi18n(
            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
            % tuple(datasize)
        )
        ui.writenoi18n(
            b'full revision size (min/max/avg)     : %d / %d / %d\n'
            % tuple(fullsize)
        )
        ui.writenoi18n(
            b'inter-snapshot size (min/max/avg)    : %d / %d / %d\n'
            % tuple(semisize)
        )
        for depth in sorted(snapsizedepth):
            # depth 0 is the full-snapshot line already printed above
            if depth == 0:
                continue
            ui.writenoi18n(
                b'    level-%-3d (min/max/avg)          : %d / %d / %d\n'
                % ((depth,) + tuple(snapsizedepth[depth]))
            )
        ui.writenoi18n(
            b'delta size (min/max/avg)             : %d / %d / %d\n'
            % tuple(deltasize)
        )

    if numdeltas > 0:
        ui.write(b'\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.writenoi18n(
            b'deltas against prev  : ' + fmt % pcfmt(numprev, numdeltas)
        )
        if numprev > 0:
            ui.writenoi18n(
                b'    where prev = p1  : ' + fmt2 % pcfmt(nump1prev, numprev)
            )
            ui.writenoi18n(
                b'    where prev = p2  : ' + fmt2 % pcfmt(nump2prev, numprev)
            )
            ui.writenoi18n(
                b'    other            : ' + fmt2 % pcfmt(numoprev, numprev)
            )
        if gdelta:
            ui.writenoi18n(
                b'deltas against p1    : ' + fmt % pcfmt(nump1, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against p2    : ' + fmt % pcfmt(nump2, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
            )
3141
3142
3142
3143
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    # Only the two historical index layouts are supported.
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # Full node hashes with --debug, abbreviated ones otherwise.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    # Column headers; format 0 shows parents as node hashes, format 1 as
    # revision numbers plus the flag word.
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b"   rev    offset  length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b"   rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b"   rev flag   offset   length     size   link     p1"
                    b"     p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b"   rev flag     size   link     p1     p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # fall back to null parents if the lookup fails for any reason
                pp = [nullid, nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3256
3257
3257
3258
3258 @command(
3259 @command(
3259 b'debugrevspec',
3260 b'debugrevspec',
3260 [
3261 [
3261 (
3262 (
3262 b'',
3263 b'',
3263 b'optimize',
3264 b'optimize',
3264 None,
3265 None,
3265 _(b'print parsed tree after optimizing (DEPRECATED)'),
3266 _(b'print parsed tree after optimizing (DEPRECATED)'),
3266 ),
3267 ),
3267 (
3268 (
3268 b'',
3269 b'',
3269 b'show-revs',
3270 b'show-revs',
3270 True,
3271 True,
3271 _(b'print list of result revisions (default)'),
3272 _(b'print list of result revisions (default)'),
3272 ),
3273 ),
3273 (
3274 (
3274 b's',
3275 b's',
3275 b'show-set',
3276 b'show-set',
3276 None,
3277 None,
3277 _(b'print internal representation of result set'),
3278 _(b'print internal representation of result set'),
3278 ),
3279 ),
3279 (
3280 (
3280 b'p',
3281 b'p',
3281 b'show-stage',
3282 b'show-stage',
3282 [],
3283 [],
3283 _(b'print parsed tree at the given stage'),
3284 _(b'print parsed tree at the given stage'),
3284 _(b'NAME'),
3285 _(b'NAME'),
3285 ),
3286 ),
3286 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3287 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3287 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3288 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3288 ],
3289 ],
3289 b'REVSPEC',
3290 b'REVSPEC',
3290 )
3291 )
3291 def debugrevspec(ui, repo, expr, **opts):
3292 def debugrevspec(ui, repo, expr, **opts):
3292 """parse and apply a revision specification
3293 """parse and apply a revision specification
3293
3294
3294 Use -p/--show-stage option to print the parsed tree at the given stages.
3295 Use -p/--show-stage option to print the parsed tree at the given stages.
3295 Use -p all to print tree at every stage.
3296 Use -p all to print tree at every stage.
3296
3297
3297 Use --no-show-revs option with -s or -p to print only the set
3298 Use --no-show-revs option with -s or -p to print only the set
3298 representation or the parsed tree respectively.
3299 representation or the parsed tree respectively.
3299
3300
3300 Use --verify-optimized to compare the optimized result with the unoptimized
3301 Use --verify-optimized to compare the optimized result with the unoptimized
3301 one. Returns 1 if the optimized result differs.
3302 one. Returns 1 if the optimized result differs.
3302 """
3303 """
3303 opts = pycompat.byteskwargs(opts)
3304 opts = pycompat.byteskwargs(opts)
3304 aliases = ui.configitems(b'revsetalias')
3305 aliases = ui.configitems(b'revsetalias')
3305 stages = [
3306 stages = [
3306 (b'parsed', lambda tree: tree),
3307 (b'parsed', lambda tree: tree),
3307 (
3308 (
3308 b'expanded',
3309 b'expanded',
3309 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3310 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3310 ),
3311 ),
3311 (b'concatenated', revsetlang.foldconcat),
3312 (b'concatenated', revsetlang.foldconcat),
3312 (b'analyzed', revsetlang.analyze),
3313 (b'analyzed', revsetlang.analyze),
3313 (b'optimized', revsetlang.optimize),
3314 (b'optimized', revsetlang.optimize),
3314 ]
3315 ]
3315 if opts[b'no_optimized']:
3316 if opts[b'no_optimized']:
3316 stages = stages[:-1]
3317 stages = stages[:-1]
3317 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3318 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3318 raise error.Abort(
3319 raise error.Abort(
3319 _(b'cannot use --verify-optimized with --no-optimized')
3320 _(b'cannot use --verify-optimized with --no-optimized')
3320 )
3321 )
3321 stagenames = {n for n, f in stages}
3322 stagenames = {n for n, f in stages}
3322
3323
3323 showalways = set()
3324 showalways = set()
3324 showchanged = set()
3325 showchanged = set()
3325 if ui.verbose and not opts[b'show_stage']:
3326 if ui.verbose and not opts[b'show_stage']:
3326 # show parsed tree by --verbose (deprecated)
3327 # show parsed tree by --verbose (deprecated)
3327 showalways.add(b'parsed')
3328 showalways.add(b'parsed')
3328 showchanged.update([b'expanded', b'concatenated'])
3329 showchanged.update([b'expanded', b'concatenated'])
3329 if opts[b'optimize']:
3330 if opts[b'optimize']:
3330 showalways.add(b'optimized')
3331 showalways.add(b'optimized')
3331 if opts[b'show_stage'] and opts[b'optimize']:
3332 if opts[b'show_stage'] and opts[b'optimize']:
3332 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3333 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3333 if opts[b'show_stage'] == [b'all']:
3334 if opts[b'show_stage'] == [b'all']:
3334 showalways.update(stagenames)
3335 showalways.update(stagenames)
3335 else:
3336 else:
3336 for n in opts[b'show_stage']:
3337 for n in opts[b'show_stage']:
3337 if n not in stagenames:
3338 if n not in stagenames:
3338 raise error.Abort(_(b'invalid stage name: %s') % n)
3339 raise error.Abort(_(b'invalid stage name: %s') % n)
3339 showalways.update(opts[b'show_stage'])
3340 showalways.update(opts[b'show_stage'])
3340
3341
3341 treebystage = {}
3342 treebystage = {}
3342 printedtree = None
3343 printedtree = None
3343 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3344 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3344 for n, f in stages:
3345 for n, f in stages:
3345 treebystage[n] = tree = f(tree)
3346 treebystage[n] = tree = f(tree)
3346 if n in showalways or (n in showchanged and tree != printedtree):
3347 if n in showalways or (n in showchanged and tree != printedtree):
3347 if opts[b'show_stage'] or n != b'parsed':
3348 if opts[b'show_stage'] or n != b'parsed':
3348 ui.write(b"* %s:\n" % n)
3349 ui.write(b"* %s:\n" % n)
3349 ui.write(revsetlang.prettyformat(tree), b"\n")
3350 ui.write(revsetlang.prettyformat(tree), b"\n")
3350 printedtree = tree
3351 printedtree = tree
3351
3352
3352 if opts[b'verify_optimized']:
3353 if opts[b'verify_optimized']:
3353 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3354 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3354 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3355 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3355 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3356 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3356 ui.writenoi18n(
3357 ui.writenoi18n(
3357 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3358 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3358 )
3359 )
3359 ui.writenoi18n(
3360 ui.writenoi18n(
3360 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3361 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3361 )
3362 )
3362 arevs = list(arevs)
3363 arevs = list(arevs)
3363 brevs = list(brevs)
3364 brevs = list(brevs)
3364 if arevs == brevs:
3365 if arevs == brevs:
3365 return 0
3366 return 0
3366 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3367 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3367 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3368 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3368 sm = difflib.SequenceMatcher(None, arevs, brevs)
3369 sm = difflib.SequenceMatcher(None, arevs, brevs)
3369 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3370 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3370 if tag in ('delete', 'replace'):
3371 if tag in ('delete', 'replace'):
3371 for c in arevs[alo:ahi]:
3372 for c in arevs[alo:ahi]:
3372 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3373 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3373 if tag in ('insert', 'replace'):
3374 if tag in ('insert', 'replace'):
3374 for c in brevs[blo:bhi]:
3375 for c in brevs[blo:bhi]:
3375 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3376 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3376 if tag == 'equal':
3377 if tag == 'equal':
3377 for c in arevs[alo:ahi]:
3378 for c in arevs[alo:ahi]:
3378 ui.write(b' %d\n' % c)
3379 ui.write(b' %d\n' % c)
3379 return 1
3380 return 1
3380
3381
3381 func = revset.makematcher(tree)
3382 func = revset.makematcher(tree)
3382 revs = func(repo)
3383 revs = func(repo)
3383 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3384 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3384 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3385 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3385 if not opts[b'show_revs']:
3386 if not opts[b'show_revs']:
3386 return
3387 return
3387 for c in revs:
3388 for c in revs:
3388 ui.write(b"%d\n" % c)
3389 ui.write(b"%d\n" % c)
3389
3390
3390
3391
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    # --sshstdio is the only transport implemented so far.
    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    # The two logging destinations are mutually exclusive.
    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    log_handle = None
    if opts[b'logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        fd = int(opts[b'logiofd'])
        try:
            log_handle = os.fdopen(fd, 'ab', 0)
        except OSError as err:
            if err.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            log_handle = os.fdopen(fd, 'wb', 0)
    elif opts[b'logiofile']:
        log_handle = open(opts[b'logiofile'], b'ab', 0)

    server = wireprotoserver.sshserver(ui, repo, logfh=log_handle)
    server.serve_forever()
3439
3440
3440
3441
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """
    # Resolve both revisions to nodes first; rev2 defaults to the null
    # revision when omitted.
    parent1 = scmutil.revsingle(repo, rev1).node()
    parent2 = scmutil.revsingle(repo, rev2, b'null').node()

    # Hold the working-directory lock while rewriting the parent pointers.
    with repo.wlock():
        repo.setparents(parent1, parent2)
3458
3459
3459
3460
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        # With -c/-m/--dir the sole positional argument is the revision.
        if rev is not None:
            # BUG FIX: the error previously named b'debugdata' (copy-paste
            # from the debugdata command), so usage errors pointed users at
            # the wrong command.
            raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
    # Open the requested storage (changelog, manifest, or filelog) and drop
    # down to the underlying revlog when the storage object wraps one.
    r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        # Sort entries by key for deterministic output.
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3486
3487
3487
3488
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)

    # Only https and ssh make sense for certificate checking; pick the
    # scheme's well-known port when the URL does not carry one.
    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme not in defaultport:
        raise error.Abort(_(b"only https and ssh connections are supported"))
    try:
        addr = (url.host, int(url.port or defaultport[url.scheme]))
    except ValueError:
        raise error.Abort(_(b"malformed port number in URL"))

    from . import win32

    # NOTE(review): ssl.wrap_socket is deprecated and removed in Python
    # 3.12; this should eventually migrate to ssl.SSLContext.wrap_socket.
    # Verification is intentionally disabled (CERT_NONE): we only want the
    # raw peer certificate to hand to the Windows chain-building API.
    sock = ssl.wrap_socket(
        socket.socket(),
        ssl_version=ssl.PROTOCOL_TLS,
        cert_reqs=ssl.CERT_NONE,
        ca_certs=None,
    )

    try:
        sock.connect(addr)
        cert = sock.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        chaincomplete = win32.checkcertificatechain(cert, build=False)

        if chaincomplete:
            ui.status(_(b'full certificate chain is available\n'))
        else:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            # Second call with build=True asks Windows Update to fetch the
            # missing intermediates / root.
            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
    finally:
        sock.close()
3557
3558
3558
3559
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect all strip backups, newest first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to `limit` changesets from one bundle, honoring the
        # usual log ordering/merge-filter options.
        if opts.get(b"newest_first"):
            chlist.reverse()
        shown = 0
        for n in chlist:
            if limit is not None and shown >= limit:
                break
            parents = [True for p in other.changelog.parents(n) if p != nullid]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            shown += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = ui.expandpath(os.path.relpath(backup, encoding.getcwd()))
        source, branches = hg.parseurl(source, opts.get(b"branch"))
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # getremotechanges is chatty; silence it while probing the bundle.
        oldquiet = ui.quiet
        try:
            ui.quiet = True
            other, chlist, cleanupfn = bundlerepo.getremotechanges(
                ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
            )
        except error.LookupError:
            continue
        finally:
            ui.quiet = oldquiet

        try:
            if not chlist:
                continue
            if recovernode:
                # Unbundle the first backup containing the requested node.
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        fh = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, fh, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        break
            else:
                # Listing mode: print the bundle's mtime header, then either
                # the bundle path (--verbose) or its changesets.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                    displayer = logcmdutil.changesetdisplayer(
                        ui, other, opts, False
                    )
                    display(other, chlist, displayer)
                    displayer.close()
        finally:
            cleanupfn()
3694
3695
3695
3696
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Dump the subrepository state (path, source, pinned revision) of the
    # requested changeset, sorted by subrepo path for stable output.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3707
3708
3708
3709
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    ctx2str = bytes
    node2str = short
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % ctx2str(ctx))
        successorsets = obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        )
        for succsset in successorsets:
            # One indented line per successors set; empty sets print as a
            # bare newline.
            if succsset:
                ui.write(b' ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(b' ')
                    ui.write(node2str(node))
            ui.write(b'\n')
3763
3764
3764
3765
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    # Walk every revision and report the cached .hgtags filenode, without
    # computing entries that are absent (computemissing=False).
    fnodescache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    for rev in repo:
        ctxnode = repo[rev].node()
        fnode = fnodescache.getfnode(ctxnode, computemissing=False)
        display = hex(fnode) if fnode else b'missing/invalid'
        ui.write(b'%d %s %s\n' % (rev, hex(ctxnode), display))
3774
3775
3775
3776
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = scmutil.revrange(repo, opts['rev'])

    # Parse -D KEY=VALUE definitions into template properties; 'ui' is
    # reserved and an empty key is invalid.
    props = {}
    for spec in opts['define']:
        try:
            key, value = (e.strip() for e in spec.split(b'=', 1))
            if not key or key == b'ui':
                raise ValueError
            props[key] = value
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % spec)

    if ui.verbose:
        # Dump the parsed tree, and the alias-expanded tree when it differs.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        expanded = templater.expandaliases(tree, aliases)
        if expanded != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(expanded), b'\n'
            )

    if revs is None:
        # Generic template: render once with the defined properties.
        resources = formatter.templateresources(ui, repo)
        tmplr = formatter.maketemplater(ui, tmpl, resources=resources)
        if ui.verbose:
            kwds, funcs = tmplr.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(tmplr.renderdefault(props))
    else:
        # Log template: render once per requested changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
3839
3840
3840
3841
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # getpass() may legitimately return None (e.g. no interactive input);
    # display a placeholder in that case.
    if response is None:
        shown = b"<default response>"
    else:
        shown = encoding.strtolocal(response)
    ui.writenoi18n(b'response: %s\n' % shown)
3857
3858
3858
3859
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo the user's answer back so tests can observe prompt handling.
    ui.writenoi18n(b'response: %s\n' % ui.prompt(prompt))
3871
3872
3872
3873
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Take both the working-copy lock and the store lock so every cache
    # can be rebuilt consistently.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(full=True)
3878
3879
3879
3880
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlog will be upgraded. You can restrict this using flag
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all filelog optimisation
    """
    # All of the real work lives in the upgrade module; this command is a
    # thin pass-through of the parsed options.
    return upgrade.upgraderepo(
        ui,
        repo,
        run=run,
        optimize=optimize,
        backup=backup,
        **opts
    )
3929
3930
3930
3931
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    # Optionally normalize separators for display when ui.slash is set on a
    # platform whose native separator is not '/'.
    f = lambda fn: fn
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        f = util.normpath
    # Size the columns to the longest repo-relative and cwd-relative paths so
    # the listing lines up.  (The loop variable is 'path' rather than 'abs',
    # which shadowed the builtin.)
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(path) for path in items),
        max(len(repo.pathto(path)) for path in items),
    )
    for path in items:
        line = fmt % (
            path,
            f(repo.pathto(path)),
            b'exact' if m.exact(path) else b'',
        )
        ui.write(b"%s\n" % line.rstrip())
3957
3958
3958
3959
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        divergent = entry.get(b'divergentnodes')
        if divergent:
            # One "<hex> (<phase>)" token per divergent node, followed by a
            # trailing space separating them from the reason text.
            parts = [
                b'%s (%s)' % (dctx.hex(), dctx.phasestr()) for dctx in divergent
            ]
            dnodes = b' '.join(parts) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
3976
3977
3977
3978
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    """check argument passing through the wire protocol"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    # Strip the generic remote options; only command-specific options are
    # forwarded to the peer.
    for remoteopt in cmdutil.remoteopts:
        del opts[remoteopt[1]]
    # Forward only options that were actually set.
    args = {k: v for k, v in pycompat.iteritems(opts) if v}
    args = pycompat.strkwargs(args)
    # run twice to check that we don't mess up the stream for the next command
    first = peer.debugwireargs(*vals, **args)
    second = peer.debugwireargs(*vals, **args)
    ui.write(b"%s\n" % first)
    if first != second:
        ui.warn(b"%s\n" % second)
4005
4006
4006
4007
4007 def _parsewirelangblocks(fh):
4008 def _parsewirelangblocks(fh):
4008 activeaction = None
4009 activeaction = None
4009 blocklines = []
4010 blocklines = []
4010 lastindent = 0
4011 lastindent = 0
4011
4012
4012 for line in fh:
4013 for line in fh:
4013 line = line.rstrip()
4014 line = line.rstrip()
4014 if not line:
4015 if not line:
4015 continue
4016 continue
4016
4017
4017 if line.startswith(b'#'):
4018 if line.startswith(b'#'):
4018 continue
4019 continue
4019
4020
4020 if not line.startswith(b' '):
4021 if not line.startswith(b' '):
4021 # New block. Flush previous one.
4022 # New block. Flush previous one.
4022 if activeaction:
4023 if activeaction:
4023 yield activeaction, blocklines
4024 yield activeaction, blocklines
4024
4025
4025 activeaction = line
4026 activeaction = line
4026 blocklines = []
4027 blocklines = []
4027 lastindent = 0
4028 lastindent = 0
4028 continue
4029 continue
4029
4030
4030 # Else we start with an indent.
4031 # Else we start with an indent.
4031
4032
4032 if not activeaction:
4033 if not activeaction:
4033 raise error.Abort(_(b'indented line outside of block'))
4034 raise error.Abort(_(b'indented line outside of block'))
4034
4035
4035 indent = len(line) - len(line.lstrip())
4036 indent = len(line) - len(line.lstrip())
4036
4037
4037 # If this line is indented more than the last line, concatenate it.
4038 # If this line is indented more than the last line, concatenate it.
4038 if indent > lastindent and blocklines:
4039 if indent > lastindent and blocklines:
4039 blocklines[-1] += line.lstrip()
4040 blocklines[-1] += line.lstrip()
4040 else:
4041 else:
4041 blocklines.append(line)
4042 blocklines.append(line)
4042 lastindent = indent
4043 lastindent = indent
4043
4044
4044 # Flush last block.
4045 # Flush last block.
4045 if activeaction:
4046 if activeaction:
4046 yield activeaction, blocklines
4047 yield activeaction, blocklines
4047
4048
4048
4049
4049 @command(
4050 @command(
4050 b'debugwireproto',
4051 b'debugwireproto',
4051 [
4052 [
4052 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4053 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4053 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4054 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4054 (
4055 (
4055 b'',
4056 b'',
4056 b'noreadstderr',
4057 b'noreadstderr',
4057 False,
4058 False,
4058 _(b'do not read from stderr of the remote'),
4059 _(b'do not read from stderr of the remote'),
4059 ),
4060 ),
4060 (
4061 (
4061 b'',
4062 b'',
4062 b'nologhandshake',
4063 b'nologhandshake',
4063 False,
4064 False,
4064 _(b'do not log I/O related to the peer handshake'),
4065 _(b'do not log I/O related to the peer handshake'),
4065 ),
4066 ),
4066 ]
4067 ]
4067 + cmdutil.remoteopts,
4068 + cmdutil.remoteopts,
4068 _(b'[PATH]'),
4069 _(b'[PATH]'),
4069 optionalrepo=True,
4070 optionalrepo=True,
4070 )
4071 )
4071 def debugwireproto(ui, repo, path=None, **opts):
4072 def debugwireproto(ui, repo, path=None, **opts):
4072 """send wire protocol commands to a server
4073 """send wire protocol commands to a server
4073
4074
4074 This command can be used to issue wire protocol commands to remote
4075 This command can be used to issue wire protocol commands to remote
4075 peers and to debug the raw data being exchanged.
4076 peers and to debug the raw data being exchanged.
4076
4077
4077 ``--localssh`` will start an SSH server against the current repository
4078 ``--localssh`` will start an SSH server against the current repository
4078 and connect to that. By default, the connection will perform a handshake
4079 and connect to that. By default, the connection will perform a handshake
4079 and establish an appropriate peer instance.
4080 and establish an appropriate peer instance.
4080
4081
4081 ``--peer`` can be used to bypass the handshake protocol and construct a
4082 ``--peer`` can be used to bypass the handshake protocol and construct a
4082 peer instance using the specified class type. Valid values are ``raw``,
4083 peer instance using the specified class type. Valid values are ``raw``,
4083 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
4084 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
4084 raw data payloads and don't support higher-level command actions.
4085 raw data payloads and don't support higher-level command actions.
4085
4086
4086 ``--noreadstderr`` can be used to disable automatic reading from stderr
4087 ``--noreadstderr`` can be used to disable automatic reading from stderr
4087 of the peer (for SSH connections only). Disabling automatic reading of
4088 of the peer (for SSH connections only). Disabling automatic reading of
4088 stderr is useful for making output more deterministic.
4089 stderr is useful for making output more deterministic.
4089
4090
4090 Commands are issued via a mini language which is specified via stdin.
4091 Commands are issued via a mini language which is specified via stdin.
4091 The language consists of individual actions to perform. An action is
4092 The language consists of individual actions to perform. An action is
4092 defined by a block. A block is defined as a line with no leading
4093 defined by a block. A block is defined as a line with no leading
4093 space followed by 0 or more lines with leading space. Blocks are
4094 space followed by 0 or more lines with leading space. Blocks are
4094 effectively a high-level command with additional metadata.
4095 effectively a high-level command with additional metadata.
4095
4096
4096 Lines beginning with ``#`` are ignored.
4097 Lines beginning with ``#`` are ignored.
4097
4098
4098 The following sections denote available actions.
4099 The following sections denote available actions.
4099
4100
4100 raw
4101 raw
4101 ---
4102 ---
4102
4103
4103 Send raw data to the server.
4104 Send raw data to the server.
4104
4105
4105 The block payload contains the raw data to send as one atomic send
4106 The block payload contains the raw data to send as one atomic send
4106 operation. The data may not actually be delivered in a single system
4107 operation. The data may not actually be delivered in a single system
4107 call: it depends on the abilities of the transport being used.
4108 call: it depends on the abilities of the transport being used.
4108
4109
4109 Each line in the block is de-indented and concatenated. Then, that
4110 Each line in the block is de-indented and concatenated. Then, that
4110 value is evaluated as a Python b'' literal. This allows the use of
4111 value is evaluated as a Python b'' literal. This allows the use of
4111 backslash escaping, etc.
4112 backslash escaping, etc.
4112
4113
4113 raw+
4114 raw+
4114 ----
4115 ----
4115
4116
4116 Behaves like ``raw`` except flushes output afterwards.
4117 Behaves like ``raw`` except flushes output afterwards.
4117
4118
4118 command <X>
4119 command <X>
4119 -----------
4120 -----------
4120
4121
4121 Send a request to run a named command, whose name follows the ``command``
4122 Send a request to run a named command, whose name follows the ``command``
4122 string.
4123 string.
4123
4124
4124 Arguments to the command are defined as lines in this block. The format of
4125 Arguments to the command are defined as lines in this block. The format of
4125 each line is ``<key> <value>``. e.g.::
4126 each line is ``<key> <value>``. e.g.::
4126
4127
4127 command listkeys
4128 command listkeys
4128 namespace bookmarks
4129 namespace bookmarks
4129
4130
4130 If the value begins with ``eval:``, it will be interpreted as a Python
4131 If the value begins with ``eval:``, it will be interpreted as a Python
4131 literal expression. Otherwise values are interpreted as Python b'' literals.
4132 literal expression. Otherwise values are interpreted as Python b'' literals.
4132 This allows sending complex types and encoding special byte sequences via
4133 This allows sending complex types and encoding special byte sequences via
4133 backslash escaping.
4134 backslash escaping.
4134
4135
4135 The following arguments have special meaning:
4136 The following arguments have special meaning:
4136
4137
4137 ``PUSHFILE``
4138 ``PUSHFILE``
4138 When defined, the *push* mechanism of the peer will be used instead
4139 When defined, the *push* mechanism of the peer will be used instead
4139 of the static request-response mechanism and the content of the
4140 of the static request-response mechanism and the content of the
4140 file specified in the value of this argument will be sent as the
4141 file specified in the value of this argument will be sent as the
4141 command payload.
4142 command payload.
4142
4143
4143 This can be used to submit a local bundle file to the remote.
4144 This can be used to submit a local bundle file to the remote.
4144
4145
4145 batchbegin
4146 batchbegin
4146 ----------
4147 ----------
4147
4148
4148 Instruct the peer to begin a batched send.
4149 Instruct the peer to begin a batched send.
4149
4150
4150 All ``command`` blocks are queued for execution until the next
4151 All ``command`` blocks are queued for execution until the next
4151 ``batchsubmit`` block.
4152 ``batchsubmit`` block.
4152
4153
4153 batchsubmit
4154 batchsubmit
4154 -----------
4155 -----------
4155
4156
4156 Submit previously queued ``command`` blocks as a batch request.
4157 Submit previously queued ``command`` blocks as a batch request.
4157
4158
4158 This action MUST be paired with a ``batchbegin`` action.
4159 This action MUST be paired with a ``batchbegin`` action.
4159
4160
4160 httprequest <method> <path>
4161 httprequest <method> <path>
4161 ---------------------------
4162 ---------------------------
4162
4163
4163 (HTTP peer only)
4164 (HTTP peer only)
4164
4165
4165 Send an HTTP request to the peer.
4166 Send an HTTP request to the peer.
4166
4167
4167 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4168 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4168
4169
4169 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4170 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4170 headers to add to the request. e.g. ``Accept: foo``.
4171 headers to add to the request. e.g. ``Accept: foo``.
4171
4172
4172 The following arguments are special:
4173 The following arguments are special:
4173
4174
4174 ``BODYFILE``
4175 ``BODYFILE``
4175 The content of the file defined as the value to this argument will be
4176 The content of the file defined as the value to this argument will be
4176 transferred verbatim as the HTTP request body.
4177 transferred verbatim as the HTTP request body.
4177
4178
4178 ``frame <type> <flags> <payload>``
4179 ``frame <type> <flags> <payload>``
4179 Send a unified protocol frame as part of the request body.
4180 Send a unified protocol frame as part of the request body.
4180
4181
4181 All frames will be collected and sent as the body to the HTTP
4182 All frames will be collected and sent as the body to the HTTP
4182 request.
4183 request.
4183
4184
4184 close
4185 close
4185 -----
4186 -----
4186
4187
4187 Close the connection to the server.
4188 Close the connection to the server.
4188
4189
4189 flush
4190 flush
4190 -----
4191 -----
4191
4192
4192 Flush data written to the server.
4193 Flush data written to the server.
4193
4194
4194 readavailable
4195 readavailable
4195 -------------
4196 -------------
4196
4197
4197 Close the write end of the connection and read all available data from
4198 Close the write end of the connection and read all available data from
4198 the server.
4199 the server.
4199
4200
4200 If the connection to the server encompasses multiple pipes, we poll both
4201 If the connection to the server encompasses multiple pipes, we poll both
4201 pipes and read available data.
4202 pipes and read available data.
4202
4203
4203 readline
4204 readline
4204 --------
4205 --------
4205
4206
4206 Read a line of output from the server. If there are multiple output
4207 Read a line of output from the server. If there are multiple output
4207 pipes, reads only the main pipe.
4208 pipes, reads only the main pipe.
4208
4209
4209 ereadline
4210 ereadline
4210 ---------
4211 ---------
4211
4212
4212 Like ``readline``, but read from the stderr pipe, if available.
4213 Like ``readline``, but read from the stderr pipe, if available.
4213
4214
4214 read <X>
4215 read <X>
4215 --------
4216 --------
4216
4217
4217 ``read()`` N bytes from the server's main output pipe.
4218 ``read()`` N bytes from the server's main output pipe.
4218
4219
4219 eread <X>
4220 eread <X>
4220 ---------
4221 ---------
4221
4222
4222 ``read()`` N bytes from the server's stderr pipe, if available.
4223 ``read()`` N bytes from the server's stderr pipe, if available.
4223
4224
4224 Specifying Unified Frame-Based Protocol Frames
4225 Specifying Unified Frame-Based Protocol Frames
4225 ----------------------------------------------
4226 ----------------------------------------------
4226
4227
4227 It is possible to emit a *Unified Frame-Based Protocol* by using special
4228 It is possible to emit a *Unified Frame-Based Protocol* by using special
4228 syntax.
4229 syntax.
4229
4230
4230 A frame is composed as a type, flags, and payload. These can be parsed
4231 A frame is composed as a type, flags, and payload. These can be parsed
4231 from a string of the form:
4232 from a string of the form:
4232
4233
4233 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4234 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4234
4235
4235 ``request-id`` and ``stream-id`` are integers defining the request and
4236 ``request-id`` and ``stream-id`` are integers defining the request and
4236 stream identifiers.
4237 stream identifiers.
4237
4238
4238 ``type`` can be an integer value for the frame type or the string name
4239 ``type`` can be an integer value for the frame type or the string name
4239 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4240 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4240 ``command-name``.
4241 ``command-name``.
4241
4242
4242 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4243 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4243 components. Each component (and there can be just one) can be an integer
4244 components. Each component (and there can be just one) can be an integer
4244 or a flag name for stream flags or frame flags, respectively. Values are
4245 or a flag name for stream flags or frame flags, respectively. Values are
4245 resolved to integers and then bitwise OR'd together.
4246 resolved to integers and then bitwise OR'd together.
4246
4247
4247 ``payload`` represents the raw frame payload. If it begins with
4248 ``payload`` represents the raw frame payload. If it begins with
4248 ``cbor:``, the following string is evaluated as Python code and the
4249 ``cbor:``, the following string is evaluated as Python code and the
4249 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4250 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4250 as a Python byte string literal.
4251 as a Python byte string literal.
4251 """
4252 """
4252 opts = pycompat.byteskwargs(opts)
4253 opts = pycompat.byteskwargs(opts)
4253
4254
4254 if opts[b'localssh'] and not repo:
4255 if opts[b'localssh'] and not repo:
4255 raise error.Abort(_(b'--localssh requires a repository'))
4256 raise error.Abort(_(b'--localssh requires a repository'))
4256
4257
4257 if opts[b'peer'] and opts[b'peer'] not in (
4258 if opts[b'peer'] and opts[b'peer'] not in (
4258 b'raw',
4259 b'raw',
4259 b'http2',
4260 b'http2',
4260 b'ssh1',
4261 b'ssh1',
4261 b'ssh2',
4262 b'ssh2',
4262 ):
4263 ):
4263 raise error.Abort(
4264 raise error.Abort(
4264 _(b'invalid value for --peer'),
4265 _(b'invalid value for --peer'),
4265 hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
4266 hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
4266 )
4267 )
4267
4268
4268 if path and opts[b'localssh']:
4269 if path and opts[b'localssh']:
4269 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4270 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4270
4271
4271 if ui.interactive():
4272 if ui.interactive():
4272 ui.write(_(b'(waiting for commands on stdin)\n'))
4273 ui.write(_(b'(waiting for commands on stdin)\n'))
4273
4274
4274 blocks = list(_parsewirelangblocks(ui.fin))
4275 blocks = list(_parsewirelangblocks(ui.fin))
4275
4276
4276 proc = None
4277 proc = None
4277 stdin = None
4278 stdin = None
4278 stdout = None
4279 stdout = None
4279 stderr = None
4280 stderr = None
4280 opener = None
4281 opener = None
4281
4282
4282 if opts[b'localssh']:
4283 if opts[b'localssh']:
4283 # We start the SSH server in its own process so there is process
4284 # We start the SSH server in its own process so there is process
4284 # separation. This prevents a whole class of potential bugs around
4285 # separation. This prevents a whole class of potential bugs around
4285 # shared state from interfering with server operation.
4286 # shared state from interfering with server operation.
4286 args = procutil.hgcmd() + [
4287 args = procutil.hgcmd() + [
4287 b'-R',
4288 b'-R',
4288 repo.root,
4289 repo.root,
4289 b'debugserve',
4290 b'debugserve',
4290 b'--sshstdio',
4291 b'--sshstdio',
4291 ]
4292 ]
4292 proc = subprocess.Popen(
4293 proc = subprocess.Popen(
4293 pycompat.rapply(procutil.tonativestr, args),
4294 pycompat.rapply(procutil.tonativestr, args),
4294 stdin=subprocess.PIPE,
4295 stdin=subprocess.PIPE,
4295 stdout=subprocess.PIPE,
4296 stdout=subprocess.PIPE,
4296 stderr=subprocess.PIPE,
4297 stderr=subprocess.PIPE,
4297 bufsize=0,
4298 bufsize=0,
4298 )
4299 )
4299
4300
4300 stdin = proc.stdin
4301 stdin = proc.stdin
4301 stdout = proc.stdout
4302 stdout = proc.stdout
4302 stderr = proc.stderr
4303 stderr = proc.stderr
4303
4304
4304 # We turn the pipes into observers so we can log I/O.
4305 # We turn the pipes into observers so we can log I/O.
4305 if ui.verbose or opts[b'peer'] == b'raw':
4306 if ui.verbose or opts[b'peer'] == b'raw':
4306 stdin = util.makeloggingfileobject(
4307 stdin = util.makeloggingfileobject(
4307 ui, proc.stdin, b'i', logdata=True
4308 ui, proc.stdin, b'i', logdata=True
4308 )
4309 )
4309 stdout = util.makeloggingfileobject(
4310 stdout = util.makeloggingfileobject(
4310 ui, proc.stdout, b'o', logdata=True
4311 ui, proc.stdout, b'o', logdata=True
4311 )
4312 )
4312 stderr = util.makeloggingfileobject(
4313 stderr = util.makeloggingfileobject(
4313 ui, proc.stderr, b'e', logdata=True
4314 ui, proc.stderr, b'e', logdata=True
4314 )
4315 )
4315
4316
4316 # --localssh also implies the peer connection settings.
4317 # --localssh also implies the peer connection settings.
4317
4318
4318 url = b'ssh://localserver'
4319 url = b'ssh://localserver'
4319 autoreadstderr = not opts[b'noreadstderr']
4320 autoreadstderr = not opts[b'noreadstderr']
4320
4321
4321 if opts[b'peer'] == b'ssh1':
4322 if opts[b'peer'] == b'ssh1':
4322 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4323 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4323 peer = sshpeer.sshv1peer(
4324 peer = sshpeer.sshv1peer(
4324 ui,
4325 ui,
4325 url,
4326 url,
4326 proc,
4327 proc,
4327 stdin,
4328 stdin,
4328 stdout,
4329 stdout,
4329 stderr,
4330 stderr,
4330 None,
4331 None,
4331 autoreadstderr=autoreadstderr,
4332 autoreadstderr=autoreadstderr,
4332 )
4333 )
4333 elif opts[b'peer'] == b'ssh2':
4334 elif opts[b'peer'] == b'ssh2':
4334 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4335 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4335 peer = sshpeer.sshv2peer(
4336 peer = sshpeer.sshv2peer(
4336 ui,
4337 ui,
4337 url,
4338 url,
4338 proc,
4339 proc,
4339 stdin,
4340 stdin,
4340 stdout,
4341 stdout,
4341 stderr,
4342 stderr,
4342 None,
4343 None,
4343 autoreadstderr=autoreadstderr,
4344 autoreadstderr=autoreadstderr,
4344 )
4345 )
4345 elif opts[b'peer'] == b'raw':
4346 elif opts[b'peer'] == b'raw':
4346 ui.write(_(b'using raw connection to peer\n'))
4347 ui.write(_(b'using raw connection to peer\n'))
4347 peer = None
4348 peer = None
4348 else:
4349 else:
4349 ui.write(_(b'creating ssh peer from handshake results\n'))
4350 ui.write(_(b'creating ssh peer from handshake results\n'))
4350 peer = sshpeer.makepeer(
4351 peer = sshpeer.makepeer(
4351 ui,
4352 ui,
4352 url,
4353 url,
4353 proc,
4354 proc,
4354 stdin,
4355 stdin,
4355 stdout,
4356 stdout,
4356 stderr,
4357 stderr,
4357 autoreadstderr=autoreadstderr,
4358 autoreadstderr=autoreadstderr,
4358 )
4359 )
4359
4360
4360 elif path:
4361 elif path:
4361 # We bypass hg.peer() so we can proxy the sockets.
4362 # We bypass hg.peer() so we can proxy the sockets.
4362 # TODO consider not doing this because we skip
4363 # TODO consider not doing this because we skip
4363 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4364 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4364 u = util.url(path)
4365 u = util.url(path)
4365 if u.scheme != b'http':
4366 if u.scheme != b'http':
4366 raise error.Abort(_(b'only http:// paths are currently supported'))
4367 raise error.Abort(_(b'only http:// paths are currently supported'))
4367
4368
4368 url, authinfo = u.authinfo()
4369 url, authinfo = u.authinfo()
4369 openerargs = {
4370 openerargs = {
4370 'useragent': b'Mercurial debugwireproto',
4371 'useragent': b'Mercurial debugwireproto',
4371 }
4372 }
4372
4373
4373 # Turn pipes/sockets into observers so we can log I/O.
4374 # Turn pipes/sockets into observers so we can log I/O.
4374 if ui.verbose:
4375 if ui.verbose:
4375 openerargs.update(
4376 openerargs.update(
4376 {
4377 {
4377 'loggingfh': ui,
4378 'loggingfh': ui,
4378 'loggingname': b's',
4379 'loggingname': b's',
4379 'loggingopts': {
4380 'loggingopts': {
4380 'logdata': True,
4381 'logdata': True,
4381 'logdataapis': False,
4382 'logdataapis': False,
4382 },
4383 },
4383 }
4384 }
4384 )
4385 )
4385
4386
4386 if ui.debugflag:
4387 if ui.debugflag:
4387 openerargs['loggingopts']['logdataapis'] = True
4388 openerargs['loggingopts']['logdataapis'] = True
4388
4389
4389 # Don't send default headers when in raw mode. This allows us to
4390 # Don't send default headers when in raw mode. This allows us to
4390 # bypass most of the behavior of our URL handling code so we can
4391 # bypass most of the behavior of our URL handling code so we can
4391 # have near complete control over what's sent on the wire.
4392 # have near complete control over what's sent on the wire.
4392 if opts[b'peer'] == b'raw':
4393 if opts[b'peer'] == b'raw':
4393 openerargs['sendaccept'] = False
4394 openerargs['sendaccept'] = False
4394
4395
4395 opener = urlmod.opener(ui, authinfo, **openerargs)
4396 opener = urlmod.opener(ui, authinfo, **openerargs)
4396
4397
4397 if opts[b'peer'] == b'http2':
4398 if opts[b'peer'] == b'http2':
4398 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4399 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4399 # We go through makepeer() because we need an API descriptor for
4400 # We go through makepeer() because we need an API descriptor for
4400 # the peer instance to be useful.
4401 # the peer instance to be useful.
4401 with ui.configoverride(
4402 with ui.configoverride(
4402 {(b'experimental', b'httppeer.advertise-v2'): True}
4403 {(b'experimental', b'httppeer.advertise-v2'): True}
4403 ):
4404 ):
4404 if opts[b'nologhandshake']:
4405 if opts[b'nologhandshake']:
4405 ui.pushbuffer()
4406 ui.pushbuffer()
4406
4407
4407 peer = httppeer.makepeer(ui, path, opener=opener)
4408 peer = httppeer.makepeer(ui, path, opener=opener)
4408
4409
4409 if opts[b'nologhandshake']:
4410 if opts[b'nologhandshake']:
4410 ui.popbuffer()
4411 ui.popbuffer()
4411
4412
4412 if not isinstance(peer, httppeer.httpv2peer):
4413 if not isinstance(peer, httppeer.httpv2peer):
4413 raise error.Abort(
4414 raise error.Abort(
4414 _(
4415 _(
4415 b'could not instantiate HTTP peer for '
4416 b'could not instantiate HTTP peer for '
4416 b'wire protocol version 2'
4417 b'wire protocol version 2'
4417 ),
4418 ),
4418 hint=_(
4419 hint=_(
4419 b'the server may not have the feature '
4420 b'the server may not have the feature '
4420 b'enabled or is not allowing this '
4421 b'enabled or is not allowing this '
4421 b'client version'
4422 b'client version'
4422 ),
4423 ),
4423 )
4424 )
4424
4425
4425 elif opts[b'peer'] == b'raw':
4426 elif opts[b'peer'] == b'raw':
4426 ui.write(_(b'using raw connection to peer\n'))
4427 ui.write(_(b'using raw connection to peer\n'))
4427 peer = None
4428 peer = None
4428 elif opts[b'peer']:
4429 elif opts[b'peer']:
4429 raise error.Abort(
4430 raise error.Abort(
4430 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4431 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4431 )
4432 )
4432 else:
4433 else:
4433 peer = httppeer.makepeer(ui, path, opener=opener)
4434 peer = httppeer.makepeer(ui, path, opener=opener)
4434
4435
4435 # We /could/ populate stdin/stdout with sock.makefile()...
4436 # We /could/ populate stdin/stdout with sock.makefile()...
4436 else:
4437 else:
4437 raise error.Abort(_(b'unsupported connection configuration'))
4438 raise error.Abort(_(b'unsupported connection configuration'))
4438
4439
4439 batchedcommands = None
4440 batchedcommands = None
4440
4441
4441 # Now perform actions based on the parsed wire language instructions.
4442 # Now perform actions based on the parsed wire language instructions.
4442 for action, lines in blocks:
4443 for action, lines in blocks:
4443 if action in (b'raw', b'raw+'):
4444 if action in (b'raw', b'raw+'):
4444 if not stdin:
4445 if not stdin:
4445 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4446 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4446
4447
4447 # Concatenate the data together.
4448 # Concatenate the data together.
4448 data = b''.join(l.lstrip() for l in lines)
4449 data = b''.join(l.lstrip() for l in lines)
4449 data = stringutil.unescapestr(data)
4450 data = stringutil.unescapestr(data)
4450 stdin.write(data)
4451 stdin.write(data)
4451
4452
4452 if action == b'raw+':
4453 if action == b'raw+':
4453 stdin.flush()
4454 stdin.flush()
4454 elif action == b'flush':
4455 elif action == b'flush':
4455 if not stdin:
4456 if not stdin:
4456 raise error.Abort(_(b'cannot call flush on this peer'))
4457 raise error.Abort(_(b'cannot call flush on this peer'))
4457 stdin.flush()
4458 stdin.flush()
4458 elif action.startswith(b'command'):
4459 elif action.startswith(b'command'):
4459 if not peer:
4460 if not peer:
4460 raise error.Abort(
4461 raise error.Abort(
4461 _(
4462 _(
4462 b'cannot send commands unless peer instance '
4463 b'cannot send commands unless peer instance '
4463 b'is available'
4464 b'is available'
4464 )
4465 )
4465 )
4466 )
4466
4467
4467 command = action.split(b' ', 1)[1]
4468 command = action.split(b' ', 1)[1]
4468
4469
4469 args = {}
4470 args = {}
4470 for line in lines:
4471 for line in lines:
4471 # We need to allow empty values.
4472 # We need to allow empty values.
4472 fields = line.lstrip().split(b' ', 1)
4473 fields = line.lstrip().split(b' ', 1)
4473 if len(fields) == 1:
4474 if len(fields) == 1:
4474 key = fields[0]
4475 key = fields[0]
4475 value = b''
4476 value = b''
4476 else:
4477 else:
4477 key, value = fields
4478 key, value = fields
4478
4479
4479 if value.startswith(b'eval:'):
4480 if value.startswith(b'eval:'):
4480 value = stringutil.evalpythonliteral(value[5:])
4481 value = stringutil.evalpythonliteral(value[5:])
4481 else:
4482 else:
4482 value = stringutil.unescapestr(value)
4483 value = stringutil.unescapestr(value)
4483
4484
4484 args[key] = value
4485 args[key] = value
4485
4486
4486 if batchedcommands is not None:
4487 if batchedcommands is not None:
4487 batchedcommands.append((command, args))
4488 batchedcommands.append((command, args))
4488 continue
4489 continue
4489
4490
4490 ui.status(_(b'sending %s command\n') % command)
4491 ui.status(_(b'sending %s command\n') % command)
4491
4492
4492 if b'PUSHFILE' in args:
4493 if b'PUSHFILE' in args:
4493 with open(args[b'PUSHFILE'], 'rb') as fh:
4494 with open(args[b'PUSHFILE'], 'rb') as fh:
4494 del args[b'PUSHFILE']
4495 del args[b'PUSHFILE']
4495 res, output = peer._callpush(
4496 res, output = peer._callpush(
4496 command, fh, **pycompat.strkwargs(args)
4497 command, fh, **pycompat.strkwargs(args)
4497 )
4498 )
4498 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4499 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4499 ui.status(
4500 ui.status(
4500 _(b'remote output: %s\n') % stringutil.escapestr(output)
4501 _(b'remote output: %s\n') % stringutil.escapestr(output)
4501 )
4502 )
4502 else:
4503 else:
4503 with peer.commandexecutor() as e:
4504 with peer.commandexecutor() as e:
4504 res = e.callcommand(command, args).result()
4505 res = e.callcommand(command, args).result()
4505
4506
4506 if isinstance(res, wireprotov2peer.commandresponse):
4507 if isinstance(res, wireprotov2peer.commandresponse):
4507 val = res.objects()
4508 val = res.objects()
4508 ui.status(
4509 ui.status(
4509 _(b'response: %s\n')
4510 _(b'response: %s\n')
4510 % stringutil.pprint(val, bprefix=True, indent=2)
4511 % stringutil.pprint(val, bprefix=True, indent=2)
4511 )
4512 )
4512 else:
4513 else:
4513 ui.status(
4514 ui.status(
4514 _(b'response: %s\n')
4515 _(b'response: %s\n')
4515 % stringutil.pprint(res, bprefix=True, indent=2)
4516 % stringutil.pprint(res, bprefix=True, indent=2)
4516 )
4517 )
4517
4518
4518 elif action == b'batchbegin':
4519 elif action == b'batchbegin':
4519 if batchedcommands is not None:
4520 if batchedcommands is not None:
4520 raise error.Abort(_(b'nested batchbegin not allowed'))
4521 raise error.Abort(_(b'nested batchbegin not allowed'))
4521
4522
4522 batchedcommands = []
4523 batchedcommands = []
4523 elif action == b'batchsubmit':
4524 elif action == b'batchsubmit':
4524 # There is a batching API we could go through. But it would be
4525 # There is a batching API we could go through. But it would be
4525 # difficult to normalize requests into function calls. It is easier
4526 # difficult to normalize requests into function calls. It is easier
4526 # to bypass this layer and normalize to commands + args.
4527 # to bypass this layer and normalize to commands + args.
4527 ui.status(
4528 ui.status(
4528 _(b'sending batch with %d sub-commands\n')
4529 _(b'sending batch with %d sub-commands\n')
4529 % len(batchedcommands)
4530 % len(batchedcommands)
4530 )
4531 )
4531 assert peer is not None
4532 assert peer is not None
4532 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4533 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4533 ui.status(
4534 ui.status(
4534 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4535 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4535 )
4536 )
4536
4537
4537 batchedcommands = None
4538 batchedcommands = None
4538
4539
4539 elif action.startswith(b'httprequest '):
4540 elif action.startswith(b'httprequest '):
4540 if not opener:
4541 if not opener:
4541 raise error.Abort(
4542 raise error.Abort(
4542 _(b'cannot use httprequest without an HTTP peer')
4543 _(b'cannot use httprequest without an HTTP peer')
4543 )
4544 )
4544
4545
4545 request = action.split(b' ', 2)
4546 request = action.split(b' ', 2)
4546 if len(request) != 3:
4547 if len(request) != 3:
4547 raise error.Abort(
4548 raise error.Abort(
4548 _(
4549 _(
4549 b'invalid httprequest: expected format is '
4550 b'invalid httprequest: expected format is '
4550 b'"httprequest <method> <path>'
4551 b'"httprequest <method> <path>'
4551 )
4552 )
4552 )
4553 )
4553
4554
4554 method, httppath = request[1:]
4555 method, httppath = request[1:]
4555 headers = {}
4556 headers = {}
4556 body = None
4557 body = None
4557 frames = []
4558 frames = []
4558 for line in lines:
4559 for line in lines:
4559 line = line.lstrip()
4560 line = line.lstrip()
4560 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4561 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4561 if m:
4562 if m:
4562 # Headers need to use native strings.
4563 # Headers need to use native strings.
4563 key = pycompat.strurl(m.group(1))
4564 key = pycompat.strurl(m.group(1))
4564 value = pycompat.strurl(m.group(2))
4565 value = pycompat.strurl(m.group(2))
4565 headers[key] = value
4566 headers[key] = value
4566 continue
4567 continue
4567
4568
4568 if line.startswith(b'BODYFILE '):
4569 if line.startswith(b'BODYFILE '):
4569 with open(line.split(b' ', 1), b'rb') as fh:
4570 with open(line.split(b' ', 1), b'rb') as fh:
4570 body = fh.read()
4571 body = fh.read()
4571 elif line.startswith(b'frame '):
4572 elif line.startswith(b'frame '):
4572 frame = wireprotoframing.makeframefromhumanstring(
4573 frame = wireprotoframing.makeframefromhumanstring(
4573 line[len(b'frame ') :]
4574 line[len(b'frame ') :]
4574 )
4575 )
4575
4576
4576 frames.append(frame)
4577 frames.append(frame)
4577 else:
4578 else:
4578 raise error.Abort(
4579 raise error.Abort(
4579 _(b'unknown argument to httprequest: %s') % line
4580 _(b'unknown argument to httprequest: %s') % line
4580 )
4581 )
4581
4582
4582 url = path + httppath
4583 url = path + httppath
4583
4584
4584 if frames:
4585 if frames:
4585 body = b''.join(bytes(f) for f in frames)
4586 body = b''.join(bytes(f) for f in frames)
4586
4587
4587 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4588 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4588
4589
4589 # urllib.Request insists on using has_data() as a proxy for
4590 # urllib.Request insists on using has_data() as a proxy for
4590 # determining the request method. Override that to use our
4591 # determining the request method. Override that to use our
4591 # explicitly requested method.
4592 # explicitly requested method.
4592 req.get_method = lambda: pycompat.sysstr(method)
4593 req.get_method = lambda: pycompat.sysstr(method)
4593
4594
4594 try:
4595 try:
4595 res = opener.open(req)
4596 res = opener.open(req)
4596 body = res.read()
4597 body = res.read()
4597 except util.urlerr.urlerror as e:
4598 except util.urlerr.urlerror as e:
4598 # read() method must be called, but only exists in Python 2
4599 # read() method must be called, but only exists in Python 2
4599 getattr(e, 'read', lambda: None)()
4600 getattr(e, 'read', lambda: None)()
4600 continue
4601 continue
4601
4602
4602 ct = res.headers.get('Content-Type')
4603 ct = res.headers.get('Content-Type')
4603 if ct == 'application/mercurial-cbor':
4604 if ct == 'application/mercurial-cbor':
4604 ui.write(
4605 ui.write(
4605 _(b'cbor> %s\n')
4606 _(b'cbor> %s\n')
4606 % stringutil.pprint(
4607 % stringutil.pprint(
4607 cborutil.decodeall(body), bprefix=True, indent=2
4608 cborutil.decodeall(body), bprefix=True, indent=2
4608 )
4609 )
4609 )
4610 )
4610
4611
4611 elif action == b'close':
4612 elif action == b'close':
4612 assert peer is not None
4613 assert peer is not None
4613 peer.close()
4614 peer.close()
4614 elif action == b'readavailable':
4615 elif action == b'readavailable':
4615 if not stdout or not stderr:
4616 if not stdout or not stderr:
4616 raise error.Abort(
4617 raise error.Abort(
4617 _(b'readavailable not available on this peer')
4618 _(b'readavailable not available on this peer')
4618 )
4619 )
4619
4620
4620 stdin.close()
4621 stdin.close()
4621 stdout.read()
4622 stdout.read()
4622 stderr.read()
4623 stderr.read()
4623
4624
4624 elif action == b'readline':
4625 elif action == b'readline':
4625 if not stdout:
4626 if not stdout:
4626 raise error.Abort(_(b'readline not available on this peer'))
4627 raise error.Abort(_(b'readline not available on this peer'))
4627 stdout.readline()
4628 stdout.readline()
4628 elif action == b'ereadline':
4629 elif action == b'ereadline':
4629 if not stderr:
4630 if not stderr:
4630 raise error.Abort(_(b'ereadline not available on this peer'))
4631 raise error.Abort(_(b'ereadline not available on this peer'))
4631 stderr.readline()
4632 stderr.readline()
4632 elif action.startswith(b'read '):
4633 elif action.startswith(b'read '):
4633 count = int(action.split(b' ', 1)[1])
4634 count = int(action.split(b' ', 1)[1])
4634 if not stdout:
4635 if not stdout:
4635 raise error.Abort(_(b'read not available on this peer'))
4636 raise error.Abort(_(b'read not available on this peer'))
4636 stdout.read(count)
4637 stdout.read(count)
4637 elif action.startswith(b'eread '):
4638 elif action.startswith(b'eread '):
4638 count = int(action.split(b' ', 1)[1])
4639 count = int(action.split(b' ', 1)[1])
4639 if not stderr:
4640 if not stderr:
4640 raise error.Abort(_(b'eread not available on this peer'))
4641 raise error.Abort(_(b'eread not available on this peer'))
4641 stderr.read(count)
4642 stderr.read(count)
4642 else:
4643 else:
4643 raise error.Abort(_(b'unknown action: %s') % action)
4644 raise error.Abort(_(b'unknown action: %s') % action)
4644
4645
4645 if batchedcommands is not None:
4646 if batchedcommands is not None:
4646 raise error.Abort(_(b'unclosed "batchbegin" request'))
4647 raise error.Abort(_(b'unclosed "batchbegin" request'))
4647
4648
4648 if peer:
4649 if peer:
4649 peer.close()
4650 peer.close()
4650
4651
4651 if proc:
4652 if proc:
4652 proc.kill()
4653 proc.kill()
@@ -1,489 +1,500 b''
1 # setdiscovery.py - improved discovery of common nodeset for mercurial
1 # setdiscovery.py - improved discovery of common nodeset for mercurial
2 #
2 #
3 # Copyright 2010 Benoit Boissinot <bboissin@gmail.com>
3 # Copyright 2010 Benoit Boissinot <bboissin@gmail.com>
4 # and Peter Arrenbrecht <peter@arrenbrecht.ch>
4 # and Peter Arrenbrecht <peter@arrenbrecht.ch>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8 """
8 """
9 Algorithm works in the following way. You have two repository: local and
9 Algorithm works in the following way. You have two repository: local and
10 remote. They both contains a DAG of changelists.
10 remote. They both contains a DAG of changelists.
11
11
12 The goal of the discovery protocol is to find one set of node *common*,
12 The goal of the discovery protocol is to find one set of node *common*,
13 the set of nodes shared by local and remote.
13 the set of nodes shared by local and remote.
14
14
15 One of the issue with the original protocol was latency, it could
15 One of the issue with the original protocol was latency, it could
16 potentially require lots of roundtrips to discover that the local repo was a
16 potentially require lots of roundtrips to discover that the local repo was a
17 subset of remote (which is a very common case, you usually have few changes
17 subset of remote (which is a very common case, you usually have few changes
18 compared to upstream, while upstream probably had lots of development).
18 compared to upstream, while upstream probably had lots of development).
19
19
20 The new protocol only requires one interface for the remote repo: `known()`,
20 The new protocol only requires one interface for the remote repo: `known()`,
21 which given a set of changelists tells you if they are present in the DAG.
21 which given a set of changelists tells you if they are present in the DAG.
22
22
23 The algorithm then works as follow:
23 The algorithm then works as follow:
24
24
25 - We will be using three sets, `common`, `missing`, `unknown`. Originally
25 - We will be using three sets, `common`, `missing`, `unknown`. Originally
26 all nodes are in `unknown`.
26 all nodes are in `unknown`.
27 - Take a sample from `unknown`, call `remote.known(sample)`
27 - Take a sample from `unknown`, call `remote.known(sample)`
28 - For each node that remote knows, move it and all its ancestors to `common`
28 - For each node that remote knows, move it and all its ancestors to `common`
29 - For each node that remote doesn't know, move it and all its descendants
29 - For each node that remote doesn't know, move it and all its descendants
30 to `missing`
30 to `missing`
31 - Iterate until `unknown` is empty
31 - Iterate until `unknown` is empty
32
32
33 There are a couple optimizations, first is instead of starting with a random
33 There are a couple optimizations, first is instead of starting with a random
34 sample of missing, start by sending all heads, in the case where the local
34 sample of missing, start by sending all heads, in the case where the local
35 repo is a subset, you computed the answer in one round trip.
35 repo is a subset, you computed the answer in one round trip.
36
36
37 Then you can do something similar to the bisecting strategy used when
37 Then you can do something similar to the bisecting strategy used when
38 finding faulty changesets. Instead of random samples, you can try picking
38 finding faulty changesets. Instead of random samples, you can try picking
39 nodes that will maximize the number of nodes that will be
39 nodes that will maximize the number of nodes that will be
40 classified with it (since all ancestors or descendants will be marked as well).
40 classified with it (since all ancestors or descendants will be marked as well).
41 """
41 """
42
42
43 from __future__ import absolute_import
43 from __future__ import absolute_import
44
44
45 import collections
45 import collections
46 import random
46 import random
47
47
48 from .i18n import _
48 from .i18n import _
49 from .node import (
49 from .node import (
50 nullid,
50 nullid,
51 nullrev,
51 nullrev,
52 )
52 )
53 from . import (
53 from . import (
54 error,
54 error,
55 policy,
55 policy,
56 util,
56 util,
57 )
57 )
58
58
59
59
60 def _updatesample(revs, heads, sample, parentfn, quicksamplesize=0):
60 def _updatesample(revs, heads, sample, parentfn, quicksamplesize=0):
61 """update an existing sample to match the expected size
61 """update an existing sample to match the expected size
62
62
63 The sample is updated with revs exponentially distant from each head of the
63 The sample is updated with revs exponentially distant from each head of the
64 <revs> set. (H~1, H~2, H~4, H~8, etc).
64 <revs> set. (H~1, H~2, H~4, H~8, etc).
65
65
66 If a target size is specified, the sampling will stop once this size is
66 If a target size is specified, the sampling will stop once this size is
67 reached. Otherwise sampling will happen until roots of the <revs> set are
67 reached. Otherwise sampling will happen until roots of the <revs> set are
68 reached.
68 reached.
69
69
70 :revs: set of revs we want to discover (if None, assume the whole dag)
70 :revs: set of revs we want to discover (if None, assume the whole dag)
71 :heads: set of DAG head revs
71 :heads: set of DAG head revs
72 :sample: a sample to update
72 :sample: a sample to update
73 :parentfn: a callable to resolve parents for a revision
73 :parentfn: a callable to resolve parents for a revision
74 :quicksamplesize: optional target size of the sample"""
74 :quicksamplesize: optional target size of the sample"""
75 dist = {}
75 dist = {}
76 visit = collections.deque(heads)
76 visit = collections.deque(heads)
77 seen = set()
77 seen = set()
78 factor = 1
78 factor = 1
79 while visit:
79 while visit:
80 curr = visit.popleft()
80 curr = visit.popleft()
81 if curr in seen:
81 if curr in seen:
82 continue
82 continue
83 d = dist.setdefault(curr, 1)
83 d = dist.setdefault(curr, 1)
84 if d > factor:
84 if d > factor:
85 factor *= 2
85 factor *= 2
86 if d == factor:
86 if d == factor:
87 sample.add(curr)
87 sample.add(curr)
88 if quicksamplesize and (len(sample) >= quicksamplesize):
88 if quicksamplesize and (len(sample) >= quicksamplesize):
89 return
89 return
90 seen.add(curr)
90 seen.add(curr)
91
91
92 for p in parentfn(curr):
92 for p in parentfn(curr):
93 if p != nullrev and (not revs or p in revs):
93 if p != nullrev and (not revs or p in revs):
94 dist.setdefault(p, d + 1)
94 dist.setdefault(p, d + 1)
95 visit.append(p)
95 visit.append(p)
96
96
97
97
98 def _limitsample(sample, desiredlen, randomize=True):
98 def _limitsample(sample, desiredlen, randomize=True):
99 """return a random subset of sample of at most desiredlen item.
99 """return a random subset of sample of at most desiredlen item.
100
100
101 If randomize is False, though, a deterministic subset is returned.
101 If randomize is False, though, a deterministic subset is returned.
102 This is meant for integration tests.
102 This is meant for integration tests.
103 """
103 """
104 if len(sample) <= desiredlen:
104 if len(sample) <= desiredlen:
105 return sample
105 return sample
106 if randomize:
106 if randomize:
107 return set(random.sample(sample, desiredlen))
107 return set(random.sample(sample, desiredlen))
108 sample = list(sample)
108 sample = list(sample)
109 sample.sort()
109 sample.sort()
110 return set(sample[:desiredlen])
110 return set(sample[:desiredlen])
111
111
112
112
113 class partialdiscovery(object):
113 class partialdiscovery(object):
114 """an object representing ongoing discovery
114 """an object representing ongoing discovery
115
115
116 Feed with data from the remote repository, this object keep track of the
116 Feed with data from the remote repository, this object keep track of the
117 current set of changeset in various states:
117 current set of changeset in various states:
118
118
119 - common: revs also known remotely
119 - common: revs also known remotely
120 - undecided: revs we don't have information on yet
120 - undecided: revs we don't have information on yet
121 - missing: revs missing remotely
121 - missing: revs missing remotely
122 (all tracked revisions are known locally)
122 (all tracked revisions are known locally)
123 """
123 """
124
124
125 def __init__(self, repo, targetheads, respectsize, randomize=True):
125 def __init__(self, repo, targetheads, respectsize, randomize=True):
126 self._repo = repo
126 self._repo = repo
127 self._targetheads = targetheads
127 self._targetheads = targetheads
128 self._common = repo.changelog.incrementalmissingrevs()
128 self._common = repo.changelog.incrementalmissingrevs()
129 self._undecided = None
129 self._undecided = None
130 self.missing = set()
130 self.missing = set()
131 self._childrenmap = None
131 self._childrenmap = None
132 self._respectsize = respectsize
132 self._respectsize = respectsize
133 self.randomize = randomize
133 self.randomize = randomize
134
134
135 def addcommons(self, commons):
135 def addcommons(self, commons):
136 """register nodes known as common"""
136 """register nodes known as common"""
137 self._common.addbases(commons)
137 self._common.addbases(commons)
138 if self._undecided is not None:
138 if self._undecided is not None:
139 self._common.removeancestorsfrom(self._undecided)
139 self._common.removeancestorsfrom(self._undecided)
140
140
141 def addmissings(self, missings):
141 def addmissings(self, missings):
142 """register some nodes as missing"""
142 """register some nodes as missing"""
143 newmissing = self._repo.revs(b'%ld::%ld', missings, self.undecided)
143 newmissing = self._repo.revs(b'%ld::%ld', missings, self.undecided)
144 if newmissing:
144 if newmissing:
145 self.missing.update(newmissing)
145 self.missing.update(newmissing)
146 self.undecided.difference_update(newmissing)
146 self.undecided.difference_update(newmissing)
147
147
148 def addinfo(self, sample):
148 def addinfo(self, sample):
149 """consume an iterable of (rev, known) tuples"""
149 """consume an iterable of (rev, known) tuples"""
150 common = set()
150 common = set()
151 missing = set()
151 missing = set()
152 for rev, known in sample:
152 for rev, known in sample:
153 if known:
153 if known:
154 common.add(rev)
154 common.add(rev)
155 else:
155 else:
156 missing.add(rev)
156 missing.add(rev)
157 if common:
157 if common:
158 self.addcommons(common)
158 self.addcommons(common)
159 if missing:
159 if missing:
160 self.addmissings(missing)
160 self.addmissings(missing)
161
161
162 def hasinfo(self):
162 def hasinfo(self):
163 """return True is we have any clue about the remote state"""
163 """return True is we have any clue about the remote state"""
164 return self._common.hasbases()
164 return self._common.hasbases()
165
165
166 def iscomplete(self):
166 def iscomplete(self):
167 """True if all the necessary data have been gathered"""
167 """True if all the necessary data have been gathered"""
168 return self._undecided is not None and not self._undecided
168 return self._undecided is not None and not self._undecided
169
169
170 @property
170 @property
171 def undecided(self):
171 def undecided(self):
172 if self._undecided is not None:
172 if self._undecided is not None:
173 return self._undecided
173 return self._undecided
174 self._undecided = set(self._common.missingancestors(self._targetheads))
174 self._undecided = set(self._common.missingancestors(self._targetheads))
175 return self._undecided
175 return self._undecided
176
176
177 def stats(self):
177 def stats(self):
178 return {
178 return {
179 'undecided': len(self.undecided),
179 'undecided': len(self.undecided),
180 }
180 }
181
181
182 def commonheads(self):
182 def commonheads(self):
183 """the heads of the known common set"""
183 """the heads of the known common set"""
184 # heads(common) == heads(common.bases) since common represents
184 # heads(common) == heads(common.bases) since common represents
185 # common.bases and all its ancestors
185 # common.bases and all its ancestors
186 return self._common.basesheads()
186 return self._common.basesheads()
187
187
188 def _parentsgetter(self):
188 def _parentsgetter(self):
189 getrev = self._repo.changelog.index.__getitem__
189 getrev = self._repo.changelog.index.__getitem__
190
190
191 def getparents(r):
191 def getparents(r):
192 return getrev(r)[5:7]
192 return getrev(r)[5:7]
193
193
194 return getparents
194 return getparents
195
195
196 def _childrengetter(self):
196 def _childrengetter(self):
197
197
198 if self._childrenmap is not None:
198 if self._childrenmap is not None:
199 # During discovery, the `undecided` set keep shrinking.
199 # During discovery, the `undecided` set keep shrinking.
200 # Therefore, the map computed for an iteration N will be
200 # Therefore, the map computed for an iteration N will be
201 # valid for iteration N+1. Instead of computing the same
201 # valid for iteration N+1. Instead of computing the same
202 # data over and over we cached it the first time.
202 # data over and over we cached it the first time.
203 return self._childrenmap.__getitem__
203 return self._childrenmap.__getitem__
204
204
205 # _updatesample() essentially does interaction over revisions to look
205 # _updatesample() essentially does interaction over revisions to look
206 # up their children. This lookup is expensive and doing it in a loop is
206 # up their children. This lookup is expensive and doing it in a loop is
207 # quadratic. We precompute the children for all relevant revisions and
207 # quadratic. We precompute the children for all relevant revisions and
208 # make the lookup in _updatesample() a simple dict lookup.
208 # make the lookup in _updatesample() a simple dict lookup.
209 self._childrenmap = children = {}
209 self._childrenmap = children = {}
210
210
211 parentrevs = self._parentsgetter()
211 parentrevs = self._parentsgetter()
212 revs = self.undecided
212 revs = self.undecided
213
213
214 for rev in sorted(revs):
214 for rev in sorted(revs):
215 # Always ensure revision has an entry so we don't need to worry
215 # Always ensure revision has an entry so we don't need to worry
216 # about missing keys.
216 # about missing keys.
217 children[rev] = []
217 children[rev] = []
218 for prev in parentrevs(rev):
218 for prev in parentrevs(rev):
219 if prev == nullrev:
219 if prev == nullrev:
220 continue
220 continue
221 c = children.get(prev)
221 c = children.get(prev)
222 if c is not None:
222 if c is not None:
223 c.append(rev)
223 c.append(rev)
224 return children.__getitem__
224 return children.__getitem__
225
225
226 def takequicksample(self, headrevs, size):
226 def takequicksample(self, headrevs, size):
227 """takes a quick sample of size <size>
227 """takes a quick sample of size <size>
228
228
229 It is meant for initial sampling and focuses on querying heads and close
229 It is meant for initial sampling and focuses on querying heads and close
230 ancestors of heads.
230 ancestors of heads.
231
231
232 :headrevs: set of head revisions in local DAG to consider
232 :headrevs: set of head revisions in local DAG to consider
233 :size: the maximum size of the sample"""
233 :size: the maximum size of the sample"""
234 revs = self.undecided
234 revs = self.undecided
235 if len(revs) <= size:
235 if len(revs) <= size:
236 return list(revs)
236 return list(revs)
237 sample = set(self._repo.revs(b'heads(%ld)', revs))
237 sample = set(self._repo.revs(b'heads(%ld)', revs))
238
238
239 if len(sample) >= size:
239 if len(sample) >= size:
240 return _limitsample(sample, size, randomize=self.randomize)
240 return _limitsample(sample, size, randomize=self.randomize)
241
241
242 _updatesample(
242 _updatesample(
243 None, headrevs, sample, self._parentsgetter(), quicksamplesize=size
243 None, headrevs, sample, self._parentsgetter(), quicksamplesize=size
244 )
244 )
245 return sample
245 return sample
246
246
247 def takefullsample(self, headrevs, size):
247 def takefullsample(self, headrevs, size):
248 revs = self.undecided
248 revs = self.undecided
249 if len(revs) <= size:
249 if len(revs) <= size:
250 return list(revs)
250 return list(revs)
251 repo = self._repo
251 repo = self._repo
252 sample = set(repo.revs(b'heads(%ld)', revs))
252 sample = set(repo.revs(b'heads(%ld)', revs))
253 parentrevs = self._parentsgetter()
253 parentrevs = self._parentsgetter()
254
254
255 # update from heads
255 # update from heads
256 revsheads = sample.copy()
256 revsheads = sample.copy()
257 _updatesample(revs, revsheads, sample, parentrevs)
257 _updatesample(revs, revsheads, sample, parentrevs)
258
258
259 # update from roots
259 # update from roots
260 revsroots = set(repo.revs(b'roots(%ld)', revs))
260 revsroots = set(repo.revs(b'roots(%ld)', revs))
261 childrenrevs = self._childrengetter()
261 childrenrevs = self._childrengetter()
262 _updatesample(revs, revsroots, sample, childrenrevs)
262 _updatesample(revs, revsroots, sample, childrenrevs)
263 assert sample
263 assert sample
264
264
265 if not self._respectsize:
265 if not self._respectsize:
266 size = max(size, min(len(revsroots), len(revsheads)))
266 size = max(size, min(len(revsroots), len(revsheads)))
267
267
268 sample = _limitsample(sample, size, randomize=self.randomize)
268 sample = _limitsample(sample, size, randomize=self.randomize)
269 if len(sample) < size:
269 if len(sample) < size:
270 more = size - len(sample)
270 more = size - len(sample)
271 takefrom = list(revs - sample)
271 takefrom = list(revs - sample)
272 if self.randomize:
272 if self.randomize:
273 sample.update(random.sample(takefrom, more))
273 sample.update(random.sample(takefrom, more))
274 else:
274 else:
275 takefrom.sort()
275 takefrom.sort()
276 sample.update(takefrom[:more])
276 sample.update(takefrom[:more])
277 return sample
277 return sample
278
278
279
279
280 partialdiscovery = policy.importrust(
280 partialdiscovery = policy.importrust(
281 'discovery', member='PartialDiscovery', default=partialdiscovery
281 'discovery', member='PartialDiscovery', default=partialdiscovery
282 )
282 )
283
283
284
284
285 def findcommonheads(
285 def findcommonheads(
286 ui,
286 ui,
287 local,
287 local,
288 remote,
288 remote,
289 initialsamplesize=100,
289 initialsamplesize=100,
290 fullsamplesize=200,
290 fullsamplesize=200,
291 abortwhenunrelated=True,
291 abortwhenunrelated=True,
292 ancestorsof=None,
292 ancestorsof=None,
293 samplegrowth=1.05,
293 samplegrowth=1.05,
294 audit=None,
294 ):
295 ):
295 """Return a tuple (common, anyincoming, remoteheads) used to identify
296 """Return a tuple (common, anyincoming, remoteheads) used to identify
296 missing nodes from or in remote.
297 missing nodes from or in remote.
298
299 The audit argument is an optional dictionnary that a caller can pass. it
300 will be updated with extra data about the discovery, this is useful for
301 debug.
297 """
302 """
298 start = util.timer()
303 start = util.timer()
299
304
300 roundtrips = 0
305 roundtrips = 0
301 cl = local.changelog
306 cl = local.changelog
302 clnode = cl.node
307 clnode = cl.node
303 clrev = cl.rev
308 clrev = cl.rev
304
309
305 if ancestorsof is not None:
310 if ancestorsof is not None:
306 ownheads = [clrev(n) for n in ancestorsof]
311 ownheads = [clrev(n) for n in ancestorsof]
307 else:
312 else:
308 ownheads = [rev for rev in cl.headrevs() if rev != nullrev]
313 ownheads = [rev for rev in cl.headrevs() if rev != nullrev]
309
314
310 # early exit if we know all the specified remote heads already
315 # early exit if we know all the specified remote heads already
311 ui.debug(b"query 1; heads\n")
316 ui.debug(b"query 1; heads\n")
312 roundtrips += 1
317 roundtrips += 1
313 # We also ask remote about all the local heads. That set can be arbitrarily
318 # We also ask remote about all the local heads. That set can be arbitrarily
314 # large, so we used to limit it size to `initialsamplesize`. We no longer
319 # large, so we used to limit it size to `initialsamplesize`. We no longer
315 # do as it proved counter productive. The skipped heads could lead to a
320 # do as it proved counter productive. The skipped heads could lead to a
316 # large "undecided" set, slower to be clarified than if we asked the
321 # large "undecided" set, slower to be clarified than if we asked the
317 # question for all heads right away.
322 # question for all heads right away.
318 #
323 #
319 # We are already fetching all server heads using the `heads` commands,
324 # We are already fetching all server heads using the `heads` commands,
320 # sending a equivalent number of heads the other way should not have a
325 # sending a equivalent number of heads the other way should not have a
321 # significant impact. In addition, it is very likely that we are going to
326 # significant impact. In addition, it is very likely that we are going to
322 # have to issue "known" request for an equivalent amount of revisions in
327 # have to issue "known" request for an equivalent amount of revisions in
323 # order to decide if theses heads are common or missing.
328 # order to decide if theses heads are common or missing.
324 #
329 #
325 # find a detailled analysis below.
330 # find a detailled analysis below.
326 #
331 #
327 # Case A: local and server both has few heads
332 # Case A: local and server both has few heads
328 #
333 #
329 # Ownheads is below initialsamplesize, limit would not have any effect.
334 # Ownheads is below initialsamplesize, limit would not have any effect.
330 #
335 #
331 # Case B: local has few heads and server has many
336 # Case B: local has few heads and server has many
332 #
337 #
333 # Ownheads is below initialsamplesize, limit would not have any effect.
338 # Ownheads is below initialsamplesize, limit would not have any effect.
334 #
339 #
335 # Case C: local and server both has many heads
340 # Case C: local and server both has many heads
336 #
341 #
337 # We now transfert some more data, but not significantly more than is
342 # We now transfert some more data, but not significantly more than is
338 # already transfered to carry the server heads.
343 # already transfered to carry the server heads.
339 #
344 #
340 # Case D: local has many heads, server has few
345 # Case D: local has many heads, server has few
341 #
346 #
342 # D.1 local heads are mostly known remotely
347 # D.1 local heads are mostly known remotely
343 #
348 #
344 # All the known head will have be part of a `known` request at some
349 # All the known head will have be part of a `known` request at some
345 # point for the discovery to finish. Sending them all earlier is
350 # point for the discovery to finish. Sending them all earlier is
346 # actually helping.
351 # actually helping.
347 #
352 #
348 # (This case is fairly unlikely, it requires the numerous heads to all
353 # (This case is fairly unlikely, it requires the numerous heads to all
349 # be merged server side in only a few heads)
354 # be merged server side in only a few heads)
350 #
355 #
351 # D.2 local heads are mostly missing remotely
356 # D.2 local heads are mostly missing remotely
352 #
357 #
353 # To determine that the heads are missing, we'll have to issue `known`
358 # To determine that the heads are missing, we'll have to issue `known`
354 # request for them or one of their ancestors. This amount of `known`
359 # request for them or one of their ancestors. This amount of `known`
355 # request will likely be in the same order of magnitude than the amount
360 # request will likely be in the same order of magnitude than the amount
356 # of local heads.
361 # of local heads.
357 #
362 #
358 # The only case where we can be more efficient using `known` request on
363 # The only case where we can be more efficient using `known` request on
359 # ancestors are case were all the "missing" local heads are based on a
364 # ancestors are case were all the "missing" local heads are based on a
360 # few changeset, also "missing". This means we would have a "complex"
365 # few changeset, also "missing". This means we would have a "complex"
361 # graph (with many heads) attached to, but very independant to a the
366 # graph (with many heads) attached to, but very independant to a the
362 # "simple" graph on the server. This is a fairly usual case and have
367 # "simple" graph on the server. This is a fairly usual case and have
363 # not been met in the wild so far.
368 # not been met in the wild so far.
364 if remote.limitedarguments:
369 if remote.limitedarguments:
365 sample = _limitsample(ownheads, initialsamplesize)
370 sample = _limitsample(ownheads, initialsamplesize)
366 # indices between sample and externalized version must match
371 # indices between sample and externalized version must match
367 sample = list(sample)
372 sample = list(sample)
368 else:
373 else:
369 sample = ownheads
374 sample = ownheads
370
375
371 with remote.commandexecutor() as e:
376 with remote.commandexecutor() as e:
372 fheads = e.callcommand(b'heads', {})
377 fheads = e.callcommand(b'heads', {})
373 fknown = e.callcommand(
378 fknown = e.callcommand(
374 b'known',
379 b'known',
375 {
380 {
376 b'nodes': [clnode(r) for r in sample],
381 b'nodes': [clnode(r) for r in sample],
377 },
382 },
378 )
383 )
379
384
380 srvheadhashes, yesno = fheads.result(), fknown.result()
385 srvheadhashes, yesno = fheads.result(), fknown.result()
381
386
387 if audit is not None:
388 audit[b'total-roundtrips'] = 1
389
382 if cl.tip() == nullid:
390 if cl.tip() == nullid:
383 if srvheadhashes != [nullid]:
391 if srvheadhashes != [nullid]:
384 return [nullid], True, srvheadhashes
392 return [nullid], True, srvheadhashes
385 return [nullid], False, []
393 return [nullid], False, []
386
394
387 # start actual discovery (we note this before the next "if" for
395 # start actual discovery (we note this before the next "if" for
388 # compatibility reasons)
396 # compatibility reasons)
389 ui.status(_(b"searching for changes\n"))
397 ui.status(_(b"searching for changes\n"))
390
398
391 knownsrvheads = [] # revnos of remote heads that are known locally
399 knownsrvheads = [] # revnos of remote heads that are known locally
392 for node in srvheadhashes:
400 for node in srvheadhashes:
393 if node == nullid:
401 if node == nullid:
394 continue
402 continue
395
403
396 try:
404 try:
397 knownsrvheads.append(clrev(node))
405 knownsrvheads.append(clrev(node))
398 # Catches unknown and filtered nodes.
406 # Catches unknown and filtered nodes.
399 except error.LookupError:
407 except error.LookupError:
400 continue
408 continue
401
409
402 if len(knownsrvheads) == len(srvheadhashes):
410 if len(knownsrvheads) == len(srvheadhashes):
403 ui.debug(b"all remote heads known locally\n")
411 ui.debug(b"all remote heads known locally\n")
404 return srvheadhashes, False, srvheadhashes
412 return srvheadhashes, False, srvheadhashes
405
413
406 if len(sample) == len(ownheads) and all(yesno):
414 if len(sample) == len(ownheads) and all(yesno):
407 ui.note(_(b"all local changesets known remotely\n"))
415 ui.note(_(b"all local changesets known remotely\n"))
408 ownheadhashes = [clnode(r) for r in ownheads]
416 ownheadhashes = [clnode(r) for r in ownheads]
409 return ownheadhashes, True, srvheadhashes
417 return ownheadhashes, True, srvheadhashes
410
418
411 # full blown discovery
419 # full blown discovery
412
420
413 randomize = ui.configbool(b'devel', b'discovery.randomize')
421 randomize = ui.configbool(b'devel', b'discovery.randomize')
414 disco = partialdiscovery(
422 disco = partialdiscovery(
415 local, ownheads, remote.limitedarguments, randomize=randomize
423 local, ownheads, remote.limitedarguments, randomize=randomize
416 )
424 )
417 # treat remote heads (and maybe own heads) as a first implicit sample
425 # treat remote heads (and maybe own heads) as a first implicit sample
418 # response
426 # response
419 disco.addcommons(knownsrvheads)
427 disco.addcommons(knownsrvheads)
420 disco.addinfo(zip(sample, yesno))
428 disco.addinfo(zip(sample, yesno))
421
429
422 full = False
430 full = False
423 progress = ui.makeprogress(_(b'searching'), unit=_(b'queries'))
431 progress = ui.makeprogress(_(b'searching'), unit=_(b'queries'))
424 while not disco.iscomplete():
432 while not disco.iscomplete():
425
433
426 if full or disco.hasinfo():
434 if full or disco.hasinfo():
427 if full:
435 if full:
428 ui.note(_(b"sampling from both directions\n"))
436 ui.note(_(b"sampling from both directions\n"))
429 else:
437 else:
430 ui.debug(b"taking initial sample\n")
438 ui.debug(b"taking initial sample\n")
431 samplefunc = disco.takefullsample
439 samplefunc = disco.takefullsample
432 targetsize = fullsamplesize
440 targetsize = fullsamplesize
433 if not remote.limitedarguments:
441 if not remote.limitedarguments:
434 fullsamplesize = int(fullsamplesize * samplegrowth)
442 fullsamplesize = int(fullsamplesize * samplegrowth)
435 else:
443 else:
436 # use even cheaper initial sample
444 # use even cheaper initial sample
437 ui.debug(b"taking quick initial sample\n")
445 ui.debug(b"taking quick initial sample\n")
438 samplefunc = disco.takequicksample
446 samplefunc = disco.takequicksample
439 targetsize = initialsamplesize
447 targetsize = initialsamplesize
440 sample = samplefunc(ownheads, targetsize)
448 sample = samplefunc(ownheads, targetsize)
441
449
442 roundtrips += 1
450 roundtrips += 1
443 progress.update(roundtrips)
451 progress.update(roundtrips)
444 stats = disco.stats()
452 stats = disco.stats()
445 ui.debug(
453 ui.debug(
446 b"query %i; still undecided: %i, sample size is: %i\n"
454 b"query %i; still undecided: %i, sample size is: %i\n"
447 % (roundtrips, stats['undecided'], len(sample))
455 % (roundtrips, stats['undecided'], len(sample))
448 )
456 )
449
457
450 # indices between sample and externalized version must match
458 # indices between sample and externalized version must match
451 sample = list(sample)
459 sample = list(sample)
452
460
453 with remote.commandexecutor() as e:
461 with remote.commandexecutor() as e:
454 yesno = e.callcommand(
462 yesno = e.callcommand(
455 b'known',
463 b'known',
456 {
464 {
457 b'nodes': [clnode(r) for r in sample],
465 b'nodes': [clnode(r) for r in sample],
458 },
466 },
459 ).result()
467 ).result()
460
468
461 full = True
469 full = True
462
470
463 disco.addinfo(zip(sample, yesno))
471 disco.addinfo(zip(sample, yesno))
464
472
465 result = disco.commonheads()
473 result = disco.commonheads()
466 elapsed = util.timer() - start
474 elapsed = util.timer() - start
467 progress.complete()
475 progress.complete()
468 ui.debug(b"%d total queries in %.4fs\n" % (roundtrips, elapsed))
476 ui.debug(b"%d total queries in %.4fs\n" % (roundtrips, elapsed))
469 msg = (
477 msg = (
470 b'found %d common and %d unknown server heads,'
478 b'found %d common and %d unknown server heads,'
471 b' %d roundtrips in %.4fs\n'
479 b' %d roundtrips in %.4fs\n'
472 )
480 )
473 missing = set(result) - set(knownsrvheads)
481 missing = set(result) - set(knownsrvheads)
474 ui.log(b'discovery', msg, len(result), len(missing), roundtrips, elapsed)
482 ui.log(b'discovery', msg, len(result), len(missing), roundtrips, elapsed)
475
483
484 if audit is not None:
485 audit[b'total-roundtrips'] = roundtrips
486
476 if not result and srvheadhashes != [nullid]:
487 if not result and srvheadhashes != [nullid]:
477 if abortwhenunrelated:
488 if abortwhenunrelated:
478 raise error.Abort(_(b"repository is unrelated"))
489 raise error.Abort(_(b"repository is unrelated"))
479 else:
490 else:
480 ui.warn(_(b"warning: repository is unrelated\n"))
491 ui.warn(_(b"warning: repository is unrelated\n"))
481 return (
492 return (
482 {nullid},
493 {nullid},
483 True,
494 True,
484 srvheadhashes,
495 srvheadhashes,
485 )
496 )
486
497
487 anyincoming = srvheadhashes != [nullid]
498 anyincoming = srvheadhashes != [nullid]
488 result = {clnode(r) for r in result}
499 result = {clnode(r) for r in result}
489 return result, anyincoming, srvheadhashes
500 return result, anyincoming, srvheadhashes
@@ -1,185 +1,190 b''
1 # discovery.py - protocol changeset discovery functions
1 # discovery.py - protocol changeset discovery functions
2 #
2 #
3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11
11
12 from .i18n import _
12 from .i18n import _
13 from .node import (
13 from .node import (
14 nullid,
14 nullid,
15 short,
15 short,
16 )
16 )
17 from . import (
17 from . import (
18 error,
18 error,
19 pycompat,
19 pycompat,
20 )
20 )
21
21
22
22
23 def findcommonincoming(repo, remote, heads=None, force=False):
23 def findcommonincoming(repo, remote, heads=None, force=False, audit=None):
24 """Return a tuple (common, fetch, heads) used to identify the common
24 """Return a tuple (common, fetch, heads) used to identify the common
25 subset of nodes between repo and remote.
25 subset of nodes between repo and remote.
26
26
27 "common" is a list of (at least) the heads of the common subset.
27 "common" is a list of (at least) the heads of the common subset.
28 "fetch" is a list of roots of the nodes that would be incoming, to be
28 "fetch" is a list of roots of the nodes that would be incoming, to be
29 supplied to changegroupsubset.
29 supplied to changegroupsubset.
30 "heads" is either the supplied heads, or else the remote's heads.
30 "heads" is either the supplied heads, or else the remote's heads.
31 """
31 """
32
32
33 knownnode = repo.changelog.hasnode
33 knownnode = repo.changelog.hasnode
34 search = []
34 search = []
35 fetch = set()
35 fetch = set()
36 seen = set()
36 seen = set()
37 seenbranch = set()
37 seenbranch = set()
38 base = set()
38 base = set()
39
39
40 if not heads:
40 if not heads:
41 with remote.commandexecutor() as e:
41 with remote.commandexecutor() as e:
42 heads = e.callcommand(b'heads', {}).result()
42 heads = e.callcommand(b'heads', {}).result()
43
43
44 if audit is not None:
45 audit[b'total-roundtrips'] = 1
46
44 if repo.changelog.tip() == nullid:
47 if repo.changelog.tip() == nullid:
45 base.add(nullid)
48 base.add(nullid)
46 if heads != [nullid]:
49 if heads != [nullid]:
47 return [nullid], [nullid], list(heads)
50 return [nullid], [nullid], list(heads)
48 return [nullid], [], heads
51 return [nullid], [], heads
49
52
50 # assume we're closer to the tip than the root
53 # assume we're closer to the tip than the root
51 # and start by examining the heads
54 # and start by examining the heads
52 repo.ui.status(_(b"searching for changes\n"))
55 repo.ui.status(_(b"searching for changes\n"))
53
56
54 unknown = []
57 unknown = []
55 for h in heads:
58 for h in heads:
56 if not knownnode(h):
59 if not knownnode(h):
57 unknown.append(h)
60 unknown.append(h)
58 else:
61 else:
59 base.add(h)
62 base.add(h)
60
63
61 if not unknown:
64 if not unknown:
62 return list(base), [], list(heads)
65 return list(base), [], list(heads)
63
66
64 req = set(unknown)
67 req = set(unknown)
65 reqcnt = 0
68 reqcnt = 0
66 progress = repo.ui.makeprogress(_(b'searching'), unit=_(b'queries'))
69 progress = repo.ui.makeprogress(_(b'searching'), unit=_(b'queries'))
67
70
68 # search through remote branches
71 # search through remote branches
69 # a 'branch' here is a linear segment of history, with four parts:
72 # a 'branch' here is a linear segment of history, with four parts:
70 # head, root, first parent, second parent
73 # head, root, first parent, second parent
71 # (a branch always has two parents (or none) by definition)
74 # (a branch always has two parents (or none) by definition)
72 with remote.commandexecutor() as e:
75 with remote.commandexecutor() as e:
73 branches = e.callcommand(b'branches', {b'nodes': unknown}).result()
76 branches = e.callcommand(b'branches', {b'nodes': unknown}).result()
74
77
75 unknown = collections.deque(branches)
78 unknown = collections.deque(branches)
76 while unknown:
79 while unknown:
77 r = []
80 r = []
78 while unknown:
81 while unknown:
79 n = unknown.popleft()
82 n = unknown.popleft()
80 if n[0] in seen:
83 if n[0] in seen:
81 continue
84 continue
82
85
83 repo.ui.debug(b"examining %s:%s\n" % (short(n[0]), short(n[1])))
86 repo.ui.debug(b"examining %s:%s\n" % (short(n[0]), short(n[1])))
84 if n[0] == nullid: # found the end of the branch
87 if n[0] == nullid: # found the end of the branch
85 pass
88 pass
86 elif n in seenbranch:
89 elif n in seenbranch:
87 repo.ui.debug(b"branch already found\n")
90 repo.ui.debug(b"branch already found\n")
88 continue
91 continue
89 elif n[1] and knownnode(n[1]): # do we know the base?
92 elif n[1] and knownnode(n[1]): # do we know the base?
90 repo.ui.debug(
93 repo.ui.debug(
91 b"found incomplete branch %s:%s\n"
94 b"found incomplete branch %s:%s\n"
92 % (short(n[0]), short(n[1]))
95 % (short(n[0]), short(n[1]))
93 )
96 )
94 search.append(n[0:2]) # schedule branch range for scanning
97 search.append(n[0:2]) # schedule branch range for scanning
95 seenbranch.add(n)
98 seenbranch.add(n)
96 else:
99 else:
97 if n[1] not in seen and n[1] not in fetch:
100 if n[1] not in seen and n[1] not in fetch:
98 if knownnode(n[2]) and knownnode(n[3]):
101 if knownnode(n[2]) and knownnode(n[3]):
99 repo.ui.debug(b"found new changeset %s\n" % short(n[1]))
102 repo.ui.debug(b"found new changeset %s\n" % short(n[1]))
100 fetch.add(n[1]) # earliest unknown
103 fetch.add(n[1]) # earliest unknown
101 for p in n[2:4]:
104 for p in n[2:4]:
102 if knownnode(p):
105 if knownnode(p):
103 base.add(p) # latest known
106 base.add(p) # latest known
104
107
105 for p in n[2:4]:
108 for p in n[2:4]:
106 if p not in req and not knownnode(p):
109 if p not in req and not knownnode(p):
107 r.append(p)
110 r.append(p)
108 req.add(p)
111 req.add(p)
109 seen.add(n[0])
112 seen.add(n[0])
110
113
111 if r:
114 if r:
112 reqcnt += 1
115 reqcnt += 1
113 progress.increment()
116 progress.increment()
114 repo.ui.debug(
117 repo.ui.debug(
115 b"request %d: %s\n" % (reqcnt, b" ".join(map(short, r)))
118 b"request %d: %s\n" % (reqcnt, b" ".join(map(short, r)))
116 )
119 )
117 for p in pycompat.xrange(0, len(r), 10):
120 for p in pycompat.xrange(0, len(r), 10):
118 with remote.commandexecutor() as e:
121 with remote.commandexecutor() as e:
119 branches = e.callcommand(
122 branches = e.callcommand(
120 b'branches',
123 b'branches',
121 {
124 {
122 b'nodes': r[p : p + 10],
125 b'nodes': r[p : p + 10],
123 },
126 },
124 ).result()
127 ).result()
125
128
126 for b in branches:
129 for b in branches:
127 repo.ui.debug(
130 repo.ui.debug(
128 b"received %s:%s\n" % (short(b[0]), short(b[1]))
131 b"received %s:%s\n" % (short(b[0]), short(b[1]))
129 )
132 )
130 unknown.append(b)
133 unknown.append(b)
131
134
132 # do binary search on the branches we found
135 # do binary search on the branches we found
133 while search:
136 while search:
134 newsearch = []
137 newsearch = []
135 reqcnt += 1
138 reqcnt += 1
136 progress.increment()
139 progress.increment()
137
140
138 with remote.commandexecutor() as e:
141 with remote.commandexecutor() as e:
139 between = e.callcommand(b'between', {b'pairs': search}).result()
142 between = e.callcommand(b'between', {b'pairs': search}).result()
140
143
141 for n, l in zip(search, between):
144 for n, l in zip(search, between):
142 l.append(n[1])
145 l.append(n[1])
143 p = n[0]
146 p = n[0]
144 f = 1
147 f = 1
145 for i in l:
148 for i in l:
146 repo.ui.debug(b"narrowing %d:%d %s\n" % (f, len(l), short(i)))
149 repo.ui.debug(b"narrowing %d:%d %s\n" % (f, len(l), short(i)))
147 if knownnode(i):
150 if knownnode(i):
148 if f <= 2:
151 if f <= 2:
149 repo.ui.debug(
152 repo.ui.debug(
150 b"found new branch changeset %s\n" % short(p)
153 b"found new branch changeset %s\n" % short(p)
151 )
154 )
152 fetch.add(p)
155 fetch.add(p)
153 base.add(i)
156 base.add(i)
154 else:
157 else:
155 repo.ui.debug(
158 repo.ui.debug(
156 b"narrowed branch search to %s:%s\n"
159 b"narrowed branch search to %s:%s\n"
157 % (short(p), short(i))
160 % (short(p), short(i))
158 )
161 )
159 newsearch.append((p, i))
162 newsearch.append((p, i))
160 break
163 break
161 p, f = i, f * 2
164 p, f = i, f * 2
162 search = newsearch
165 search = newsearch
163
166
164 # sanity check our fetch list
167 # sanity check our fetch list
165 for f in fetch:
168 for f in fetch:
166 if knownnode(f):
169 if knownnode(f):
167 raise error.RepoError(_(b"already have changeset ") + short(f[:4]))
170 raise error.RepoError(_(b"already have changeset ") + short(f[:4]))
168
171
169 base = list(base)
172 base = list(base)
170 if base == [nullid]:
173 if base == [nullid]:
171 if force:
174 if force:
172 repo.ui.warn(_(b"warning: repository is unrelated\n"))
175 repo.ui.warn(_(b"warning: repository is unrelated\n"))
173 else:
176 else:
174 raise error.Abort(_(b"repository is unrelated"))
177 raise error.Abort(_(b"repository is unrelated"))
175
178
176 repo.ui.debug(
179 repo.ui.debug(
177 b"found new changesets starting at "
180 b"found new changesets starting at "
178 + b" ".join([short(f) for f in fetch])
181 + b" ".join([short(f) for f in fetch])
179 + b"\n"
182 + b"\n"
180 )
183 )
181
184
182 progress.complete()
185 progress.complete()
183 repo.ui.debug(b"%d total queries\n" % reqcnt)
186 repo.ui.debug(b"%d total queries\n" % reqcnt)
187 if audit is not None:
188 audit[b'total-roundtrips'] = reqcnt
184
189
185 return base, list(fetch), heads
190 return base, list(fetch), heads
@@ -1,1502 +1,1541 b''
1
1
2 Function to test discovery between two repos in both directions, using both the local shortcut
2 Function to test discovery between two repos in both directions, using both the local shortcut
3 (which is currently not activated by default) and the full remotable protocol:
3 (which is currently not activated by default) and the full remotable protocol:
4
4
5 $ testdesc() { # revs_a, revs_b, dagdesc
5 $ testdesc() { # revs_a, revs_b, dagdesc
6 > if [ -d foo ]; then rm -rf foo; fi
6 > if [ -d foo ]; then rm -rf foo; fi
7 > hg init foo
7 > hg init foo
8 > cd foo
8 > cd foo
9 > hg debugbuilddag "$3"
9 > hg debugbuilddag "$3"
10 > hg clone . a $1 --quiet
10 > hg clone . a $1 --quiet
11 > hg clone . b $2 --quiet
11 > hg clone . b $2 --quiet
12 > echo
12 > echo
13 > echo "% -- a -> b tree"
13 > echo "% -- a -> b tree"
14 > hg -R a debugdiscovery b --verbose --old
14 > hg -R a debugdiscovery b --verbose --old
15 > echo
15 > echo
16 > echo "% -- a -> b set"
16 > echo "% -- a -> b set"
17 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true
17 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true
18 > echo
18 > echo
19 > echo "% -- a -> b set (tip only)"
19 > echo "% -- a -> b set (tip only)"
20 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true --rev tip
20 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true --rev tip
21 > echo
21 > echo
22 > echo "% -- b -> a tree"
22 > echo "% -- b -> a tree"
23 > hg -R b debugdiscovery a --verbose --old
23 > hg -R b debugdiscovery a --verbose --old
24 > echo
24 > echo
25 > echo "% -- b -> a set"
25 > echo "% -- b -> a set"
26 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true
26 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true
27 > echo
27 > echo
28 > echo "% -- b -> a set (tip only)"
28 > echo "% -- b -> a set (tip only)"
29 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true --rev tip
29 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true --rev tip
30 > cd ..
30 > cd ..
31 > }
31 > }
32
32
33
33
34 Small superset:
34 Small superset:
35
35
36 $ testdesc '-ra1 -ra2' '-rb1 -rb2 -rb3' '
36 $ testdesc '-ra1 -ra2' '-rb1 -rb2 -rb3' '
37 > +2:f +1:a1:b1
37 > +2:f +1:a1:b1
38 > <f +4 :a2
38 > <f +4 :a2
39 > +5 :b2
39 > +5 :b2
40 > <f +3 :b3'
40 > <f +3 :b3'
41
41
42 % -- a -> b tree
42 % -- a -> b tree
43 comparing with b
43 comparing with b
44 searching for changes
44 searching for changes
45 unpruned common: 01241442b3c2 66f7d451a68b b5714e113bc0
45 unpruned common: 01241442b3c2 66f7d451a68b b5714e113bc0
46 elapsed time: * seconds (glob)
46 elapsed time: * seconds (glob)
47 round-trips: 2
47 heads summary:
48 heads summary:
48 total common heads: 2
49 total common heads: 2
49 also local heads: 2
50 also local heads: 2
50 also remote heads: 1
51 also remote heads: 1
51 both: 1
52 both: 1
52 local heads: 2
53 local heads: 2
53 common: 2
54 common: 2
54 missing: 0
55 missing: 0
55 remote heads: 3
56 remote heads: 3
56 common: 1
57 common: 1
57 unknown: 2
58 unknown: 2
58 local changesets: 7
59 local changesets: 7
59 common: 7
60 common: 7
60 heads: 2
61 heads: 2
61 roots: 1
62 roots: 1
62 missing: 0
63 missing: 0
63 heads: 0
64 heads: 0
64 roots: 0
65 roots: 0
65 first undecided set: 3
66 first undecided set: 3
66 heads: 1
67 heads: 1
67 roots: 1
68 roots: 1
68 common: 3
69 common: 3
69 missing: 0
70 missing: 0
70 common heads: 01241442b3c2 b5714e113bc0
71 common heads: 01241442b3c2 b5714e113bc0
71
72
72 % -- a -> b set
73 % -- a -> b set
73 comparing with b
74 comparing with b
74 query 1; heads
75 query 1; heads
75 searching for changes
76 searching for changes
76 all local changesets known remotely
77 all local changesets known remotely
77 elapsed time: * seconds (glob)
78 elapsed time: * seconds (glob)
79 round-trips: 1
78 heads summary:
80 heads summary:
79 total common heads: 2
81 total common heads: 2
80 also local heads: 2
82 also local heads: 2
81 also remote heads: 1
83 also remote heads: 1
82 both: 1
84 both: 1
83 local heads: 2
85 local heads: 2
84 common: 2
86 common: 2
85 missing: 0
87 missing: 0
86 remote heads: 3
88 remote heads: 3
87 common: 1
89 common: 1
88 unknown: 2
90 unknown: 2
89 local changesets: 7
91 local changesets: 7
90 common: 7
92 common: 7
91 heads: 2
93 heads: 2
92 roots: 1
94 roots: 1
93 missing: 0
95 missing: 0
94 heads: 0
96 heads: 0
95 roots: 0
97 roots: 0
96 first undecided set: 3
98 first undecided set: 3
97 heads: 1
99 heads: 1
98 roots: 1
100 roots: 1
99 common: 3
101 common: 3
100 missing: 0
102 missing: 0
101 common heads: 01241442b3c2 b5714e113bc0
103 common heads: 01241442b3c2 b5714e113bc0
102
104
103 % -- a -> b set (tip only)
105 % -- a -> b set (tip only)
104 comparing with b
106 comparing with b
105 query 1; heads
107 query 1; heads
106 searching for changes
108 searching for changes
107 all local changesets known remotely
109 all local changesets known remotely
108 elapsed time: * seconds (glob)
110 elapsed time: * seconds (glob)
111 round-trips: 1
109 heads summary:
112 heads summary:
110 total common heads: 1
113 total common heads: 1
111 also local heads: 1
114 also local heads: 1
112 also remote heads: 0
115 also remote heads: 0
113 both: 0
116 both: 0
114 local heads: 2
117 local heads: 2
115 common: 1
118 common: 1
116 missing: 1
119 missing: 1
117 remote heads: 3
120 remote heads: 3
118 common: 0
121 common: 0
119 unknown: 3
122 unknown: 3
120 local changesets: 7
123 local changesets: 7
121 common: 6
124 common: 6
122 heads: 1
125 heads: 1
123 roots: 1
126 roots: 1
124 missing: 1
127 missing: 1
125 heads: 1
128 heads: 1
126 roots: 1
129 roots: 1
127 first undecided set: 6
130 first undecided set: 6
128 heads: 2
131 heads: 2
129 roots: 1
132 roots: 1
130 common: 5
133 common: 5
131 missing: 1
134 missing: 1
132 common heads: b5714e113bc0
135 common heads: b5714e113bc0
133
136
134 % -- b -> a tree
137 % -- b -> a tree
135 comparing with a
138 comparing with a
136 searching for changes
139 searching for changes
137 unpruned common: 01241442b3c2 b5714e113bc0
140 unpruned common: 01241442b3c2 b5714e113bc0
138 elapsed time: * seconds (glob)
141 elapsed time: * seconds (glob)
142 round-trips: 1
139 heads summary:
143 heads summary:
140 total common heads: 2
144 total common heads: 2
141 also local heads: 1
145 also local heads: 1
142 also remote heads: 2
146 also remote heads: 2
143 both: 1
147 both: 1
144 local heads: 3
148 local heads: 3
145 common: 1
149 common: 1
146 missing: 2
150 missing: 2
147 remote heads: 2
151 remote heads: 2
148 common: 2
152 common: 2
149 unknown: 0
153 unknown: 0
150 local changesets: 15
154 local changesets: 15
151 common: 7
155 common: 7
152 heads: 2
156 heads: 2
153 roots: 1
157 roots: 1
154 missing: 8
158 missing: 8
155 heads: 2
159 heads: 2
156 roots: 2
160 roots: 2
157 first undecided set: 8
161 first undecided set: 8
158 heads: 2
162 heads: 2
159 roots: 2
163 roots: 2
160 common: 0
164 common: 0
161 missing: 8
165 missing: 8
162 common heads: 01241442b3c2 b5714e113bc0
166 common heads: 01241442b3c2 b5714e113bc0
163
167
164 % -- b -> a set
168 % -- b -> a set
165 comparing with a
169 comparing with a
166 query 1; heads
170 query 1; heads
167 searching for changes
171 searching for changes
168 all remote heads known locally
172 all remote heads known locally
169 elapsed time: * seconds (glob)
173 elapsed time: * seconds (glob)
174 round-trips: 1
170 heads summary:
175 heads summary:
171 total common heads: 2
176 total common heads: 2
172 also local heads: 1
177 also local heads: 1
173 also remote heads: 2
178 also remote heads: 2
174 both: 1
179 both: 1
175 local heads: 3
180 local heads: 3
176 common: 1
181 common: 1
177 missing: 2
182 missing: 2
178 remote heads: 2
183 remote heads: 2
179 common: 2
184 common: 2
180 unknown: 0
185 unknown: 0
181 local changesets: 15
186 local changesets: 15
182 common: 7
187 common: 7
183 heads: 2
188 heads: 2
184 roots: 1
189 roots: 1
185 missing: 8
190 missing: 8
186 heads: 2
191 heads: 2
187 roots: 2
192 roots: 2
188 first undecided set: 8
193 first undecided set: 8
189 heads: 2
194 heads: 2
190 roots: 2
195 roots: 2
191 common: 0
196 common: 0
192 missing: 8
197 missing: 8
193 common heads: 01241442b3c2 b5714e113bc0
198 common heads: 01241442b3c2 b5714e113bc0
194
199
195 % -- b -> a set (tip only)
200 % -- b -> a set (tip only)
196 comparing with a
201 comparing with a
197 query 1; heads
202 query 1; heads
198 searching for changes
203 searching for changes
199 all remote heads known locally
204 all remote heads known locally
200 elapsed time: * seconds (glob)
205 elapsed time: * seconds (glob)
206 round-trips: 1
201 heads summary:
207 heads summary:
202 total common heads: 2
208 total common heads: 2
203 also local heads: 1
209 also local heads: 1
204 also remote heads: 2
210 also remote heads: 2
205 both: 1
211 both: 1
206 local heads: 3
212 local heads: 3
207 common: 1
213 common: 1
208 missing: 2
214 missing: 2
209 remote heads: 2
215 remote heads: 2
210 common: 2
216 common: 2
211 unknown: 0
217 unknown: 0
212 local changesets: 15
218 local changesets: 15
213 common: 7
219 common: 7
214 heads: 2
220 heads: 2
215 roots: 1
221 roots: 1
216 missing: 8
222 missing: 8
217 heads: 2
223 heads: 2
218 roots: 2
224 roots: 2
219 first undecided set: 8
225 first undecided set: 8
220 heads: 2
226 heads: 2
221 roots: 2
227 roots: 2
222 common: 0
228 common: 0
223 missing: 8
229 missing: 8
224 common heads: 01241442b3c2 b5714e113bc0
230 common heads: 01241442b3c2 b5714e113bc0
225
231
226
232
227 Many new:
233 Many new:
228
234
229 $ testdesc '-ra1 -ra2' '-rb' '
235 $ testdesc '-ra1 -ra2' '-rb' '
230 > +2:f +3:a1 +3:b
236 > +2:f +3:a1 +3:b
231 > <f +30 :a2'
237 > <f +30 :a2'
232
238
233 % -- a -> b tree
239 % -- a -> b tree
234 comparing with b
240 comparing with b
235 searching for changes
241 searching for changes
236 unpruned common: bebd167eb94d
242 unpruned common: bebd167eb94d
237 elapsed time: * seconds (glob)
243 elapsed time: * seconds (glob)
244 round-trips: 2
238 heads summary:
245 heads summary:
239 total common heads: 1
246 total common heads: 1
240 also local heads: 1
247 also local heads: 1
241 also remote heads: 0
248 also remote heads: 0
242 both: 0
249 both: 0
243 local heads: 2
250 local heads: 2
244 common: 1
251 common: 1
245 missing: 1
252 missing: 1
246 remote heads: 1
253 remote heads: 1
247 common: 0
254 common: 0
248 unknown: 1
255 unknown: 1
249 local changesets: 35
256 local changesets: 35
250 common: 5
257 common: 5
251 heads: 1
258 heads: 1
252 roots: 1
259 roots: 1
253 missing: 30
260 missing: 30
254 heads: 1
261 heads: 1
255 roots: 1
262 roots: 1
256 first undecided set: 34
263 first undecided set: 34
257 heads: 2
264 heads: 2
258 roots: 1
265 roots: 1
259 common: 4
266 common: 4
260 missing: 30
267 missing: 30
261 common heads: bebd167eb94d
268 common heads: bebd167eb94d
262
269
263 % -- a -> b set
270 % -- a -> b set
264 comparing with b
271 comparing with b
265 query 1; heads
272 query 1; heads
266 searching for changes
273 searching for changes
267 taking initial sample
274 taking initial sample
268 searching: 2 queries
275 searching: 2 queries
269 query 2; still undecided: 29, sample size is: 29
276 query 2; still undecided: 29, sample size is: 29
270 2 total queries in *.????s (glob)
277 2 total queries in *.????s (glob)
271 elapsed time: * seconds (glob)
278 elapsed time: * seconds (glob)
279 round-trips: 2
272 heads summary:
280 heads summary:
273 total common heads: 1
281 total common heads: 1
274 also local heads: 1
282 also local heads: 1
275 also remote heads: 0
283 also remote heads: 0
276 both: 0
284 both: 0
277 local heads: 2
285 local heads: 2
278 common: 1
286 common: 1
279 missing: 1
287 missing: 1
280 remote heads: 1
288 remote heads: 1
281 common: 0
289 common: 0
282 unknown: 1
290 unknown: 1
283 local changesets: 35
291 local changesets: 35
284 common: 5
292 common: 5
285 heads: 1
293 heads: 1
286 roots: 1
294 roots: 1
287 missing: 30
295 missing: 30
288 heads: 1
296 heads: 1
289 roots: 1
297 roots: 1
290 first undecided set: 34
298 first undecided set: 34
291 heads: 2
299 heads: 2
292 roots: 1
300 roots: 1
293 common: 4
301 common: 4
294 missing: 30
302 missing: 30
295 common heads: bebd167eb94d
303 common heads: bebd167eb94d
296
304
297 % -- a -> b set (tip only)
305 % -- a -> b set (tip only)
298 comparing with b
306 comparing with b
299 query 1; heads
307 query 1; heads
300 searching for changes
308 searching for changes
301 taking quick initial sample
309 taking quick initial sample
302 searching: 2 queries
310 searching: 2 queries
303 query 2; still undecided: 31, sample size is: 31
311 query 2; still undecided: 31, sample size is: 31
304 2 total queries in *.????s (glob)
312 2 total queries in *.????s (glob)
305 elapsed time: * seconds (glob)
313 elapsed time: * seconds (glob)
314 round-trips: 2
306 heads summary:
315 heads summary:
307 total common heads: 1
316 total common heads: 1
308 also local heads: 0
317 also local heads: 0
309 also remote heads: 0
318 also remote heads: 0
310 both: 0
319 both: 0
311 local heads: 2
320 local heads: 2
312 common: 0
321 common: 0
313 missing: 2
322 missing: 2
314 remote heads: 1
323 remote heads: 1
315 common: 0
324 common: 0
316 unknown: 1
325 unknown: 1
317 local changesets: 35
326 local changesets: 35
318 common: 2
327 common: 2
319 heads: 1
328 heads: 1
320 roots: 1
329 roots: 1
321 missing: 33
330 missing: 33
322 heads: 2
331 heads: 2
323 roots: 2
332 roots: 2
324 first undecided set: 35
333 first undecided set: 35
325 heads: 2
334 heads: 2
326 roots: 1
335 roots: 1
327 common: 2
336 common: 2
328 missing: 33
337 missing: 33
329 common heads: 66f7d451a68b
338 common heads: 66f7d451a68b
330
339
331 % -- b -> a tree
340 % -- b -> a tree
332 comparing with a
341 comparing with a
333 searching for changes
342 searching for changes
334 unpruned common: 66f7d451a68b bebd167eb94d
343 unpruned common: 66f7d451a68b bebd167eb94d
335 elapsed time: * seconds (glob)
344 elapsed time: * seconds (glob)
345 round-trips: 4
336 heads summary:
346 heads summary:
337 total common heads: 1
347 total common heads: 1
338 also local heads: 0
348 also local heads: 0
339 also remote heads: 1
349 also remote heads: 1
340 both: 0
350 both: 0
341 local heads: 1
351 local heads: 1
342 common: 0
352 common: 0
343 missing: 1
353 missing: 1
344 remote heads: 2
354 remote heads: 2
345 common: 1
355 common: 1
346 unknown: 1
356 unknown: 1
347 local changesets: 8
357 local changesets: 8
348 common: 5
358 common: 5
349 heads: 1
359 heads: 1
350 roots: 1
360 roots: 1
351 missing: 3
361 missing: 3
352 heads: 1
362 heads: 1
353 roots: 1
363 roots: 1
354 first undecided set: 3
364 first undecided set: 3
355 heads: 1
365 heads: 1
356 roots: 1
366 roots: 1
357 common: 0
367 common: 0
358 missing: 3
368 missing: 3
359 common heads: bebd167eb94d
369 common heads: bebd167eb94d
360
370
361 % -- b -> a set
371 % -- b -> a set
362 comparing with a
372 comparing with a
363 query 1; heads
373 query 1; heads
364 searching for changes
374 searching for changes
365 taking initial sample
375 taking initial sample
366 searching: 2 queries
376 searching: 2 queries
367 query 2; still undecided: 2, sample size is: 2
377 query 2; still undecided: 2, sample size is: 2
368 2 total queries in *.????s (glob)
378 2 total queries in *.????s (glob)
369 elapsed time: * seconds (glob)
379 elapsed time: * seconds (glob)
380 round-trips: 2
370 heads summary:
381 heads summary:
371 total common heads: 1
382 total common heads: 1
372 also local heads: 0
383 also local heads: 0
373 also remote heads: 1
384 also remote heads: 1
374 both: 0
385 both: 0
375 local heads: 1
386 local heads: 1
376 common: 0
387 common: 0
377 missing: 1
388 missing: 1
378 remote heads: 2
389 remote heads: 2
379 common: 1
390 common: 1
380 unknown: 1
391 unknown: 1
381 local changesets: 8
392 local changesets: 8
382 common: 5
393 common: 5
383 heads: 1
394 heads: 1
384 roots: 1
395 roots: 1
385 missing: 3
396 missing: 3
386 heads: 1
397 heads: 1
387 roots: 1
398 roots: 1
388 first undecided set: 3
399 first undecided set: 3
389 heads: 1
400 heads: 1
390 roots: 1
401 roots: 1
391 common: 0
402 common: 0
392 missing: 3
403 missing: 3
393 common heads: bebd167eb94d
404 common heads: bebd167eb94d
394
405
395 % -- b -> a set (tip only)
406 % -- b -> a set (tip only)
396 comparing with a
407 comparing with a
397 query 1; heads
408 query 1; heads
398 searching for changes
409 searching for changes
399 taking initial sample
410 taking initial sample
400 searching: 2 queries
411 searching: 2 queries
401 query 2; still undecided: 2, sample size is: 2
412 query 2; still undecided: 2, sample size is: 2
402 2 total queries in *.????s (glob)
413 2 total queries in *.????s (glob)
403 elapsed time: * seconds (glob)
414 elapsed time: * seconds (glob)
415 round-trips: 2
404 heads summary:
416 heads summary:
405 total common heads: 1
417 total common heads: 1
406 also local heads: 0
418 also local heads: 0
407 also remote heads: 1
419 also remote heads: 1
408 both: 0
420 both: 0
409 local heads: 1
421 local heads: 1
410 common: 0
422 common: 0
411 missing: 1
423 missing: 1
412 remote heads: 2
424 remote heads: 2
413 common: 1
425 common: 1
414 unknown: 1
426 unknown: 1
415 local changesets: 8
427 local changesets: 8
416 common: 5
428 common: 5
417 heads: 1
429 heads: 1
418 roots: 1
430 roots: 1
419 missing: 3
431 missing: 3
420 heads: 1
432 heads: 1
421 roots: 1
433 roots: 1
422 first undecided set: 3
434 first undecided set: 3
423 heads: 1
435 heads: 1
424 roots: 1
436 roots: 1
425 common: 0
437 common: 0
426 missing: 3
438 missing: 3
427 common heads: bebd167eb94d
439 common heads: bebd167eb94d
428
440
429 Both sides many new with stub:
441 Both sides many new with stub:
430
442
431 $ testdesc '-ra1 -ra2' '-rb' '
443 $ testdesc '-ra1 -ra2' '-rb' '
432 > +2:f +2:a1 +30 :b
444 > +2:f +2:a1 +30 :b
433 > <f +30 :a2'
445 > <f +30 :a2'
434
446
435 % -- a -> b tree
447 % -- a -> b tree
436 comparing with b
448 comparing with b
437 searching for changes
449 searching for changes
438 unpruned common: 2dc09a01254d
450 unpruned common: 2dc09a01254d
439 elapsed time: * seconds (glob)
451 elapsed time: * seconds (glob)
452 round-trips: 4
440 heads summary:
453 heads summary:
441 total common heads: 1
454 total common heads: 1
442 also local heads: 1
455 also local heads: 1
443 also remote heads: 0
456 also remote heads: 0
444 both: 0
457 both: 0
445 local heads: 2
458 local heads: 2
446 common: 1
459 common: 1
447 missing: 1
460 missing: 1
448 remote heads: 1
461 remote heads: 1
449 common: 0
462 common: 0
450 unknown: 1
463 unknown: 1
451 local changesets: 34
464 local changesets: 34
452 common: 4
465 common: 4
453 heads: 1
466 heads: 1
454 roots: 1
467 roots: 1
455 missing: 30
468 missing: 30
456 heads: 1
469 heads: 1
457 roots: 1
470 roots: 1
458 first undecided set: 33
471 first undecided set: 33
459 heads: 2
472 heads: 2
460 roots: 1
473 roots: 1
461 common: 3
474 common: 3
462 missing: 30
475 missing: 30
463 common heads: 2dc09a01254d
476 common heads: 2dc09a01254d
464
477
465 % -- a -> b set
478 % -- a -> b set
466 comparing with b
479 comparing with b
467 query 1; heads
480 query 1; heads
468 searching for changes
481 searching for changes
469 taking initial sample
482 taking initial sample
470 searching: 2 queries
483 searching: 2 queries
471 query 2; still undecided: 29, sample size is: 29
484 query 2; still undecided: 29, sample size is: 29
472 2 total queries in *.????s (glob)
485 2 total queries in *.????s (glob)
473 elapsed time: * seconds (glob)
486 elapsed time: * seconds (glob)
487 round-trips: 2
474 heads summary:
488 heads summary:
475 total common heads: 1
489 total common heads: 1
476 also local heads: 1
490 also local heads: 1
477 also remote heads: 0
491 also remote heads: 0
478 both: 0
492 both: 0
479 local heads: 2
493 local heads: 2
480 common: 1
494 common: 1
481 missing: 1
495 missing: 1
482 remote heads: 1
496 remote heads: 1
483 common: 0
497 common: 0
484 unknown: 1
498 unknown: 1
485 local changesets: 34
499 local changesets: 34
486 common: 4
500 common: 4
487 heads: 1
501 heads: 1
488 roots: 1
502 roots: 1
489 missing: 30
503 missing: 30
490 heads: 1
504 heads: 1
491 roots: 1
505 roots: 1
492 first undecided set: 33
506 first undecided set: 33
493 heads: 2
507 heads: 2
494 roots: 1
508 roots: 1
495 common: 3
509 common: 3
496 missing: 30
510 missing: 30
497 common heads: 2dc09a01254d
511 common heads: 2dc09a01254d
498
512
499 % -- a -> b set (tip only)
513 % -- a -> b set (tip only)
500 comparing with b
514 comparing with b
501 query 1; heads
515 query 1; heads
502 searching for changes
516 searching for changes
503 taking quick initial sample
517 taking quick initial sample
504 searching: 2 queries
518 searching: 2 queries
505 query 2; still undecided: 31, sample size is: 31
519 query 2; still undecided: 31, sample size is: 31
506 2 total queries in *.????s (glob)
520 2 total queries in *.????s (glob)
507 elapsed time: * seconds (glob)
521 elapsed time: * seconds (glob)
522 round-trips: 2
508 heads summary:
523 heads summary:
509 total common heads: 1
524 total common heads: 1
510 also local heads: 0
525 also local heads: 0
511 also remote heads: 0
526 also remote heads: 0
512 both: 0
527 both: 0
513 local heads: 2
528 local heads: 2
514 common: 0
529 common: 0
515 missing: 2
530 missing: 2
516 remote heads: 1
531 remote heads: 1
517 common: 0
532 common: 0
518 unknown: 1
533 unknown: 1
519 local changesets: 34
534 local changesets: 34
520 common: 2
535 common: 2
521 heads: 1
536 heads: 1
522 roots: 1
537 roots: 1
523 missing: 32
538 missing: 32
524 heads: 2
539 heads: 2
525 roots: 2
540 roots: 2
526 first undecided set: 34
541 first undecided set: 34
527 heads: 2
542 heads: 2
528 roots: 1
543 roots: 1
529 common: 2
544 common: 2
530 missing: 32
545 missing: 32
531 common heads: 66f7d451a68b
546 common heads: 66f7d451a68b
532
547
533 % -- b -> a tree
548 % -- b -> a tree
534 comparing with a
549 comparing with a
535 searching for changes
550 searching for changes
536 unpruned common: 2dc09a01254d 66f7d451a68b
551 unpruned common: 2dc09a01254d 66f7d451a68b
537 elapsed time: * seconds (glob)
552 elapsed time: * seconds (glob)
553 round-trips: 4
538 heads summary:
554 heads summary:
539 total common heads: 1
555 total common heads: 1
540 also local heads: 0
556 also local heads: 0
541 also remote heads: 1
557 also remote heads: 1
542 both: 0
558 both: 0
543 local heads: 1
559 local heads: 1
544 common: 0
560 common: 0
545 missing: 1
561 missing: 1
546 remote heads: 2
562 remote heads: 2
547 common: 1
563 common: 1
548 unknown: 1
564 unknown: 1
549 local changesets: 34
565 local changesets: 34
550 common: 4
566 common: 4
551 heads: 1
567 heads: 1
552 roots: 1
568 roots: 1
553 missing: 30
569 missing: 30
554 heads: 1
570 heads: 1
555 roots: 1
571 roots: 1
556 first undecided set: 30
572 first undecided set: 30
557 heads: 1
573 heads: 1
558 roots: 1
574 roots: 1
559 common: 0
575 common: 0
560 missing: 30
576 missing: 30
561 common heads: 2dc09a01254d
577 common heads: 2dc09a01254d
562
578
563 % -- b -> a set
579 % -- b -> a set
564 comparing with a
580 comparing with a
565 query 1; heads
581 query 1; heads
566 searching for changes
582 searching for changes
567 taking initial sample
583 taking initial sample
568 searching: 2 queries
584 searching: 2 queries
569 query 2; still undecided: 29, sample size is: 29
585 query 2; still undecided: 29, sample size is: 29
570 2 total queries in *.????s (glob)
586 2 total queries in *.????s (glob)
571 elapsed time: * seconds (glob)
587 elapsed time: * seconds (glob)
588 round-trips: 2
572 heads summary:
589 heads summary:
573 total common heads: 1
590 total common heads: 1
574 also local heads: 0
591 also local heads: 0
575 also remote heads: 1
592 also remote heads: 1
576 both: 0
593 both: 0
577 local heads: 1
594 local heads: 1
578 common: 0
595 common: 0
579 missing: 1
596 missing: 1
580 remote heads: 2
597 remote heads: 2
581 common: 1
598 common: 1
582 unknown: 1
599 unknown: 1
583 local changesets: 34
600 local changesets: 34
584 common: 4
601 common: 4
585 heads: 1
602 heads: 1
586 roots: 1
603 roots: 1
587 missing: 30
604 missing: 30
588 heads: 1
605 heads: 1
589 roots: 1
606 roots: 1
590 first undecided set: 30
607 first undecided set: 30
591 heads: 1
608 heads: 1
592 roots: 1
609 roots: 1
593 common: 0
610 common: 0
594 missing: 30
611 missing: 30
595 common heads: 2dc09a01254d
612 common heads: 2dc09a01254d
596
613
597 % -- b -> a set (tip only)
614 % -- b -> a set (tip only)
598 comparing with a
615 comparing with a
599 query 1; heads
616 query 1; heads
600 searching for changes
617 searching for changes
601 taking initial sample
618 taking initial sample
602 searching: 2 queries
619 searching: 2 queries
603 query 2; still undecided: 29, sample size is: 29
620 query 2; still undecided: 29, sample size is: 29
604 2 total queries in *.????s (glob)
621 2 total queries in *.????s (glob)
605 elapsed time: * seconds (glob)
622 elapsed time: * seconds (glob)
623 round-trips: 2
606 heads summary:
624 heads summary:
607 total common heads: 1
625 total common heads: 1
608 also local heads: 0
626 also local heads: 0
609 also remote heads: 1
627 also remote heads: 1
610 both: 0
628 both: 0
611 local heads: 1
629 local heads: 1
612 common: 0
630 common: 0
613 missing: 1
631 missing: 1
614 remote heads: 2
632 remote heads: 2
615 common: 1
633 common: 1
616 unknown: 1
634 unknown: 1
617 local changesets: 34
635 local changesets: 34
618 common: 4
636 common: 4
619 heads: 1
637 heads: 1
620 roots: 1
638 roots: 1
621 missing: 30
639 missing: 30
622 heads: 1
640 heads: 1
623 roots: 1
641 roots: 1
624 first undecided set: 30
642 first undecided set: 30
625 heads: 1
643 heads: 1
626 roots: 1
644 roots: 1
627 common: 0
645 common: 0
628 missing: 30
646 missing: 30
629 common heads: 2dc09a01254d
647 common heads: 2dc09a01254d
630
648
631
649
632 Both many new:
650 Both many new:
633
651
634 $ testdesc '-ra' '-rb' '
652 $ testdesc '-ra' '-rb' '
635 > +2:f +30 :b
653 > +2:f +30 :b
636 > <f +30 :a'
654 > <f +30 :a'
637
655
638 % -- a -> b tree
656 % -- a -> b tree
639 comparing with b
657 comparing with b
640 searching for changes
658 searching for changes
641 unpruned common: 66f7d451a68b
659 unpruned common: 66f7d451a68b
642 elapsed time: * seconds (glob)
660 elapsed time: * seconds (glob)
661 round-trips: 4
643 heads summary:
662 heads summary:
644 total common heads: 1
663 total common heads: 1
645 also local heads: 0
664 also local heads: 0
646 also remote heads: 0
665 also remote heads: 0
647 both: 0
666 both: 0
648 local heads: 1
667 local heads: 1
649 common: 0
668 common: 0
650 missing: 1
669 missing: 1
651 remote heads: 1
670 remote heads: 1
652 common: 0
671 common: 0
653 unknown: 1
672 unknown: 1
654 local changesets: 32
673 local changesets: 32
655 common: 2
674 common: 2
656 heads: 1
675 heads: 1
657 roots: 1
676 roots: 1
658 missing: 30
677 missing: 30
659 heads: 1
678 heads: 1
660 roots: 1
679 roots: 1
661 first undecided set: 32
680 first undecided set: 32
662 heads: 1
681 heads: 1
663 roots: 1
682 roots: 1
664 common: 2
683 common: 2
665 missing: 30
684 missing: 30
666 common heads: 66f7d451a68b
685 common heads: 66f7d451a68b
667
686
668 % -- a -> b set
687 % -- a -> b set
669 comparing with b
688 comparing with b
670 query 1; heads
689 query 1; heads
671 searching for changes
690 searching for changes
672 taking quick initial sample
691 taking quick initial sample
673 searching: 2 queries
692 searching: 2 queries
674 query 2; still undecided: 31, sample size is: 31
693 query 2; still undecided: 31, sample size is: 31
675 2 total queries in *.????s (glob)
694 2 total queries in *.????s (glob)
676 elapsed time: * seconds (glob)
695 elapsed time: * seconds (glob)
696 round-trips: 2
677 heads summary:
697 heads summary:
678 total common heads: 1
698 total common heads: 1
679 also local heads: 0
699 also local heads: 0
680 also remote heads: 0
700 also remote heads: 0
681 both: 0
701 both: 0
682 local heads: 1
702 local heads: 1
683 common: 0
703 common: 0
684 missing: 1
704 missing: 1
685 remote heads: 1
705 remote heads: 1
686 common: 0
706 common: 0
687 unknown: 1
707 unknown: 1
688 local changesets: 32
708 local changesets: 32
689 common: 2
709 common: 2
690 heads: 1
710 heads: 1
691 roots: 1
711 roots: 1
692 missing: 30
712 missing: 30
693 heads: 1
713 heads: 1
694 roots: 1
714 roots: 1
695 first undecided set: 32
715 first undecided set: 32
696 heads: 1
716 heads: 1
697 roots: 1
717 roots: 1
698 common: 2
718 common: 2
699 missing: 30
719 missing: 30
700 common heads: 66f7d451a68b
720 common heads: 66f7d451a68b
701
721
702 % -- a -> b set (tip only)
722 % -- a -> b set (tip only)
703 comparing with b
723 comparing with b
704 query 1; heads
724 query 1; heads
705 searching for changes
725 searching for changes
706 taking quick initial sample
726 taking quick initial sample
707 searching: 2 queries
727 searching: 2 queries
708 query 2; still undecided: 31, sample size is: 31
728 query 2; still undecided: 31, sample size is: 31
709 2 total queries in *.????s (glob)
729 2 total queries in *.????s (glob)
710 elapsed time: * seconds (glob)
730 elapsed time: * seconds (glob)
731 round-trips: 2
711 heads summary:
732 heads summary:
712 total common heads: 1
733 total common heads: 1
713 also local heads: 0
734 also local heads: 0
714 also remote heads: 0
735 also remote heads: 0
715 both: 0
736 both: 0
716 local heads: 1
737 local heads: 1
717 common: 0
738 common: 0
718 missing: 1
739 missing: 1
719 remote heads: 1
740 remote heads: 1
720 common: 0
741 common: 0
721 unknown: 1
742 unknown: 1
722 local changesets: 32
743 local changesets: 32
723 common: 2
744 common: 2
724 heads: 1
745 heads: 1
725 roots: 1
746 roots: 1
726 missing: 30
747 missing: 30
727 heads: 1
748 heads: 1
728 roots: 1
749 roots: 1
729 first undecided set: 32
750 first undecided set: 32
730 heads: 1
751 heads: 1
731 roots: 1
752 roots: 1
732 common: 2
753 common: 2
733 missing: 30
754 missing: 30
734 common heads: 66f7d451a68b
755 common heads: 66f7d451a68b
735
756
736 % -- b -> a tree
757 % -- b -> a tree
737 comparing with a
758 comparing with a
738 searching for changes
759 searching for changes
739 unpruned common: 66f7d451a68b
760 unpruned common: 66f7d451a68b
740 elapsed time: * seconds (glob)
761 elapsed time: * seconds (glob)
762 round-trips: 4
741 heads summary:
763 heads summary:
742 total common heads: 1
764 total common heads: 1
743 also local heads: 0
765 also local heads: 0
744 also remote heads: 0
766 also remote heads: 0
745 both: 0
767 both: 0
746 local heads: 1
768 local heads: 1
747 common: 0
769 common: 0
748 missing: 1
770 missing: 1
749 remote heads: 1
771 remote heads: 1
750 common: 0
772 common: 0
751 unknown: 1
773 unknown: 1
752 local changesets: 32
774 local changesets: 32
753 common: 2
775 common: 2
754 heads: 1
776 heads: 1
755 roots: 1
777 roots: 1
756 missing: 30
778 missing: 30
757 heads: 1
779 heads: 1
758 roots: 1
780 roots: 1
759 first undecided set: 32
781 first undecided set: 32
760 heads: 1
782 heads: 1
761 roots: 1
783 roots: 1
762 common: 2
784 common: 2
763 missing: 30
785 missing: 30
764 common heads: 66f7d451a68b
786 common heads: 66f7d451a68b
765
787
766 % -- b -> a set
788 % -- b -> a set
767 comparing with a
789 comparing with a
768 query 1; heads
790 query 1; heads
769 searching for changes
791 searching for changes
770 taking quick initial sample
792 taking quick initial sample
771 searching: 2 queries
793 searching: 2 queries
772 query 2; still undecided: 31, sample size is: 31
794 query 2; still undecided: 31, sample size is: 31
773 2 total queries in *.????s (glob)
795 2 total queries in *.????s (glob)
774 elapsed time: * seconds (glob)
796 elapsed time: * seconds (glob)
797 round-trips: 2
775 heads summary:
798 heads summary:
776 total common heads: 1
799 total common heads: 1
777 also local heads: 0
800 also local heads: 0
778 also remote heads: 0
801 also remote heads: 0
779 both: 0
802 both: 0
780 local heads: 1
803 local heads: 1
781 common: 0
804 common: 0
782 missing: 1
805 missing: 1
783 remote heads: 1
806 remote heads: 1
784 common: 0
807 common: 0
785 unknown: 1
808 unknown: 1
786 local changesets: 32
809 local changesets: 32
787 common: 2
810 common: 2
788 heads: 1
811 heads: 1
789 roots: 1
812 roots: 1
790 missing: 30
813 missing: 30
791 heads: 1
814 heads: 1
792 roots: 1
815 roots: 1
793 first undecided set: 32
816 first undecided set: 32
794 heads: 1
817 heads: 1
795 roots: 1
818 roots: 1
796 common: 2
819 common: 2
797 missing: 30
820 missing: 30
798 common heads: 66f7d451a68b
821 common heads: 66f7d451a68b
799
822
800 % -- b -> a set (tip only)
823 % -- b -> a set (tip only)
801 comparing with a
824 comparing with a
802 query 1; heads
825 query 1; heads
803 searching for changes
826 searching for changes
804 taking quick initial sample
827 taking quick initial sample
805 searching: 2 queries
828 searching: 2 queries
806 query 2; still undecided: 31, sample size is: 31
829 query 2; still undecided: 31, sample size is: 31
807 2 total queries in *.????s (glob)
830 2 total queries in *.????s (glob)
808 elapsed time: * seconds (glob)
831 elapsed time: * seconds (glob)
832 round-trips: 2
809 heads summary:
833 heads summary:
810 total common heads: 1
834 total common heads: 1
811 also local heads: 0
835 also local heads: 0
812 also remote heads: 0
836 also remote heads: 0
813 both: 0
837 both: 0
814 local heads: 1
838 local heads: 1
815 common: 0
839 common: 0
816 missing: 1
840 missing: 1
817 remote heads: 1
841 remote heads: 1
818 common: 0
842 common: 0
819 unknown: 1
843 unknown: 1
820 local changesets: 32
844 local changesets: 32
821 common: 2
845 common: 2
822 heads: 1
846 heads: 1
823 roots: 1
847 roots: 1
824 missing: 30
848 missing: 30
825 heads: 1
849 heads: 1
826 roots: 1
850 roots: 1
827 first undecided set: 32
851 first undecided set: 32
828 heads: 1
852 heads: 1
829 roots: 1
853 roots: 1
830 common: 2
854 common: 2
831 missing: 30
855 missing: 30
832 common heads: 66f7d451a68b
856 common heads: 66f7d451a68b
833
857
834
858
835 Both many new skewed:
859 Both many new skewed:
836
860
837 $ testdesc '-ra' '-rb' '
861 $ testdesc '-ra' '-rb' '
838 > +2:f +30 :b
862 > +2:f +30 :b
839 > <f +50 :a'
863 > <f +50 :a'
840
864
841 % -- a -> b tree
865 % -- a -> b tree
842 comparing with b
866 comparing with b
843 searching for changes
867 searching for changes
844 unpruned common: 66f7d451a68b
868 unpruned common: 66f7d451a68b
845 elapsed time: * seconds (glob)
869 elapsed time: * seconds (glob)
870 round-trips: 4
846 heads summary:
871 heads summary:
847 total common heads: 1
872 total common heads: 1
848 also local heads: 0
873 also local heads: 0
849 also remote heads: 0
874 also remote heads: 0
850 both: 0
875 both: 0
851 local heads: 1
876 local heads: 1
852 common: 0
877 common: 0
853 missing: 1
878 missing: 1
854 remote heads: 1
879 remote heads: 1
855 common: 0
880 common: 0
856 unknown: 1
881 unknown: 1
857 local changesets: 52
882 local changesets: 52
858 common: 2
883 common: 2
859 heads: 1
884 heads: 1
860 roots: 1
885 roots: 1
861 missing: 50
886 missing: 50
862 heads: 1
887 heads: 1
863 roots: 1
888 roots: 1
864 first undecided set: 52
889 first undecided set: 52
865 heads: 1
890 heads: 1
866 roots: 1
891 roots: 1
867 common: 2
892 common: 2
868 missing: 50
893 missing: 50
869 common heads: 66f7d451a68b
894 common heads: 66f7d451a68b
870
895
871 % -- a -> b set
896 % -- a -> b set
872 comparing with b
897 comparing with b
873 query 1; heads
898 query 1; heads
874 searching for changes
899 searching for changes
875 taking quick initial sample
900 taking quick initial sample
876 searching: 2 queries
901 searching: 2 queries
877 query 2; still undecided: 51, sample size is: 51
902 query 2; still undecided: 51, sample size is: 51
878 2 total queries in *.????s (glob)
903 2 total queries in *.????s (glob)
879 elapsed time: * seconds (glob)
904 elapsed time: * seconds (glob)
905 round-trips: 2
880 heads summary:
906 heads summary:
881 total common heads: 1
907 total common heads: 1
882 also local heads: 0
908 also local heads: 0
883 also remote heads: 0
909 also remote heads: 0
884 both: 0
910 both: 0
885 local heads: 1
911 local heads: 1
886 common: 0
912 common: 0
887 missing: 1
913 missing: 1
888 remote heads: 1
914 remote heads: 1
889 common: 0
915 common: 0
890 unknown: 1
916 unknown: 1
891 local changesets: 52
917 local changesets: 52
892 common: 2
918 common: 2
893 heads: 1
919 heads: 1
894 roots: 1
920 roots: 1
895 missing: 50
921 missing: 50
896 heads: 1
922 heads: 1
897 roots: 1
923 roots: 1
898 first undecided set: 52
924 first undecided set: 52
899 heads: 1
925 heads: 1
900 roots: 1
926 roots: 1
901 common: 2
927 common: 2
902 missing: 50
928 missing: 50
903 common heads: 66f7d451a68b
929 common heads: 66f7d451a68b
904
930
905 % -- a -> b set (tip only)
931 % -- a -> b set (tip only)
906 comparing with b
932 comparing with b
907 query 1; heads
933 query 1; heads
908 searching for changes
934 searching for changes
909 taking quick initial sample
935 taking quick initial sample
910 searching: 2 queries
936 searching: 2 queries
911 query 2; still undecided: 51, sample size is: 51
937 query 2; still undecided: 51, sample size is: 51
912 2 total queries in *.????s (glob)
938 2 total queries in *.????s (glob)
913 elapsed time: * seconds (glob)
939 elapsed time: * seconds (glob)
940 round-trips: 2
914 heads summary:
941 heads summary:
915 total common heads: 1
942 total common heads: 1
916 also local heads: 0
943 also local heads: 0
917 also remote heads: 0
944 also remote heads: 0
918 both: 0
945 both: 0
919 local heads: 1
946 local heads: 1
920 common: 0
947 common: 0
921 missing: 1
948 missing: 1
922 remote heads: 1
949 remote heads: 1
923 common: 0
950 common: 0
924 unknown: 1
951 unknown: 1
925 local changesets: 52
952 local changesets: 52
926 common: 2
953 common: 2
927 heads: 1
954 heads: 1
928 roots: 1
955 roots: 1
929 missing: 50
956 missing: 50
930 heads: 1
957 heads: 1
931 roots: 1
958 roots: 1
932 first undecided set: 52
959 first undecided set: 52
933 heads: 1
960 heads: 1
934 roots: 1
961 roots: 1
935 common: 2
962 common: 2
936 missing: 50
963 missing: 50
937 common heads: 66f7d451a68b
964 common heads: 66f7d451a68b
938
965
939 % -- b -> a tree
966 % -- b -> a tree
940 comparing with a
967 comparing with a
941 searching for changes
968 searching for changes
942 unpruned common: 66f7d451a68b
969 unpruned common: 66f7d451a68b
943 elapsed time: * seconds (glob)
970 elapsed time: * seconds (glob)
971 round-trips: 3
944 heads summary:
972 heads summary:
945 total common heads: 1
973 total common heads: 1
946 also local heads: 0
974 also local heads: 0
947 also remote heads: 0
975 also remote heads: 0
948 both: 0
976 both: 0
949 local heads: 1
977 local heads: 1
950 common: 0
978 common: 0
951 missing: 1
979 missing: 1
952 remote heads: 1
980 remote heads: 1
953 common: 0
981 common: 0
954 unknown: 1
982 unknown: 1
955 local changesets: 32
983 local changesets: 32
956 common: 2
984 common: 2
957 heads: 1
985 heads: 1
958 roots: 1
986 roots: 1
959 missing: 30
987 missing: 30
960 heads: 1
988 heads: 1
961 roots: 1
989 roots: 1
962 first undecided set: 32
990 first undecided set: 32
963 heads: 1
991 heads: 1
964 roots: 1
992 roots: 1
965 common: 2
993 common: 2
966 missing: 30
994 missing: 30
967 common heads: 66f7d451a68b
995 common heads: 66f7d451a68b
968
996
969 % -- b -> a set
997 % -- b -> a set
970 comparing with a
998 comparing with a
971 query 1; heads
999 query 1; heads
972 searching for changes
1000 searching for changes
973 taking quick initial sample
1001 taking quick initial sample
974 searching: 2 queries
1002 searching: 2 queries
975 query 2; still undecided: 31, sample size is: 31
1003 query 2; still undecided: 31, sample size is: 31
976 2 total queries in *.????s (glob)
1004 2 total queries in *.????s (glob)
977 elapsed time: * seconds (glob)
1005 elapsed time: * seconds (glob)
1006 round-trips: 2
978 heads summary:
1007 heads summary:
979 total common heads: 1
1008 total common heads: 1
980 also local heads: 0
1009 also local heads: 0
981 also remote heads: 0
1010 also remote heads: 0
982 both: 0
1011 both: 0
983 local heads: 1
1012 local heads: 1
984 common: 0
1013 common: 0
985 missing: 1
1014 missing: 1
986 remote heads: 1
1015 remote heads: 1
987 common: 0
1016 common: 0
988 unknown: 1
1017 unknown: 1
989 local changesets: 32
1018 local changesets: 32
990 common: 2
1019 common: 2
991 heads: 1
1020 heads: 1
992 roots: 1
1021 roots: 1
993 missing: 30
1022 missing: 30
994 heads: 1
1023 heads: 1
995 roots: 1
1024 roots: 1
996 first undecided set: 32
1025 first undecided set: 32
997 heads: 1
1026 heads: 1
998 roots: 1
1027 roots: 1
999 common: 2
1028 common: 2
1000 missing: 30
1029 missing: 30
1001 common heads: 66f7d451a68b
1030 common heads: 66f7d451a68b
1002
1031
1003 % -- b -> a set (tip only)
1032 % -- b -> a set (tip only)
1004 comparing with a
1033 comparing with a
1005 query 1; heads
1034 query 1; heads
1006 searching for changes
1035 searching for changes
1007 taking quick initial sample
1036 taking quick initial sample
1008 searching: 2 queries
1037 searching: 2 queries
1009 query 2; still undecided: 31, sample size is: 31
1038 query 2; still undecided: 31, sample size is: 31
1010 2 total queries in *.????s (glob)
1039 2 total queries in *.????s (glob)
1011 elapsed time: * seconds (glob)
1040 elapsed time: * seconds (glob)
1041 round-trips: 2
1012 heads summary:
1042 heads summary:
1013 total common heads: 1
1043 total common heads: 1
1014 also local heads: 0
1044 also local heads: 0
1015 also remote heads: 0
1045 also remote heads: 0
1016 both: 0
1046 both: 0
1017 local heads: 1
1047 local heads: 1
1018 common: 0
1048 common: 0
1019 missing: 1
1049 missing: 1
1020 remote heads: 1
1050 remote heads: 1
1021 common: 0
1051 common: 0
1022 unknown: 1
1052 unknown: 1
1023 local changesets: 32
1053 local changesets: 32
1024 common: 2
1054 common: 2
1025 heads: 1
1055 heads: 1
1026 roots: 1
1056 roots: 1
1027 missing: 30
1057 missing: 30
1028 heads: 1
1058 heads: 1
1029 roots: 1
1059 roots: 1
1030 first undecided set: 32
1060 first undecided set: 32
1031 heads: 1
1061 heads: 1
1032 roots: 1
1062 roots: 1
1033 common: 2
1063 common: 2
1034 missing: 30
1064 missing: 30
1035 common heads: 66f7d451a68b
1065 common heads: 66f7d451a68b
1036
1066
1037
1067
1038 Both many new on top of long history:
1068 Both many new on top of long history:
1039
1069
1040 $ testdesc '-ra' '-rb' '
1070 $ testdesc '-ra' '-rb' '
1041 > +1000:f +30 :b
1071 > +1000:f +30 :b
1042 > <f +50 :a'
1072 > <f +50 :a'
1043
1073
1044 % -- a -> b tree
1074 % -- a -> b tree
1045 comparing with b
1075 comparing with b
1046 searching for changes
1076 searching for changes
1047 unpruned common: 7ead0cba2838
1077 unpruned common: 7ead0cba2838
1048 elapsed time: * seconds (glob)
1078 elapsed time: * seconds (glob)
1079 round-trips: 4
1049 heads summary:
1080 heads summary:
1050 total common heads: 1
1081 total common heads: 1
1051 also local heads: 0
1082 also local heads: 0
1052 also remote heads: 0
1083 also remote heads: 0
1053 both: 0
1084 both: 0
1054 local heads: 1
1085 local heads: 1
1055 common: 0
1086 common: 0
1056 missing: 1
1087 missing: 1
1057 remote heads: 1
1088 remote heads: 1
1058 common: 0
1089 common: 0
1059 unknown: 1
1090 unknown: 1
1060 local changesets: 1050
1091 local changesets: 1050
1061 common: 1000
1092 common: 1000
1062 heads: 1
1093 heads: 1
1063 roots: 1
1094 roots: 1
1064 missing: 50
1095 missing: 50
1065 heads: 1
1096 heads: 1
1066 roots: 1
1097 roots: 1
1067 first undecided set: 1050
1098 first undecided set: 1050
1068 heads: 1
1099 heads: 1
1069 roots: 1
1100 roots: 1
1070 common: 1000
1101 common: 1000
1071 missing: 50
1102 missing: 50
1072 common heads: 7ead0cba2838
1103 common heads: 7ead0cba2838
1073
1104
1074 % -- a -> b set
1105 % -- a -> b set
1075 comparing with b
1106 comparing with b
1076 query 1; heads
1107 query 1; heads
1077 searching for changes
1108 searching for changes
1078 taking quick initial sample
1109 taking quick initial sample
1079 searching: 2 queries
1110 searching: 2 queries
1080 query 2; still undecided: 1049, sample size is: 11
1111 query 2; still undecided: 1049, sample size is: 11
1081 sampling from both directions
1112 sampling from both directions
1082 searching: 3 queries
1113 searching: 3 queries
1083 query 3; still undecided: 31, sample size is: 31
1114 query 3; still undecided: 31, sample size is: 31
1084 3 total queries in *.????s (glob)
1115 3 total queries in *.????s (glob)
1085 elapsed time: * seconds (glob)
1116 elapsed time: * seconds (glob)
1117 round-trips: 3
1086 heads summary:
1118 heads summary:
1087 total common heads: 1
1119 total common heads: 1
1088 also local heads: 0
1120 also local heads: 0
1089 also remote heads: 0
1121 also remote heads: 0
1090 both: 0
1122 both: 0
1091 local heads: 1
1123 local heads: 1
1092 common: 0
1124 common: 0
1093 missing: 1
1125 missing: 1
1094 remote heads: 1
1126 remote heads: 1
1095 common: 0
1127 common: 0
1096 unknown: 1
1128 unknown: 1
1097 local changesets: 1050
1129 local changesets: 1050
1098 common: 1000
1130 common: 1000
1099 heads: 1
1131 heads: 1
1100 roots: 1
1132 roots: 1
1101 missing: 50
1133 missing: 50
1102 heads: 1
1134 heads: 1
1103 roots: 1
1135 roots: 1
1104 first undecided set: 1050
1136 first undecided set: 1050
1105 heads: 1
1137 heads: 1
1106 roots: 1
1138 roots: 1
1107 common: 1000
1139 common: 1000
1108 missing: 50
1140 missing: 50
1109 common heads: 7ead0cba2838
1141 common heads: 7ead0cba2838
1110
1142
1111 % -- a -> b set (tip only)
1143 % -- a -> b set (tip only)
1112 comparing with b
1144 comparing with b
1113 query 1; heads
1145 query 1; heads
1114 searching for changes
1146 searching for changes
1115 taking quick initial sample
1147 taking quick initial sample
1116 searching: 2 queries
1148 searching: 2 queries
1117 query 2; still undecided: 1049, sample size is: 11
1149 query 2; still undecided: 1049, sample size is: 11
1118 sampling from both directions
1150 sampling from both directions
1119 searching: 3 queries
1151 searching: 3 queries
1120 query 3; still undecided: 31, sample size is: 31
1152 query 3; still undecided: 31, sample size is: 31
1121 3 total queries in *.????s (glob)
1153 3 total queries in *.????s (glob)
1122 elapsed time: * seconds (glob)
1154 elapsed time: * seconds (glob)
1155 round-trips: 3
1123 heads summary:
1156 heads summary:
1124 total common heads: 1
1157 total common heads: 1
1125 also local heads: 0
1158 also local heads: 0
1126 also remote heads: 0
1159 also remote heads: 0
1127 both: 0
1160 both: 0
1128 local heads: 1
1161 local heads: 1
1129 common: 0
1162 common: 0
1130 missing: 1
1163 missing: 1
1131 remote heads: 1
1164 remote heads: 1
1132 common: 0
1165 common: 0
1133 unknown: 1
1166 unknown: 1
1134 local changesets: 1050
1167 local changesets: 1050
1135 common: 1000
1168 common: 1000
1136 heads: 1
1169 heads: 1
1137 roots: 1
1170 roots: 1
1138 missing: 50
1171 missing: 50
1139 heads: 1
1172 heads: 1
1140 roots: 1
1173 roots: 1
1141 first undecided set: 1050
1174 first undecided set: 1050
1142 heads: 1
1175 heads: 1
1143 roots: 1
1176 roots: 1
1144 common: 1000
1177 common: 1000
1145 missing: 50
1178 missing: 50
1146 common heads: 7ead0cba2838
1179 common heads: 7ead0cba2838
1147
1180
1148 % -- b -> a tree
1181 % -- b -> a tree
1149 comparing with a
1182 comparing with a
1150 searching for changes
1183 searching for changes
1151 unpruned common: 7ead0cba2838
1184 unpruned common: 7ead0cba2838
1152 elapsed time: * seconds (glob)
1185 elapsed time: * seconds (glob)
1186 round-trips: 3
1153 heads summary:
1187 heads summary:
1154 total common heads: 1
1188 total common heads: 1
1155 also local heads: 0
1189 also local heads: 0
1156 also remote heads: 0
1190 also remote heads: 0
1157 both: 0
1191 both: 0
1158 local heads: 1
1192 local heads: 1
1159 common: 0
1193 common: 0
1160 missing: 1
1194 missing: 1
1161 remote heads: 1
1195 remote heads: 1
1162 common: 0
1196 common: 0
1163 unknown: 1
1197 unknown: 1
1164 local changesets: 1030
1198 local changesets: 1030
1165 common: 1000
1199 common: 1000
1166 heads: 1
1200 heads: 1
1167 roots: 1
1201 roots: 1
1168 missing: 30
1202 missing: 30
1169 heads: 1
1203 heads: 1
1170 roots: 1
1204 roots: 1
1171 first undecided set: 1030
1205 first undecided set: 1030
1172 heads: 1
1206 heads: 1
1173 roots: 1
1207 roots: 1
1174 common: 1000
1208 common: 1000
1175 missing: 30
1209 missing: 30
1176 common heads: 7ead0cba2838
1210 common heads: 7ead0cba2838
1177
1211
1178 % -- b -> a set
1212 % -- b -> a set
1179 comparing with a
1213 comparing with a
1180 query 1; heads
1214 query 1; heads
1181 searching for changes
1215 searching for changes
1182 taking quick initial sample
1216 taking quick initial sample
1183 searching: 2 queries
1217 searching: 2 queries
1184 query 2; still undecided: 1029, sample size is: 11
1218 query 2; still undecided: 1029, sample size is: 11
1185 sampling from both directions
1219 sampling from both directions
1186 searching: 3 queries
1220 searching: 3 queries
1187 query 3; still undecided: 15, sample size is: 15
1221 query 3; still undecided: 15, sample size is: 15
1188 3 total queries in *.????s (glob)
1222 3 total queries in *.????s (glob)
1189 elapsed time: * seconds (glob)
1223 elapsed time: * seconds (glob)
1224 round-trips: 3
1190 heads summary:
1225 heads summary:
1191 total common heads: 1
1226 total common heads: 1
1192 also local heads: 0
1227 also local heads: 0
1193 also remote heads: 0
1228 also remote heads: 0
1194 both: 0
1229 both: 0
1195 local heads: 1
1230 local heads: 1
1196 common: 0
1231 common: 0
1197 missing: 1
1232 missing: 1
1198 remote heads: 1
1233 remote heads: 1
1199 common: 0
1234 common: 0
1200 unknown: 1
1235 unknown: 1
1201 local changesets: 1030
1236 local changesets: 1030
1202 common: 1000
1237 common: 1000
1203 heads: 1
1238 heads: 1
1204 roots: 1
1239 roots: 1
1205 missing: 30
1240 missing: 30
1206 heads: 1
1241 heads: 1
1207 roots: 1
1242 roots: 1
1208 first undecided set: 1030
1243 first undecided set: 1030
1209 heads: 1
1244 heads: 1
1210 roots: 1
1245 roots: 1
1211 common: 1000
1246 common: 1000
1212 missing: 30
1247 missing: 30
1213 common heads: 7ead0cba2838
1248 common heads: 7ead0cba2838
1214
1249
1215 % -- b -> a set (tip only)
1250 % -- b -> a set (tip only)
1216 comparing with a
1251 comparing with a
1217 query 1; heads
1252 query 1; heads
1218 searching for changes
1253 searching for changes
1219 taking quick initial sample
1254 taking quick initial sample
1220 searching: 2 queries
1255 searching: 2 queries
1221 query 2; still undecided: 1029, sample size is: 11
1256 query 2; still undecided: 1029, sample size is: 11
1222 sampling from both directions
1257 sampling from both directions
1223 searching: 3 queries
1258 searching: 3 queries
1224 query 3; still undecided: 15, sample size is: 15
1259 query 3; still undecided: 15, sample size is: 15
1225 3 total queries in *.????s (glob)
1260 3 total queries in *.????s (glob)
1226 elapsed time: * seconds (glob)
1261 elapsed time: * seconds (glob)
1262 round-trips: 3
1227 heads summary:
1263 heads summary:
1228 total common heads: 1
1264 total common heads: 1
1229 also local heads: 0
1265 also local heads: 0
1230 also remote heads: 0
1266 also remote heads: 0
1231 both: 0
1267 both: 0
1232 local heads: 1
1268 local heads: 1
1233 common: 0
1269 common: 0
1234 missing: 1
1270 missing: 1
1235 remote heads: 1
1271 remote heads: 1
1236 common: 0
1272 common: 0
1237 unknown: 1
1273 unknown: 1
1238 local changesets: 1030
1274 local changesets: 1030
1239 common: 1000
1275 common: 1000
1240 heads: 1
1276 heads: 1
1241 roots: 1
1277 roots: 1
1242 missing: 30
1278 missing: 30
1243 heads: 1
1279 heads: 1
1244 roots: 1
1280 roots: 1
1245 first undecided set: 1030
1281 first undecided set: 1030
1246 heads: 1
1282 heads: 1
1247 roots: 1
1283 roots: 1
1248 common: 1000
1284 common: 1000
1249 missing: 30
1285 missing: 30
1250 common heads: 7ead0cba2838
1286 common heads: 7ead0cba2838
1251
1287
1252
1288
1253 One with >200 heads. We now switch to send them all in the initial roundtrip, but still do sampling for the later request.
1289 One with >200 heads. We now switch to send them all in the initial roundtrip, but still do sampling for the later request.
1254
1290
1255 $ hg init manyheads
1291 $ hg init manyheads
1256 $ cd manyheads
1292 $ cd manyheads
1257 $ echo "+300:r @a" >dagdesc
1293 $ echo "+300:r @a" >dagdesc
1258 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1294 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1259 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1295 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1260 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1296 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1261 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1297 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1262 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1298 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1263 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1299 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1264 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1300 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1265 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1301 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1266 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1302 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1267 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1303 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1268 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1304 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1269 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1305 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1270 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1306 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1271 $ echo "@b *r+3" >>dagdesc # one more head
1307 $ echo "@b *r+3" >>dagdesc # one more head
1272 $ hg debugbuilddag <dagdesc
1308 $ hg debugbuilddag <dagdesc
1273 reading DAG from stdin
1309 reading DAG from stdin
1274
1310
1275 $ hg heads -t --template . | wc -c
1311 $ hg heads -t --template . | wc -c
1276 \s*261 (re)
1312 \s*261 (re)
1277
1313
1278 $ hg clone -b a . a
1314 $ hg clone -b a . a
1279 adding changesets
1315 adding changesets
1280 adding manifests
1316 adding manifests
1281 adding file changes
1317 adding file changes
1282 added 1340 changesets with 0 changes to 0 files (+259 heads)
1318 added 1340 changesets with 0 changes to 0 files (+259 heads)
1283 new changesets 1ea73414a91b:1c51e2c80832
1319 new changesets 1ea73414a91b:1c51e2c80832
1284 updating to branch a
1320 updating to branch a
1285 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1321 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1286 $ hg clone -b b . b
1322 $ hg clone -b b . b
1287 adding changesets
1323 adding changesets
1288 adding manifests
1324 adding manifests
1289 adding file changes
1325 adding file changes
1290 added 304 changesets with 0 changes to 0 files
1326 added 304 changesets with 0 changes to 0 files
1291 new changesets 1ea73414a91b:513314ca8b3a
1327 new changesets 1ea73414a91b:513314ca8b3a
1292 updating to branch b
1328 updating to branch b
1293 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1329 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1294
1330
1295 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --config devel.discovery.randomize=false
1331 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --config devel.discovery.randomize=false
1296 comparing with b
1332 comparing with b
1297 query 1; heads
1333 query 1; heads
1298 searching for changes
1334 searching for changes
1299 taking quick initial sample
1335 taking quick initial sample
1300 searching: 2 queries
1336 searching: 2 queries
1301 query 2; still undecided: 1080, sample size is: 100
1337 query 2; still undecided: 1080, sample size is: 100
1302 sampling from both directions
1338 sampling from both directions
1303 searching: 3 queries
1339 searching: 3 queries
1304 query 3; still undecided: 980, sample size is: 200
1340 query 3; still undecided: 980, sample size is: 200
1305 sampling from both directions
1341 sampling from both directions
1306 searching: 4 queries
1342 searching: 4 queries
1307 query 4; still undecided: 497, sample size is: 210
1343 query 4; still undecided: 497, sample size is: 210
1308 sampling from both directions
1344 sampling from both directions
1309 searching: 5 queries
1345 searching: 5 queries
1310 query 5; still undecided: 285, sample size is: 220
1346 query 5; still undecided: 285, sample size is: 220
1311 sampling from both directions
1347 sampling from both directions
1312 searching: 6 queries
1348 searching: 6 queries
1313 query 6; still undecided: 63, sample size is: 63
1349 query 6; still undecided: 63, sample size is: 63
1314 6 total queries in *.????s (glob)
1350 6 total queries in *.????s (glob)
1315 elapsed time: * seconds (glob)
1351 elapsed time: * seconds (glob)
1352 round-trips: 6
1316 heads summary:
1353 heads summary:
1317 total common heads: 1
1354 total common heads: 1
1318 also local heads: 0
1355 also local heads: 0
1319 also remote heads: 0
1356 also remote heads: 0
1320 both: 0
1357 both: 0
1321 local heads: 260
1358 local heads: 260
1322 common: 0
1359 common: 0
1323 missing: 260
1360 missing: 260
1324 remote heads: 1
1361 remote heads: 1
1325 common: 0
1362 common: 0
1326 unknown: 1
1363 unknown: 1
1327 local changesets: 1340
1364 local changesets: 1340
1328 common: 300
1365 common: 300
1329 heads: 1
1366 heads: 1
1330 roots: 1
1367 roots: 1
1331 missing: 1040
1368 missing: 1040
1332 heads: 260
1369 heads: 260
1333 roots: 260
1370 roots: 260
1334 first undecided set: 1340
1371 first undecided set: 1340
1335 heads: 260
1372 heads: 260
1336 roots: 1
1373 roots: 1
1337 common: 300
1374 common: 300
1338 missing: 1040
1375 missing: 1040
1339 common heads: 3ee37d65064a
1376 common heads: 3ee37d65064a
1340 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --rev tip
1377 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --rev tip
1341 comparing with b
1378 comparing with b
1342 query 1; heads
1379 query 1; heads
1343 searching for changes
1380 searching for changes
1344 taking quick initial sample
1381 taking quick initial sample
1345 searching: 2 queries
1382 searching: 2 queries
1346 query 2; still undecided: 303, sample size is: 9
1383 query 2; still undecided: 303, sample size is: 9
1347 sampling from both directions
1384 sampling from both directions
1348 searching: 3 queries
1385 searching: 3 queries
1349 query 3; still undecided: 3, sample size is: 3
1386 query 3; still undecided: 3, sample size is: 3
1350 3 total queries in *.????s (glob)
1387 3 total queries in *.????s (glob)
1351 elapsed time: * seconds (glob)
1388 elapsed time: * seconds (glob)
1389 round-trips: 3
1352 heads summary:
1390 heads summary:
1353 total common heads: 1
1391 total common heads: 1
1354 also local heads: 0
1392 also local heads: 0
1355 also remote heads: 0
1393 also remote heads: 0
1356 both: 0
1394 both: 0
1357 local heads: 260
1395 local heads: 260
1358 common: 0
1396 common: 0
1359 missing: 260
1397 missing: 260
1360 remote heads: 1
1398 remote heads: 1
1361 common: 0
1399 common: 0
1362 unknown: 1
1400 unknown: 1
1363 local changesets: 1340
1401 local changesets: 1340
1364 common: 300
1402 common: 300
1365 heads: 1
1403 heads: 1
1366 roots: 1
1404 roots: 1
1367 missing: 1040
1405 missing: 1040
1368 heads: 260
1406 heads: 260
1369 roots: 260
1407 roots: 260
1370 first undecided set: 1340
1408 first undecided set: 1340
1371 heads: 260
1409 heads: 260
1372 roots: 1
1410 roots: 1
1373 common: 300
1411 common: 300
1374 missing: 1040
1412 missing: 1040
1375 common heads: 3ee37d65064a
1413 common heads: 3ee37d65064a
1376
1414
1377 Test actual protocol when pulling one new head in addition to common heads
1415 Test actual protocol when pulling one new head in addition to common heads
1378
1416
1379 $ hg clone -U b c
1417 $ hg clone -U b c
1380 $ hg -R c id -ir tip
1418 $ hg -R c id -ir tip
1381 513314ca8b3a
1419 513314ca8b3a
1382 $ hg -R c up -qr default
1420 $ hg -R c up -qr default
1383 $ touch c/f
1421 $ touch c/f
1384 $ hg -R c ci -Aqm "extra head"
1422 $ hg -R c ci -Aqm "extra head"
1385 $ hg -R c id -i
1423 $ hg -R c id -i
1386 e64a39e7da8b
1424 e64a39e7da8b
1387
1425
1388 $ hg serve -R c -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
1426 $ hg serve -R c -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
1389 $ cat hg.pid >> $DAEMON_PIDS
1427 $ cat hg.pid >> $DAEMON_PIDS
1390
1428
1391 $ hg -R b incoming http://localhost:$HGPORT/ -T '{node|short}\n'
1429 $ hg -R b incoming http://localhost:$HGPORT/ -T '{node|short}\n'
1392 comparing with http://localhost:$HGPORT/
1430 comparing with http://localhost:$HGPORT/
1393 searching for changes
1431 searching for changes
1394 e64a39e7da8b
1432 e64a39e7da8b
1395
1433
1396 $ killdaemons.py
1434 $ killdaemons.py
1397 $ cut -d' ' -f6- access.log | grep -v cmd=known # cmd=known uses random sampling
1435 $ cut -d' ' -f6- access.log | grep -v cmd=known # cmd=known uses random sampling
1398 "GET /?cmd=capabilities HTTP/1.1" 200 -
1436 "GET /?cmd=capabilities HTTP/1.1" 200 -
1399 "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D513314ca8b3ae4dac8eec56966265b00fcf866db x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1437 "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D513314ca8b3ae4dac8eec56966265b00fcf866db x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1400 "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:$USUAL_BUNDLE_CAPS$&cg=1&common=513314ca8b3ae4dac8eec56966265b00fcf866db&heads=e64a39e7da8b0d54bc63e81169aff001c13b3477 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1438 "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:$USUAL_BUNDLE_CAPS$&cg=1&common=513314ca8b3ae4dac8eec56966265b00fcf866db&heads=e64a39e7da8b0d54bc63e81169aff001c13b3477 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1401 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1439 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1402 $ cat errors.log
1440 $ cat errors.log
1403
1441
1404 $ cd ..
1442 $ cd ..
1405
1443
1406
1444
1407 Issue 4438 - test coverage for 3ef893520a85 issues.
1445 Issue 4438 - test coverage for 3ef893520a85 issues.
1408
1446
1409 $ mkdir issue4438
1447 $ mkdir issue4438
1410 $ cd issue4438
1448 $ cd issue4438
1411 #if false
1449 #if false
1412 generate new bundles:
1450 generate new bundles:
1413 $ hg init r1
1451 $ hg init r1
1414 $ for i in `"$PYTHON" $TESTDIR/seq.py 101`; do hg -R r1 up -qr null && hg -R r1 branch -q b$i && hg -R r1 ci -qmb$i; done
1452 $ for i in `"$PYTHON" $TESTDIR/seq.py 101`; do hg -R r1 up -qr null && hg -R r1 branch -q b$i && hg -R r1 ci -qmb$i; done
1415 $ hg clone -q r1 r2
1453 $ hg clone -q r1 r2
1416 $ for i in `"$PYTHON" $TESTDIR/seq.py 10`; do hg -R r1 up -qr null && hg -R r1 branch -q c$i && hg -R r1 ci -qmc$i; done
1454 $ for i in `"$PYTHON" $TESTDIR/seq.py 10`; do hg -R r1 up -qr null && hg -R r1 branch -q c$i && hg -R r1 ci -qmc$i; done
1417 $ hg -R r2 branch -q r2change && hg -R r2 ci -qmr2change
1455 $ hg -R r2 branch -q r2change && hg -R r2 ci -qmr2change
1418 $ hg -R r1 bundle -qa $TESTDIR/bundles/issue4438-r1.hg
1456 $ hg -R r1 bundle -qa $TESTDIR/bundles/issue4438-r1.hg
1419 $ hg -R r2 bundle -qa $TESTDIR/bundles/issue4438-r2.hg
1457 $ hg -R r2 bundle -qa $TESTDIR/bundles/issue4438-r2.hg
1420 #else
1458 #else
1421 use existing bundles:
1459 use existing bundles:
1422 $ hg init r1
1460 $ hg init r1
1423 $ hg -R r1 -q unbundle $TESTDIR/bundles/issue4438-r1.hg
1461 $ hg -R r1 -q unbundle $TESTDIR/bundles/issue4438-r1.hg
1424 $ hg -R r1 -q up
1462 $ hg -R r1 -q up
1425 $ hg init r2
1463 $ hg init r2
1426 $ hg -R r2 -q unbundle $TESTDIR/bundles/issue4438-r2.hg
1464 $ hg -R r2 -q unbundle $TESTDIR/bundles/issue4438-r2.hg
1427 $ hg -R r2 -q up
1465 $ hg -R r2 -q up
1428 #endif
1466 #endif
1429
1467
1430 Set iteration order could cause wrong and unstable results - fixed in 73cfaa348650:
1468 Set iteration order could cause wrong and unstable results - fixed in 73cfaa348650:
1431
1469
1432 $ hg -R r1 outgoing r2 -T'{rev} '
1470 $ hg -R r1 outgoing r2 -T'{rev} '
1433 comparing with r2
1471 comparing with r2
1434 searching for changes
1472 searching for changes
1435 101 102 103 104 105 106 107 108 109 110 (no-eol)
1473 101 102 103 104 105 106 107 108 109 110 (no-eol)
1436
1474
1437 The case where all the 'initialsamplesize' samples already were common would
1475 The case where all the 'initialsamplesize' samples already were common would
1438 give 'all remote heads known locally' without checking the remaining heads -
1476 give 'all remote heads known locally' without checking the remaining heads -
1439 fixed in 86c35b7ae300:
1477 fixed in 86c35b7ae300:
1440
1478
1441 $ cat >> r1/.hg/hgrc << EOF
1479 $ cat >> r1/.hg/hgrc << EOF
1442 > [devel]
1480 > [devel]
1443 > discovery.randomize = False
1481 > discovery.randomize = False
1444 > EOF
1482 > EOF
1445
1483
1446 $ hg -R r1 outgoing r2 -T'{rev} ' --config extensions.blackbox= \
1484 $ hg -R r1 outgoing r2 -T'{rev} ' --config extensions.blackbox= \
1447 > --config blackbox.track='command commandfinish discovery'
1485 > --config blackbox.track='command commandfinish discovery'
1448 comparing with r2
1486 comparing with r2
1449 searching for changes
1487 searching for changes
1450 101 102 103 104 105 106 107 108 109 110 (no-eol)
1488 101 102 103 104 105 106 107 108 109 110 (no-eol)
1451 $ hg -R r1 --config extensions.blackbox= blackbox --config blackbox.track=
1489 $ hg -R r1 --config extensions.blackbox= blackbox --config blackbox.track=
1452 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> serve --cmdserver chgunix * (glob) (chg !)
1490 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> serve --cmdserver chgunix * (glob) (chg !)
1453 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* (glob)
1491 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* (glob)
1454 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> found 101 common and 1 unknown server heads, 1 roundtrips in *.????s (glob)
1492 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> found 101 common and 1 unknown server heads, 1 roundtrips in *.????s (glob)
1455 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* exited 0 after *.?? seconds (glob)
1493 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* exited 0 after *.?? seconds (glob)
1456 $ cd ..
1494 $ cd ..
1457
1495
1458 Even if the set of revs to discover is restricted, unrelated revs may be
1496 Even if the set of revs to discover is restricted, unrelated revs may be
1459 returned as common heads.
1497 returned as common heads.
1460
1498
1461 $ mkdir ancestorsof
1499 $ mkdir ancestorsof
1462 $ cd ancestorsof
1500 $ cd ancestorsof
1463 $ hg init a
1501 $ hg init a
1464 $ hg clone a b -q
1502 $ hg clone a b -q
1465 $ cd b
1503 $ cd b
1466 $ hg debugbuilddag '.:root *root *root'
1504 $ hg debugbuilddag '.:root *root *root'
1467 $ hg log -G -T '{node|short}'
1505 $ hg log -G -T '{node|short}'
1468 o fa942426a6fd
1506 o fa942426a6fd
1469 |
1507 |
1470 | o 66f7d451a68b
1508 | o 66f7d451a68b
1471 |/
1509 |/
1472 o 1ea73414a91b
1510 o 1ea73414a91b
1473
1511
1474 $ hg push -r 66f7d451a68b -q
1512 $ hg push -r 66f7d451a68b -q
1475 $ hg debugdiscovery --verbose --rev fa942426a6fd
1513 $ hg debugdiscovery --verbose --rev fa942426a6fd
1476 comparing with $TESTTMP/ancestorsof/a
1514 comparing with $TESTTMP/ancestorsof/a
1477 searching for changes
1515 searching for changes
1478 elapsed time: * seconds (glob)
1516 elapsed time: * seconds (glob)
1517 round-trips: 1
1479 heads summary:
1518 heads summary:
1480 total common heads: 1
1519 total common heads: 1
1481 also local heads: 1
1520 also local heads: 1
1482 also remote heads: 1
1521 also remote heads: 1
1483 both: 1
1522 both: 1
1484 local heads: 2
1523 local heads: 2
1485 common: 1
1524 common: 1
1486 missing: 1
1525 missing: 1
1487 remote heads: 1
1526 remote heads: 1
1488 common: 1
1527 common: 1
1489 unknown: 0
1528 unknown: 0
1490 local changesets: 3
1529 local changesets: 3
1491 common: 2
1530 common: 2
1492 heads: 1
1531 heads: 1
1493 roots: 1
1532 roots: 1
1494 missing: 1
1533 missing: 1
1495 heads: 1
1534 heads: 1
1496 roots: 1
1535 roots: 1
1497 first undecided set: 1
1536 first undecided set: 1
1498 heads: 1
1537 heads: 1
1499 roots: 1
1538 roots: 1
1500 common: 0
1539 common: 0
1501 missing: 1
1540 missing: 1
1502 common heads: 66f7d451a68b
1541 common heads: 66f7d451a68b
General Comments 0
You need to be logged in to leave comments. Login now