##// END OF EJS Templates
debugdiscovery: also integrate the discovery output in the json one...
marmoute -
r47503:67a2ecea default
parent child Browse files
Show More
@@ -1,4766 +1,4779 b''
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import contextlib
12 import difflib
13 import difflib
13 import errno
14 import errno
14 import glob
15 import glob
15 import operator
16 import operator
16 import os
17 import os
17 import platform
18 import platform
18 import random
19 import random
19 import re
20 import re
20 import socket
21 import socket
21 import ssl
22 import ssl
22 import stat
23 import stat
23 import string
24 import string
24 import subprocess
25 import subprocess
25 import sys
26 import sys
26 import time
27 import time
27
28
28 from .i18n import _
29 from .i18n import _
29 from .node import (
30 from .node import (
30 bin,
31 bin,
31 hex,
32 hex,
32 nullid,
33 nullid,
33 nullrev,
34 nullrev,
34 short,
35 short,
35 )
36 )
36 from .pycompat import (
37 from .pycompat import (
37 getattr,
38 getattr,
38 open,
39 open,
39 )
40 )
40 from . import (
41 from . import (
41 bundle2,
42 bundle2,
42 bundlerepo,
43 bundlerepo,
43 changegroup,
44 changegroup,
44 cmdutil,
45 cmdutil,
45 color,
46 color,
46 context,
47 context,
47 copies,
48 copies,
48 dagparser,
49 dagparser,
49 encoding,
50 encoding,
50 error,
51 error,
51 exchange,
52 exchange,
52 extensions,
53 extensions,
53 filemerge,
54 filemerge,
54 filesetlang,
55 filesetlang,
55 formatter,
56 formatter,
56 hg,
57 hg,
57 httppeer,
58 httppeer,
58 localrepo,
59 localrepo,
59 lock as lockmod,
60 lock as lockmod,
60 logcmdutil,
61 logcmdutil,
61 mergestate as mergestatemod,
62 mergestate as mergestatemod,
62 metadata,
63 metadata,
63 obsolete,
64 obsolete,
64 obsutil,
65 obsutil,
65 pathutil,
66 pathutil,
66 phases,
67 phases,
67 policy,
68 policy,
68 pvec,
69 pvec,
69 pycompat,
70 pycompat,
70 registrar,
71 registrar,
71 repair,
72 repair,
72 repoview,
73 repoview,
73 revlog,
74 revlog,
74 revset,
75 revset,
75 revsetlang,
76 revsetlang,
76 scmutil,
77 scmutil,
77 setdiscovery,
78 setdiscovery,
78 simplemerge,
79 simplemerge,
79 sshpeer,
80 sshpeer,
80 sslutil,
81 sslutil,
81 streamclone,
82 streamclone,
82 strip,
83 strip,
83 tags as tagsmod,
84 tags as tagsmod,
84 templater,
85 templater,
85 treediscovery,
86 treediscovery,
86 upgrade,
87 upgrade,
87 url as urlmod,
88 url as urlmod,
88 util,
89 util,
89 vfs as vfsmod,
90 vfs as vfsmod,
90 wireprotoframing,
91 wireprotoframing,
91 wireprotoserver,
92 wireprotoserver,
92 wireprotov2peer,
93 wireprotov2peer,
93 )
94 )
94 from .utils import (
95 from .utils import (
95 cborutil,
96 cborutil,
96 compression,
97 compression,
97 dateutil,
98 dateutil,
98 procutil,
99 procutil,
99 stringutil,
100 stringutil,
100 )
101 )
101
102
102 from .revlogutils import (
103 from .revlogutils import (
103 deltas as deltautil,
104 deltas as deltautil,
104 nodemap,
105 nodemap,
105 sidedata,
106 sidedata,
106 )
107 )
107
108
# Short local alias for the lock-release helper from lockmod.
release = lockmod.release

# Command registration table for this module. It is seeded with the
# command table from the strip module so those commands and the debug*
# commands defined below all register through the same table.
table = {}
table.update(strip.command._table)
command = registrar.command(table)
113
114
114
115
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # explicit index file given: open it directly, no repo needed
        index, rev1, rev2 = args
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        resolve = rlog.lookup
    elif nargs == 2:
        # no index file: fall back to the changelog of the current repo
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        rlog = repo.changelog
        resolve = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    anc = rlog.ancestor(resolve(rev1), resolve(rev2))
    ui.write(b'%d:%s\n' % (rlog.rev(anc), hex(anc)))
134
135
135
136
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # The vfs layer works with bytes paths everywhere else in this file;
    # the path literals here were native str, which is inconsistent and
    # can break the bytes-only path joining on Python 3. Use bytes.
    with repo.cachevfs.open(b'eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join(b'eicar-test-file.com'))
151
152
152
153
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # open the bundle path (handles compression/URL schemes), then
    # hand the parsed bundle straight to its own apply logic
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
159
160
160
161
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    # this command only makes sense starting from nothing
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        # at: id of the last node committed; atbranch: branch for new nodes
        at = -1
        atbranch = b'default'
        # nodeids[i] is the node committed for DAG id i (used by backrefs)
        nodeids = []
        id = 0
        progress.update(id)
        # second parse pass: actually create a commit per 'n' element
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    # single shared file "mf" whose lines are merged with
                    # Merge3Text on merge commits
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # tag this rev's slice of the file with its id
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # single file "of" fully rewritten at every rev
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    # one fresh file "nf<id>" per rev; on merges, carry
                    # over the second parent's nf* files as well
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                # memctx callback: serve file data from filecontent
                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                # map DAG parent ids to previously committed nodes
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # local tag element: remember it for localtags below
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # branch element: applies to subsequent nodes
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
336
337
337
338
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """print the contents of changegroup 'gen'

    With 'all', every delta field is shown for changelog, manifest and
    each filelog; otherwise only the changelog node ids are printed.
    The stream is consumed in order: headers must be read before chunks.
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # dump every delta of the current section, one line per chunk
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        # sections appear in stream order: changelog, manifest, filelogs
        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        # filelogheader() returns {} at end of stream, terminating iter()
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        # terse mode: only the changelog node ids
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
377
378
378
379
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    payload = part.read()
    pad = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(payload)
    except error.UnknownVersion as exc:
        # markers encoded with a format we do not know how to decode
        ui.write(
            b"%sunsupported version: %s (%d bytes)\n"
            % (pad, exc.version, len(payload))
        )
    else:
        ui.write(b"%sversion: %d (%d bytes)\n" % (pad, version, len(payload)))
        fm = ui.formatter(b'debugobsolete', opts)
        for raw in sorted(markers):
            fm.startitem()
            fm.plain(pad)
            cmdutil.showmarker(fm, obsutil.marker(None, raw))
        fm.end()
401
402
402
403
def _debugphaseheads(ui, data, indent=0):
    """display phase heads decoded from binary 'data', one per line"""
    pad = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(pad)
            ui.write(b'%s %s\n' % (hex(head), phasename))
411
412
412
413
def _quasirepr(thing):
    """repr-like bytes rendering of 'thing' with sorted, stable dict output"""
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
        return b'{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
419
420
420
421
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    wanted = opts.get('part_type', [])
    for part in gen.iterparts():
        # optional filtering on part type
        if wanted and part.type not in wanted:
            continue
        ui.write(
            (
                b'%s -- %s (mandatory: %r)\n'
                % (part.type, _quasirepr(part.params), part.mandatory)
            )
        )
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers' and not ui.quiet:
            _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads' and not ui.quiet:
            _debugphaseheads(ui, part, indent=4)
443
444
444
445
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as fh:
        # --spec: just report the bundlespec, do not unpack anything
        if spec:
            ui.write(b'%s\n' % exchange.getbundlespec(ui, fh))
            return

        bundle = exchange.readbundle(ui, fh, bundlepath)
        if isinstance(bundle, bundle2.unbundle20):
            return _debugbundle2(ui, bundle, all=all, **opts)
        _debugchangegroup(ui, bundle, all=all, **opts)
467
468
468
469
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        caps = peer.capabilities()
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in sorted(caps):
            ui.write(b'  %s\n' % cap)
        # bundle2 capabilities are nested: one key, several values
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for capkey, capvalues in sorted(pycompat.iteritems(b2caps)):
                ui.write(b'  %s\n' % capkey)
                for val in capvalues:
                    ui.write(b'    %s\n' % val)
    finally:
        # always close the peer connection, even if listing failed
        peer.close()
488
489
489
490
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = scmutil.revsingle(repo, rev, None)
    files = None

    if opts['compute']:
        # recompute the change set from the revision contents
        files = metadata.compute_all_files_changes(ctx)
    else:
        # read the pre-computed file changes from changelog sidedata,
        # if this revision has a files block stored there
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is not None:
        for f in sorted(files.touched):
            # classify each touched file; order matters: added/removed/
            # merged/salvaged are more specific than plain "touched"
            if f in files.added:
                action = b"added"
            elif f in files.removed:
                action = b"removed"
            elif f in files.merged:
                action = b"merged"
            elif f in files.salvaged:
                action = b"salvaged"
            else:
                action = b"touched"

            # annotate copies with which parent the source comes from
            copy_parent = b""
            copy_source = b""
            if f in files.copied_from_p1:
                copy_parent = b"p1"
                copy_source = files.copied_from_p1[f]
            elif f in files.copied_from_p2:
                copy_parent = b"p2"
                copy_source = files.copied_from_p2[f]

            data = (action, copy_parent, f, copy_source)
            template = b"%-8s %2s: %s, %s;\n"
            ui.write(template % data)
539
540
540
541
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    problems = 0
    # pass 1: each dirstate entry must be consistent with the manifests
    for fname in repo.dirstate:
        st = repo.dirstate[fname]
        if st in b"nr" and fname not in m1:
            ui.warn(_(b"%s in state %s, but not in manifest1\n") % (fname, st))
            problems += 1
        if st in b"a" and fname in m1:
            ui.warn(_(b"%s in state %s, but also in manifest1\n") % (fname, st))
            problems += 1
        if st in b"m" and fname not in m1 and fname not in m2:
            ui.warn(
                _(b"%s in state %s, but not in either manifest\n")
                % (fname, st)
            )
            problems += 1
    # pass 2: each first-parent manifest entry must be tracked
    for fname in m1:
        st = repo.dirstate[fname]
        if st not in b"nrm":
            ui.warn(_(b"%s in manifest1, but listed as state %s") % (fname, st))
            problems += 1
    if problems:
        errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
569
570
570
571
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    # dispatch to the requested listing
    show = _debugdisplaystyle if opts.get('style') else _debugdisplaycolor
    return show(ui)
583
584
584
585
def _debugdisplaycolor(ui):
    """list every color/effect name, each printed with its own label"""
    # work on a copy so the caller's ui styles are untouched
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for key, value in ui.configitems(b'color'):
            if key.startswith(b'color.'):
                ui._styles[key] = key[6:]
            elif key.startswith(b'terminfo.'):
                ui._styles[key] = key[9:]
    ui.write(_(b'available colors:\n'))

    # names containing '_' sort after plain names, so '_background'
    # variants are grouped at the end
    def sortkey(item):
        return (b'_' in item[0], item[0], item[1])

    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
601
602
602
603
def _debugdisplaystyle(ui):
    """Print each configured style label with its effects, aligned."""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # Pad every label to the widest one so the effect lists line up.
    longest = max(len(name) for name in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            ui.write(b': ')
            padding = max(0, longest - len(label))
            ui.write(b' ' * padding)
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
616
617
617
618
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        msg = _(
            b'(warning: stream clone bundle will contain secret '
            b'revisions)\n'
        )
        ui.warn(msg)

    requirements, chunks = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, chunks, fname)

    reqstr = b', '.join(sorted(requirements))
    ui.write(_(b'bundle requirements: %s\n') % reqstr)
639
640
640
641
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # A revlog index file was given: emit that revlog's DAG.  The vfs
        # is unaudited because the file may live anywhere on disk.
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # 'n' events describe a node and its (non-null) parents; 'l'
            # events attach an "rN" label to explicitly requested revisions.
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # Map each revision to the list of tag names pointing at it.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # Emit an 'a' (annotation) event whenever the branch
                    # name changes between consecutive revisions.
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    # dagparser renders the event stream as concise dagtext, one line at
    # a time; each rendered line is terminated with a newline here.
    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")
710
711
711
712
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the storage is implied, so the lone positional
    # argument is actually the revision, not a file.
    if any(opts.get(k) for k in (b'changelog', b'manifest', b'dir')):
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    store = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(store.rawdata(store.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
727
728
728
729
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # With --extended, also try the more permissive date formats.
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
747
748
748
749
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # Compute per-revision statistics: compressed/uncompressed sizes,
        # the kind of delta base used, the full delta chain and its total
        # compressed size.
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # e[3] is the delta base; classify it relative to the parents
            # (e[5], e[6]) and the revision itself.
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            # Without generaldelta, the base is either the revision itself
            # (full snapshot) or implicitly the previous revision.
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b'    rev  chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'   readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # Number chains by first-seen base revision so related revisions share
    # a stable chain id.
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        # On-disk distance from the chain base to the end of this revision,
        # and how much of that span belongs to unrelated revisions.
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # Chain of length one: no previous revision.
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            # Simulate a sparse read of the chain to measure how much data
            # would actually be read from disk and in how many hunks.
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
929
930
930
931
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --nodates is the deprecated spelling of --no-dates; honor it.
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:
        keyfunc = lambda x: (x[1][3], x[0])  # sort by mtime, then by filename
    else:
        keyfunc = None  # sort by filename
    # Each dirstate entry is (state, mode, size, mtime).
    for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
        if ent[3] == -1:
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime(
                "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
            )
            timestr = encoding.strtolocal(timestr)
        # 0o20000 in the stored mode marks a symlink.
        if ent[1] & 0o20000:
            mode = b'lnk'
        else:
            mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
974
975
975
976
976 @command(
977 @command(
977 b'debugdiscovery',
978 b'debugdiscovery',
978 [
979 [
979 (b'', b'old', None, _(b'use old-style discovery')),
980 (b'', b'old', None, _(b'use old-style discovery')),
980 (
981 (
981 b'',
982 b'',
982 b'nonheads',
983 b'nonheads',
983 None,
984 None,
984 _(b'use old-style discovery with non-heads included'),
985 _(b'use old-style discovery with non-heads included'),
985 ),
986 ),
986 (b'', b'rev', [], b'restrict discovery to this set of revs'),
987 (b'', b'rev', [], b'restrict discovery to this set of revs'),
987 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
988 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
988 (
989 (
989 b'',
990 b'',
990 b'local-as-revs',
991 b'local-as-revs',
991 "",
992 "",
992 'treat local has having these revisions only',
993 'treat local has having these revisions only',
993 ),
994 ),
994 (
995 (
995 b'',
996 b'',
996 b'remote-as-revs',
997 b'remote-as-revs',
997 "",
998 "",
998 'use local as remote, with only these these revisions',
999 'use local as remote, with only these these revisions',
999 ),
1000 ),
1000 ]
1001 ]
1001 + cmdutil.remoteopts
1002 + cmdutil.remoteopts
1002 + cmdutil.formatteropts,
1003 + cmdutil.formatteropts,
1003 _(b'[--rev REV] [OTHER]'),
1004 _(b'[--rev REV] [OTHER]'),
1004 )
1005 )
1005 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1006 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1006 """runs the changeset discovery protocol in isolation
1007 """runs the changeset discovery protocol in isolation
1007
1008
1008 The local peer can be "replaced" by a subset of the local repository by
1009 The local peer can be "replaced" by a subset of the local repository by
1009 using the `--local-as-revs` flag. Int he same way, usual `remote` peer can
1010 using the `--local-as-revs` flag. Int he same way, usual `remote` peer can
1010 be "replaced" by a subset of the local repository using the
1011 be "replaced" by a subset of the local repository using the
1011 `--local-as-revs` flag. This is useful to efficiently debug pathological
1012 `--local-as-revs` flag. This is useful to efficiently debug pathological
1012 discovery situation.
1013 discovery situation.
1013 """
1014 """
1014 opts = pycompat.byteskwargs(opts)
1015 opts = pycompat.byteskwargs(opts)
1015 unfi = repo.unfiltered()
1016 unfi = repo.unfiltered()
1016
1017
1017 # setup potential extra filtering
1018 # setup potential extra filtering
1018 local_revs = opts[b"local_as_revs"]
1019 local_revs = opts[b"local_as_revs"]
1019 remote_revs = opts[b"remote_as_revs"]
1020 remote_revs = opts[b"remote_as_revs"]
1020
1021
1021 # make sure tests are repeatable
1022 # make sure tests are repeatable
1022 random.seed(int(opts[b'seed']))
1023 random.seed(int(opts[b'seed']))
1023
1024
1024 if not remote_revs:
1025 if not remote_revs:
1025
1026
1026 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
1027 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
1027 remote = hg.peer(repo, opts, remoteurl)
1028 remote = hg.peer(repo, opts, remoteurl)
1028 ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))
1029 ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))
1029 else:
1030 else:
1030 branches = (None, [])
1031 branches = (None, [])
1031 remote_filtered_revs = scmutil.revrange(
1032 remote_filtered_revs = scmutil.revrange(
1032 unfi, [b"not (::(%s))" % remote_revs]
1033 unfi, [b"not (::(%s))" % remote_revs]
1033 )
1034 )
1034 remote_filtered_revs = frozenset(remote_filtered_revs)
1035 remote_filtered_revs = frozenset(remote_filtered_revs)
1035
1036
1036 def remote_func(x):
1037 def remote_func(x):
1037 return remote_filtered_revs
1038 return remote_filtered_revs
1038
1039
1039 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1040 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1040
1041
1041 remote = repo.peer()
1042 remote = repo.peer()
1042 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1043 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1043
1044
1044 if local_revs:
1045 if local_revs:
1045 local_filtered_revs = scmutil.revrange(
1046 local_filtered_revs = scmutil.revrange(
1046 unfi, [b"not (::(%s))" % local_revs]
1047 unfi, [b"not (::(%s))" % local_revs]
1047 )
1048 )
1048 local_filtered_revs = frozenset(local_filtered_revs)
1049 local_filtered_revs = frozenset(local_filtered_revs)
1049
1050
1050 def local_func(x):
1051 def local_func(x):
1051 return local_filtered_revs
1052 return local_filtered_revs
1052
1053
1053 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1054 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1054 repo = repo.filtered(b'debug-discovery-local-filter')
1055 repo = repo.filtered(b'debug-discovery-local-filter')
1055
1056
1056 data = {}
1057 data = {}
1057 if opts.get(b'old'):
1058 if opts.get(b'old'):
1058
1059
1059 def doit(pushedrevs, remoteheads, remote=remote):
1060 def doit(pushedrevs, remoteheads, remote=remote):
1060 if not util.safehasattr(remote, b'branches'):
1061 if not util.safehasattr(remote, b'branches'):
1061 # enable in-client legacy support
1062 # enable in-client legacy support
1062 remote = localrepo.locallegacypeer(remote.local())
1063 remote = localrepo.locallegacypeer(remote.local())
1063 common, _in, hds = treediscovery.findcommonincoming(
1064 common, _in, hds = treediscovery.findcommonincoming(
1064 repo, remote, force=True, audit=data
1065 repo, remote, force=True, audit=data
1065 )
1066 )
1066 common = set(common)
1067 common = set(common)
1067 if not opts.get(b'nonheads'):
1068 if not opts.get(b'nonheads'):
1068 ui.writenoi18n(
1069 ui.writenoi18n(
1069 b"unpruned common: %s\n"
1070 b"unpruned common: %s\n"
1070 % b" ".join(sorted(short(n) for n in common))
1071 % b" ".join(sorted(short(n) for n in common))
1071 )
1072 )
1072
1073
1073 clnode = repo.changelog.node
1074 clnode = repo.changelog.node
1074 common = repo.revs(b'heads(::%ln)', common)
1075 common = repo.revs(b'heads(::%ln)', common)
1075 common = {clnode(r) for r in common}
1076 common = {clnode(r) for r in common}
1076 return common, hds
1077 return common, hds
1077
1078
1078 else:
1079 else:
1079
1080
1080 def doit(pushedrevs, remoteheads, remote=remote):
1081 def doit(pushedrevs, remoteheads, remote=remote):
1081 nodes = None
1082 nodes = None
1082 if pushedrevs:
1083 if pushedrevs:
1083 revs = scmutil.revrange(repo, pushedrevs)
1084 revs = scmutil.revrange(repo, pushedrevs)
1084 nodes = [repo[r].node() for r in revs]
1085 nodes = [repo[r].node() for r in revs]
1085 common, any, hds = setdiscovery.findcommonheads(
1086 common, any, hds = setdiscovery.findcommonheads(
1086 ui, repo, remote, ancestorsof=nodes, audit=data
1087 ui, repo, remote, ancestorsof=nodes, audit=data
1087 )
1088 )
1088 return common, hds
1089 return common, hds
1089
1090
1090 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1091 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1091 localrevs = opts[b'rev']
1092 localrevs = opts[b'rev']
1092 with util.timedcm('debug-discovery') as t:
1093
1093 common, hds = doit(localrevs, remoterevs)
1094 fm = ui.formatter(b'debugdiscovery', opts)
1095 if fm.strict_format:
1096
1097 @contextlib.contextmanager
1098 def may_capture_output():
1099 ui.pushbuffer()
1100 yield
1101 data[b'output'] = ui.popbuffer()
1102
1103 else:
1104 may_capture_output = util.nullcontextmanager
1105 with may_capture_output():
1106 with util.timedcm('debug-discovery') as t:
1107 common, hds = doit(localrevs, remoterevs)
1094
1108
1095 # compute all statistics
1109 # compute all statistics
1096 heads_common = set(common)
1110 heads_common = set(common)
1097 heads_remote = set(hds)
1111 heads_remote = set(hds)
1098 heads_local = set(repo.heads())
1112 heads_local = set(repo.heads())
1099 # note: they cannot be a local or remote head that is in common and not
1113 # note: they cannot be a local or remote head that is in common and not
1100 # itself a head of common.
1114 # itself a head of common.
1101 heads_common_local = heads_common & heads_local
1115 heads_common_local = heads_common & heads_local
1102 heads_common_remote = heads_common & heads_remote
1116 heads_common_remote = heads_common & heads_remote
1103 heads_common_both = heads_common & heads_remote & heads_local
1117 heads_common_both = heads_common & heads_remote & heads_local
1104
1118
1105 all = repo.revs(b'all()')
1119 all = repo.revs(b'all()')
1106 common = repo.revs(b'::%ln', common)
1120 common = repo.revs(b'::%ln', common)
1107 roots_common = repo.revs(b'roots(::%ld)', common)
1121 roots_common = repo.revs(b'roots(::%ld)', common)
1108 missing = repo.revs(b'not ::%ld', common)
1122 missing = repo.revs(b'not ::%ld', common)
1109 heads_missing = repo.revs(b'heads(%ld)', missing)
1123 heads_missing = repo.revs(b'heads(%ld)', missing)
1110 roots_missing = repo.revs(b'roots(%ld)', missing)
1124 roots_missing = repo.revs(b'roots(%ld)', missing)
1111 assert len(common) + len(missing) == len(all)
1125 assert len(common) + len(missing) == len(all)
1112
1126
1113 initial_undecided = repo.revs(
1127 initial_undecided = repo.revs(
1114 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1128 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1115 )
1129 )
1116 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1130 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1117 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1131 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1118 common_initial_undecided = initial_undecided & common
1132 common_initial_undecided = initial_undecided & common
1119 missing_initial_undecided = initial_undecided & missing
1133 missing_initial_undecided = initial_undecided & missing
1120
1134
1121 data[b'elapsed'] = t.elapsed
1135 data[b'elapsed'] = t.elapsed
1122 data[b'nb-common-heads'] = len(heads_common)
1136 data[b'nb-common-heads'] = len(heads_common)
1123 data[b'nb-common-heads-local'] = len(heads_common_local)
1137 data[b'nb-common-heads-local'] = len(heads_common_local)
1124 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1138 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1125 data[b'nb-common-heads-both'] = len(heads_common_both)
1139 data[b'nb-common-heads-both'] = len(heads_common_both)
1126 data[b'nb-common-roots'] = len(roots_common)
1140 data[b'nb-common-roots'] = len(roots_common)
1127 data[b'nb-head-local'] = len(heads_local)
1141 data[b'nb-head-local'] = len(heads_local)
1128 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1142 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1129 data[b'nb-head-remote'] = len(heads_remote)
1143 data[b'nb-head-remote'] = len(heads_remote)
1130 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1144 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1131 heads_common_remote
1145 heads_common_remote
1132 )
1146 )
1133 data[b'nb-revs'] = len(all)
1147 data[b'nb-revs'] = len(all)
1134 data[b'nb-revs-common'] = len(common)
1148 data[b'nb-revs-common'] = len(common)
1135 data[b'nb-revs-missing'] = len(missing)
1149 data[b'nb-revs-missing'] = len(missing)
1136 data[b'nb-missing-heads'] = len(heads_missing)
1150 data[b'nb-missing-heads'] = len(heads_missing)
1137 data[b'nb-missing-roots'] = len(roots_missing)
1151 data[b'nb-missing-roots'] = len(roots_missing)
1138 data[b'nb-ini_und'] = len(initial_undecided)
1152 data[b'nb-ini_und'] = len(initial_undecided)
1139 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1153 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1140 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1154 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1141 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1155 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1142 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1156 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1143
1157
1144 fm = ui.formatter(b'debugdiscovery', opts)
1145 fm.startitem()
1158 fm.startitem()
1146 fm.data(**pycompat.strkwargs(data))
1159 fm.data(**pycompat.strkwargs(data))
1147 # display discovery summary
1160 # display discovery summary
1148 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1161 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1149 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1162 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1150 fm.plain(b"heads summary:\n")
1163 fm.plain(b"heads summary:\n")
1151 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1164 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1152 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1165 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1153 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1166 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1154 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1167 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1155 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1168 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1156 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1169 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1157 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1170 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1158 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1171 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1159 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1172 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1160 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1173 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1161 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1174 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1162 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1175 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1163 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1176 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1164 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1177 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1165 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1178 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1166 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1179 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1167 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1180 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1168 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1181 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1169 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1182 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1170 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1183 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1171 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1184 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1172 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1185 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1173
1186
1174 if ui.verbose:
1187 if ui.verbose:
1175 fm.plain(
1188 fm.plain(
1176 b"common heads: %s\n"
1189 b"common heads: %s\n"
1177 % b" ".join(sorted(short(n) for n in heads_common))
1190 % b" ".join(sorted(short(n) for n in heads_common))
1178 )
1191 )
1179 fm.end()
1192 fm.end()
1180
1193
1181
1194
# Size (4 KiB) of the read/write chunks used when streaming a download
# in debugdownload below.
_chunksize = 4 << 10
1183
1196
1184
1197
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The resource is fetched through ``urlmod.open`` (so proxy and
    authentication configuration applies) and streamed in ``_chunksize``
    pieces either to the ui or, when --output is given, to that file.
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, b"wb", _chunksize)
    try:
        try:
            data = fh.read(_chunksize)
            while data:
                dest.write(data)
                data = fh.read(_chunksize)
        finally:
            if output:
                dest.close()
    finally:
        # always release the source handle as well; previously only the
        # destination file was closed, leaking the download handle
        fh.close()
1207
1220
1208
1221
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    # emit one formatter item per extension, sorted by extension name
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            # frozen build: modules carry no __file__, so report the
            # executable embedding them instead
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            # default verbosity: annotate the name with a compatibility note
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b'  location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b'  bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b'  tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b'  bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1270
1283
1271
1284
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)

    # pipeline of tree transformations; each stage can be dumped with -p
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    # which stages to print after running them
    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect the candidate file names the matcher will be tested against
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        # include working-directory files (unknown and ignored too)
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    # print the candidates accepted by the matcher, in sorted order
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1367
1380
1368
1381
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # column width: widest variant name, but at least the header label
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # '%s:' padded so that the value columns of all rows line up
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # bytes values pass through; booleans render as yes/no in
            # plain output
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    # header row; config/default columns only appear with --verbose
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # label (color) the name/repo cells depending on whether the repo
        # agrees with the configuration and with the Mercurial default
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1439
1452
1440
1453
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # render a boolean probe result as the output expects
        return b'yes' if flag else b'no'

    write = ui.writenoi18n
    write(b'path: %s\n' % path)
    write(b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)'))
    write(b'exec: %s\n' % yesno(util.checkexec(path)))
    write(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    write(b'symlink: %s\n' % yesno(util.checklink(path)))
    write(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    # probe case sensitivity with a throwaway temp file; a failure to
    # create it (e.g. unwritable path) leaves the value as '(unknown)'
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    write(b'case-sensitive: %s\n' % casesensitive)
1463
1476
1464
1477
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")

    # TODO: get desired bundlecaps from command line.
    kwargs = {'bundlecaps': None}
    if common:
        kwargs['common'] = [bin(nodeid) for nodeid in common]
    if head:
        kwargs['heads'] = [bin(nodeid) for nodeid in head]
    bundle = peer.getbundle(b'debug', **kwargs)

    # map the user-facing compression name onto an internal bundle type
    requested = opts.get(b'type', b'bzip2').lower()
    knowntypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = knowntypes.get(requested)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1511
1524
1512
1525
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                if ignore(nf):
                    # the file itself matches an ignore pattern
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    # otherwise check whether one of its parent directories
                    # is ignored (which implicitly ignores the file)
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                # report which ignore file and line produced the match
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1561
1574
1562
1575
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    # full hashes with --debug, short forms otherwise
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # measure the id column width from the first revision; an empty store
    # keeps the default of 12
    idlen = 12
    for i in store:
        idlen = len(shortfn(store.node(i)))
        break

    fm = ui.formatter(b'debugindex', opts)
    fm.plain(
        b'   rev linkrev %s %s p2\n'
        % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
    )

    for rev in store:
        node = store.node(rev)
        parents = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(parents[0]))
        fm.write(b'p2', b'%s', shortfn(parents[1]))
        fm.plain(b'\n')

    fm.end()
1602
1615
1603
1616
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    store = cmdutil.openstorage(
        repo, b'debugindexdot', file_, pycompat.byteskwargs(opts)
    )
    ui.writenoi18n(b"digraph G {\n")
    for rev in store:
        p1, p2 = store.parents(store.node(rev))
        # one edge per parent; nullid marks an absent second parent
        ui.write(b"\t%d -> %d\n" % (store.rev(p1), rev))
        if p2 != nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(p2), rev))
    ui.write(b"}\n")
1622
1635
1623
1636
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # presumably exercises the native index before collecting stats — the
    # return value is unused; TODO confirm why this call is needed
    repo.changelog.shortest(nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    stats = index.stats()
    for name in sorted(stats):
        ui.write(b'%s: %d\n' % (name, stats[name]))
1633
1646
1634
1647
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    """test Mercurial installation

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)

    # running tally of detected problems; returned as the exit status
    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding might be unknown or wrong. don't translate these messages.
    fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        b" %s\n (check that your locale is properly set)\n",
        err,
    )

    # Python interpreter details
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        # frozen (PyOxidizer) builds embed the stdlib in the executable
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    try:
        from . import rustext

        rustext.__doc__  # trigger lazy import
    except ImportError:
        rustext = None

    # TLS protocol / SNI support reported as a single sorted list
    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b' TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b' SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    fm.plain(
        _(
            b"checking Rust extensions (%s)\n"
            % (b'missing' if rustext is None else b'installed')
        ),
    )

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc  # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        # try importing the compiled extensions to surface load errors
        err = None
        try:
            if cext:
                from .cext import (  # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (  # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate)  # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    # compression engines: registered, importable, and wire-protocol capable
    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util._re2:
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates: locate the default map file and try to compile it
    p = templater.templatedir()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        (m, fp) = templater.try_open_template(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor: only the first shell word of the configured editor is checked
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    # a missing default 'vi' is only a warning; a missing configured
    # editor is a problem
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e.message
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # let extensions contribute their own checks via a 'debuginstall' hook
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
1933
1946
1934
1947
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    # one flag per requested node, in the order they were given
    flags = repo.known([bin(s) for s in ids])
    bits = b"".join(b"1" if known else b"0" for known in flags)
    ui.write(b"%s\n" % bits)
1948
1961
1949
1962
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # kept only as an alias; all the work is delegated to debugnamecomplete
    debugnamecomplete(ui, repo, *args)
1954
1967
1955
1968
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # forced freeing: delete lock files directly, then bail out
    if opts.get('force_free_lock'):
        repo.svfs.unlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.unlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    # lock setting: acquire without waiting, hold until the user answers
    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if locks:
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                # a missing lock file just means the lock is free
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2067
2080
2068
2081
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # only some revlog implementations carry a fulltext cache
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
            return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(e, hint=b"Check your manifest node id")
                manifest.read()  # stores revision in cache too
            return

    # no action requested: display the cache contents
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2140
2153
2141
2154
2142 @command(b'debugmergestate', [] + cmdutil.templateopts, b'')
2155 @command(b'debugmergestate', [] + cmdutil.templateopts, b'')
2143 def debugmergestate(ui, repo, *args, **opts):
2156 def debugmergestate(ui, repo, *args, **opts):
2144 """print merge state
2157 """print merge state
2145
2158
2146 Use --verbose to print out information about whether v1 or v2 merge state
2159 Use --verbose to print out information about whether v1 or v2 merge state
2147 was chosen."""
2160 was chosen."""
2148
2161
2149 if ui.verbose:
2162 if ui.verbose:
2150 ms = mergestatemod.mergestate(repo)
2163 ms = mergestatemod.mergestate(repo)
2151
2164
2152 # sort so that reasonable information is on top
2165 # sort so that reasonable information is on top
2153 v1records = ms._readrecordsv1()
2166 v1records = ms._readrecordsv1()
2154 v2records = ms._readrecordsv2()
2167 v2records = ms._readrecordsv2()
2155
2168
2156 if not v1records and not v2records:
2169 if not v1records and not v2records:
2157 pass
2170 pass
2158 elif not v2records:
2171 elif not v2records:
2159 ui.writenoi18n(b'no version 2 merge state\n')
2172 ui.writenoi18n(b'no version 2 merge state\n')
2160 elif ms._v1v2match(v1records, v2records):
2173 elif ms._v1v2match(v1records, v2records):
2161 ui.writenoi18n(b'v1 and v2 states match: using v2\n')
2174 ui.writenoi18n(b'v1 and v2 states match: using v2\n')
2162 else:
2175 else:
2163 ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')
2176 ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')
2164
2177
2165 opts = pycompat.byteskwargs(opts)
2178 opts = pycompat.byteskwargs(opts)
2166 if not opts[b'template']:
2179 if not opts[b'template']:
2167 opts[b'template'] = (
2180 opts[b'template'] = (
2168 b'{if(commits, "", "no merge state found\n")}'
2181 b'{if(commits, "", "no merge state found\n")}'
2169 b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
2182 b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
2170 b'{files % "file: {path} (state \\"{state}\\")\n'
2183 b'{files % "file: {path} (state \\"{state}\\")\n'
2171 b'{if(local_path, "'
2184 b'{if(local_path, "'
2172 b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
2185 b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
2173 b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
2186 b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
2174 b' other path: {other_path} (node {other_node})\n'
2187 b' other path: {other_path} (node {other_node})\n'
2175 b'")}'
2188 b'")}'
2176 b'{if(rename_side, "'
2189 b'{if(rename_side, "'
2177 b' rename side: {rename_side}\n'
2190 b' rename side: {rename_side}\n'
2178 b' renamed path: {renamed_path}\n'
2191 b' renamed path: {renamed_path}\n'
2179 b'")}'
2192 b'")}'
2180 b'{extras % " extra: {key} = {value}\n"}'
2193 b'{extras % " extra: {key} = {value}\n"}'
2181 b'"}'
2194 b'"}'
2182 b'{extras % "extra: {file} ({key} = {value})\n"}'
2195 b'{extras % "extra: {file} ({key} = {value})\n"}'
2183 )
2196 )
2184
2197
2185 ms = mergestatemod.mergestate.read(repo)
2198 ms = mergestatemod.mergestate.read(repo)
2186
2199
2187 fm = ui.formatter(b'debugmergestate', opts)
2200 fm = ui.formatter(b'debugmergestate', opts)
2188 fm.startitem()
2201 fm.startitem()
2189
2202
2190 fm_commits = fm.nested(b'commits')
2203 fm_commits = fm.nested(b'commits')
2191 if ms.active():
2204 if ms.active():
2192 for name, node, label_index in (
2205 for name, node, label_index in (
2193 (b'local', ms.local, 0),
2206 (b'local', ms.local, 0),
2194 (b'other', ms.other, 1),
2207 (b'other', ms.other, 1),
2195 ):
2208 ):
2196 fm_commits.startitem()
2209 fm_commits.startitem()
2197 fm_commits.data(name=name)
2210 fm_commits.data(name=name)
2198 fm_commits.data(node=hex(node))
2211 fm_commits.data(node=hex(node))
2199 if ms._labels and len(ms._labels) > label_index:
2212 if ms._labels and len(ms._labels) > label_index:
2200 fm_commits.data(label=ms._labels[label_index])
2213 fm_commits.data(label=ms._labels[label_index])
2201 fm_commits.end()
2214 fm_commits.end()
2202
2215
2203 fm_files = fm.nested(b'files')
2216 fm_files = fm.nested(b'files')
2204 if ms.active():
2217 if ms.active():
2205 for f in ms:
2218 for f in ms:
2206 fm_files.startitem()
2219 fm_files.startitem()
2207 fm_files.data(path=f)
2220 fm_files.data(path=f)
2208 state = ms._state[f]
2221 state = ms._state[f]
2209 fm_files.data(state=state[0])
2222 fm_files.data(state=state[0])
2210 if state[0] in (
2223 if state[0] in (
2211 mergestatemod.MERGE_RECORD_UNRESOLVED,
2224 mergestatemod.MERGE_RECORD_UNRESOLVED,
2212 mergestatemod.MERGE_RECORD_RESOLVED,
2225 mergestatemod.MERGE_RECORD_RESOLVED,
2213 ):
2226 ):
2214 fm_files.data(local_key=state[1])
2227 fm_files.data(local_key=state[1])
2215 fm_files.data(local_path=state[2])
2228 fm_files.data(local_path=state[2])
2216 fm_files.data(ancestor_path=state[3])
2229 fm_files.data(ancestor_path=state[3])
2217 fm_files.data(ancestor_node=state[4])
2230 fm_files.data(ancestor_node=state[4])
2218 fm_files.data(other_path=state[5])
2231 fm_files.data(other_path=state[5])
2219 fm_files.data(other_node=state[6])
2232 fm_files.data(other_node=state[6])
2220 fm_files.data(local_flags=state[7])
2233 fm_files.data(local_flags=state[7])
2221 elif state[0] in (
2234 elif state[0] in (
2222 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
2235 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
2223 mergestatemod.MERGE_RECORD_RESOLVED_PATH,
2236 mergestatemod.MERGE_RECORD_RESOLVED_PATH,
2224 ):
2237 ):
2225 fm_files.data(renamed_path=state[1])
2238 fm_files.data(renamed_path=state[1])
2226 fm_files.data(rename_side=state[2])
2239 fm_files.data(rename_side=state[2])
2227 fm_extras = fm_files.nested(b'extras')
2240 fm_extras = fm_files.nested(b'extras')
2228 for k, v in sorted(ms.extras(f).items()):
2241 for k, v in sorted(ms.extras(f).items()):
2229 fm_extras.startitem()
2242 fm_extras.startitem()
2230 fm_extras.data(key=k)
2243 fm_extras.data(key=k)
2231 fm_extras.data(value=v)
2244 fm_extras.data(value=v)
2232 fm_extras.end()
2245 fm_extras.end()
2233
2246
2234 fm_files.end()
2247 fm_files.end()
2235
2248
2236 fm_extras = fm.nested(b'extras')
2249 fm_extras = fm.nested(b'extras')
2237 for f, d in sorted(pycompat.iteritems(ms.allextras())):
2250 for f, d in sorted(pycompat.iteritems(ms.allextras())):
2238 if f in ms:
2251 if f in ms:
2239 # If file is in mergestate, we have already processed it's extras
2252 # If file is in mergestate, we have already processed it's extras
2240 continue
2253 continue
2241 for k, v in pycompat.iteritems(d):
2254 for k, v in pycompat.iteritems(d):
2242 fm_extras.startitem()
2255 fm_extras.startitem()
2243 fm_extras.data(file=f)
2256 fm_extras.data(file=f)
2244 fm_extras.data(key=k)
2257 fm_extras.data(key=k)
2245 fm_extras.data(value=v)
2258 fm_extras.data(value=v)
2246 fm_extras.end()
2259 fm_extras.end()
2247
2260
2248 fm.end()
2261 fm.end()
2249
2262
2250
2263
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    # Gather candidates from every registered namespace except branches:
    # branches are handled separately below because historically only the
    # *open* ones were offered for completion.
    candidates = set()
    for ns_name, ns in pycompat.iteritems(repo.names):
        if ns_name != b'branches':
            candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)

    # With no arguments, complete against the empty prefix (i.e. list all).
    prefixes = args or [b'']
    matches = {
        name
        for prefix in prefixes
        for name in candidates
        if name.startswith(prefix)
    }
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2273
2286
2274
2287
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""

    def changelog():
        # Always operate on the unfiltered repository: the persistent
        # nodemap covers every revision, including filtered ones.
        return repo.unfiltered().changelog

    if opts['dump_new']:
        index = changelog().index
        # Prefer the (Rust/C) index's own serializer when available,
        # otherwise fall back to the pure-Python implementation.
        if util.safehasattr(index, "nodemap_data_all"):
            ui.write(index.nodemap_data_all())
        else:
            ui.write(nodemap.persistent_data(index))
    elif opts['dump_disk']:
        nm_data = nodemap.persisted_data(changelog())
        if nm_data is not None:
            _docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        cl = changelog()
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            _docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        nm_data = nodemap.persisted_data(changelog())
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2336
2349
2337
2350
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Parse a full hex node id into its binary form; reject anything
        # that is not exactly nodelen hex digits.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            # bin() raises TypeError on odd-length/non-hex input too, so a
            # single handler covers both failure modes.
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # --delete mode: remove markers by index and return early.
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # Creation mode: record one marker precursor -> successors.
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        # Lock is taken before the transaction and released after it; the
        # nested try/finally blocks preserve that ordering even on errors.
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                # obsstore.create() validates its input with ValueError;
                # surface that as a user-facing abort.
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # Display mode: list markers, optionally restricted to --rev.
        if opts[b'rev']:
            revs = scmutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            # Indices are positions in the full marker list, so we must
            # iterate over everything and filter at display time.
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2487
2500
2488
2501
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    opts = pycompat.byteskwargs(opts)
    # Default to the working-directory context when no revision is given.
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    copy_map = ctx.p1copies()
    for dst in copy_map:
        ui.write(b'%s -> %s\n' % (copy_map[dst], dst))
2501
2514
2502
2515
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""
    # BUG FIX: this function was previously (mis)named `debugp1copies`,
    # which silently shadowed the real debugp1copies defined just above at
    # module level. The command itself was unaffected (the decorator
    # registers it under b'debugp2copies'), so renaming the function is
    # backward compatible and restores access to both module attributes.
    opts = pycompat.byteskwargs(opts)
    # Default to the working-directory context when no revision is given.
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2515
2528
2516
2529
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def matches(path, acceptable):
        # Resolve the spec to an absolute normalized path and make sure it
        # lives inside the repository; anything else yields no matches.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        spec = spec[len(rootdir) :]
        # Dirstate paths always use b'/'; translate the spec on platforms
        # with a different separator so prefix matching works.
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files = set()
        dirs = set()
        for f, st in pycompat.iteritems(dirstate):
            if not f.startswith(spec) or st[0] not in acceptable:
                continue
            if fixpaths:
                f = f.replace(b'/', pycompat.ossep)
            if fullpaths:
                files.add(f)
                continue
            # Without --full, stop completion at the next path segment.
            sep = f.find(pycompat.ossep, speclen)
            if sep >= 0:
                dirs.add(f[:sep])
            else:
                files.add(f)
        return files, dirs

    # Build the set of acceptable dirstate states from the flags given;
    # with no flags, accept everything (b'nmar').
    acceptable = b''
    for opt, states in (
        ('normal', b'nm'),
        ('added', b'a'),
        ('removed', b'r'),
    ):
        if opts[opt]:
            acceptable += states
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files = set()
    dirs = set()
    for spec in specs:
        f, d = matches(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2585
2598
2586
2599
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # Copies are computed from REV1 to REV2; the file patterns are matched
    # against REV1.
    ctx_from = scmutil.revsingle(repo, rev1)
    ctx_to = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(ctx_from, pats, opts)
    copy_map = copies.pathcopies(ctx_from, ctx_to, matcher)
    for dst, src in sorted(copy_map.items()):
        ui.write(b'%s -> %s\n' % (src, dst))
2600
2613
2601
2614
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {(b'devel', b'debug.peer-request'): True}

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

    try:
        islocal = peer.local() is not None
        canpush = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (_(b'yes') if islocal else _(b'no')))
        ui.write(
            _(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no'))
        )
    finally:
        # Always release the peer connection, even if probing it failed.
        peer.close()
2625
2638
2626
2639
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    # Mirror the real merge machinery: --tool wins over everything else,
    # implemented by forcing ui.forcemerge for the duration of the command.
    overrides = {}
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                # Unless --debug is in effect, swallow the chatter that
                # _picktool emits while probing tools; only the final
                # "FILE = TOOL" line should reach the user.
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(b'%s = %s\n' % (path, tool))
2714
2727
2715
2728
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    target = hg.peer(ui, {}, repopath)
    try:
        if not keyinfo:
            # Listing mode: dump every key/value pair in the namespace.
            for k, v in sorted(
                pycompat.iteritems(target.listkeys(namespace))
            ):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(k), stringutil.escapestr(v))
                )
        else:
            # Update mode: compare-and-set via the pushkey wire command.
            key, old, new = keyinfo
            with target.commandexecutor() as e:
                r = e.callcommand(
                    b'pushkey',
                    {
                        b'namespace': namespace,
                        b'key': key,
                        b'old': old,
                        b'new': new,
                    },
                ).result()

            ui.status(pycompat.bytestr(r) + b'\n')
            # Shell convention: exit 0 on success, non-zero on failure.
            return not r
    finally:
        target.close()
2751
2764
2752
2765
2753 @command(b'debugpvec', [], _(b'A B'))
2766 @command(b'debugpvec', [], _(b'A B'))
2754 def debugpvec(ui, repo, a, b=None):
2767 def debugpvec(ui, repo, a, b=None):
2755 ca = scmutil.revsingle(repo, a)
2768 ca = scmutil.revsingle(repo, a)
2756 cb = scmutil.revsingle(repo, b)
2769 cb = scmutil.revsingle(repo, b)
2757 pa = pvec.ctxpvec(ca)
2770 pa = pvec.ctxpvec(ca)
2758 pb = pvec.ctxpvec(cb)
2771 pb = pvec.ctxpvec(cb)
2759 if pa == pb:
2772 if pa == pb:
2760 rel = b"="
2773 rel = b"="
2761 elif pa > pb:
2774 elif pa > pb:
2762 rel = b">"
2775 rel = b">"
2763 elif pa < pb:
2776 elif pa < pb:
2764 rel = b"<"
2777 rel = b"<"
2765 elif pa | pb:
2778 elif pa | pb:
2766 rel = b"|"
2779 rel = b"|"
2767 ui.write(_(b"a: %s\n") % pa)
2780 ui.write(_(b"a: %s\n") % pa)
2768 ui.write(_(b"b: %s\n") % pb)
2781 ui.write(_(b"b: %s\n") % pb)
2769 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2782 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2770 ui.write(
2783 ui.write(
2771 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
2784 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
2772 % (
2785 % (
2773 abs(pa._depth - pb._depth),
2786 abs(pa._depth - pb._depth),
2774 pvec._hamming(pa._vec, pb._vec),
2787 pvec._hamming(pa._vec, pb._vec),
2775 pa.distance(pb),
2788 pa.distance(pb),
2776 rel,
2789 rel,
2777 )
2790 )
2778 )
2791 )
2779
2792
2780
2793
2781 @command(
2794 @command(
2782 b'debugrebuilddirstate|debugrebuildstate',
2795 b'debugrebuilddirstate|debugrebuildstate',
2783 [
2796 [
2784 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
2797 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
2785 (
2798 (
2786 b'',
2799 b'',
2787 b'minimal',
2800 b'minimal',
2788 None,
2801 None,
2789 _(
2802 _(
2790 b'only rebuild files that are inconsistent with '
2803 b'only rebuild files that are inconsistent with '
2791 b'the working copy parent'
2804 b'the working copy parent'
2792 ),
2805 ),
2793 ),
2806 ),
2794 ],
2807 ],
2795 _(b'[-r REV]'),
2808 _(b'[-r REV]'),
2796 )
2809 )
2797 def debugrebuilddirstate(ui, repo, rev, **opts):
2810 def debugrebuilddirstate(ui, repo, rev, **opts):
2798 """rebuild the dirstate as it would look like for the given revision
2811 """rebuild the dirstate as it would look like for the given revision
2799
2812
2800 If no revision is specified the first current parent will be used.
2813 If no revision is specified the first current parent will be used.
2801
2814
2802 The dirstate will be set to the files of the given revision.
2815 The dirstate will be set to the files of the given revision.
2803 The actual working directory content or existing dirstate
2816 The actual working directory content or existing dirstate
2804 information such as adds or removes is not considered.
2817 information such as adds or removes is not considered.
2805
2818
2806 ``minimal`` will only rebuild the dirstate status for files that claim to be
2819 ``minimal`` will only rebuild the dirstate status for files that claim to be
2807 tracked but are not in the parent manifest, or that exist in the parent
2820 tracked but are not in the parent manifest, or that exist in the parent
2808 manifest but are not in the dirstate. It will not change adds, removes, or
2821 manifest but are not in the dirstate. It will not change adds, removes, or
2809 modified files that are in the working copy parent.
2822 modified files that are in the working copy parent.
2810
2823
2811 One use of this command is to make the next :hg:`status` invocation
2824 One use of this command is to make the next :hg:`status` invocation
2812 check the actual file content.
2825 check the actual file content.
2813 """
2826 """
2814 ctx = scmutil.revsingle(repo, rev)
2827 ctx = scmutil.revsingle(repo, rev)
2815 with repo.wlock():
2828 with repo.wlock():
2816 dirstate = repo.dirstate
2829 dirstate = repo.dirstate
2817 changedfiles = None
2830 changedfiles = None
2818 # See command doc for what minimal does.
2831 # See command doc for what minimal does.
2819 if opts.get('minimal'):
2832 if opts.get('minimal'):
2820 manifestfiles = set(ctx.manifest().keys())
2833 manifestfiles = set(ctx.manifest().keys())
2821 dirstatefiles = set(dirstate)
2834 dirstatefiles = set(dirstate)
2822 manifestonly = manifestfiles - dirstatefiles
2835 manifestonly = manifestfiles - dirstatefiles
2823 dsonly = dirstatefiles - manifestfiles
2836 dsonly = dirstatefiles - manifestfiles
2824 dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
2837 dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
2825 changedfiles = manifestonly | dsnotadded
2838 changedfiles = manifestonly | dsnotadded
2826
2839
2827 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2840 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2828
2841
2829
2842
2830 @command(b'debugrebuildfncache', [], b'')
2843 @command(b'debugrebuildfncache', [], b'')
2831 def debugrebuildfncache(ui, repo):
2844 def debugrebuildfncache(ui, repo):
2832 """rebuild the fncache file"""
2845 """rebuild the fncache file"""
2833 repair.rebuildfncache(ui, repo)
2846 repair.rebuildfncache(ui, repo)
2834
2847
2835
2848
2836 @command(
2849 @command(
2837 b'debugrename',
2850 b'debugrename',
2838 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2851 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2839 _(b'[-r REV] [FILE]...'),
2852 _(b'[-r REV] [FILE]...'),
2840 )
2853 )
2841 def debugrename(ui, repo, *pats, **opts):
2854 def debugrename(ui, repo, *pats, **opts):
2842 """dump rename information"""
2855 """dump rename information"""
2843
2856
2844 opts = pycompat.byteskwargs(opts)
2857 opts = pycompat.byteskwargs(opts)
2845 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2858 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2846 m = scmutil.match(ctx, pats, opts)
2859 m = scmutil.match(ctx, pats, opts)
2847 for abs in ctx.walk(m):
2860 for abs in ctx.walk(m):
2848 fctx = ctx[abs]
2861 fctx = ctx[abs]
2849 o = fctx.filelog().renamed(fctx.filenode())
2862 o = fctx.filelog().renamed(fctx.filenode())
2850 rel = repo.pathto(abs)
2863 rel = repo.pathto(abs)
2851 if o:
2864 if o:
2852 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2865 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2853 else:
2866 else:
2854 ui.write(_(b"%s not renamed\n") % rel)
2867 ui.write(_(b"%s not renamed\n") % rel)
2855
2868
2856
2869
2857 @command(b'debugrequires|debugrequirements', [], b'')
2870 @command(b'debugrequires|debugrequirements', [], b'')
2858 def debugrequirements(ui, repo):
2871 def debugrequirements(ui, repo):
2859 """ print the current repo requirements """
2872 """ print the current repo requirements """
2860 for r in sorted(repo.requirements):
2873 for r in sorted(repo.requirements):
2861 ui.write(b"%s\n" % r)
2874 ui.write(b"%s\n" % r)
2862
2875
2863
2876
2864 @command(
2877 @command(
2865 b'debugrevlog',
2878 b'debugrevlog',
2866 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2879 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2867 _(b'-c|-m|FILE'),
2880 _(b'-c|-m|FILE'),
2868 optionalrepo=True,
2881 optionalrepo=True,
2869 )
2882 )
2870 def debugrevlog(ui, repo, file_=None, **opts):
2883 def debugrevlog(ui, repo, file_=None, **opts):
2871 """show data and statistics about a revlog"""
2884 """show data and statistics about a revlog"""
2872 opts = pycompat.byteskwargs(opts)
2885 opts = pycompat.byteskwargs(opts)
2873 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2886 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2874
2887
2875 if opts.get(b"dump"):
2888 if opts.get(b"dump"):
2876 numrevs = len(r)
2889 numrevs = len(r)
2877 ui.write(
2890 ui.write(
2878 (
2891 (
2879 b"# rev p1rev p2rev start end deltastart base p1 p2"
2892 b"# rev p1rev p2rev start end deltastart base p1 p2"
2880 b" rawsize totalsize compression heads chainlen\n"
2893 b" rawsize totalsize compression heads chainlen\n"
2881 )
2894 )
2882 )
2895 )
2883 ts = 0
2896 ts = 0
2884 heads = set()
2897 heads = set()
2885
2898
2886 for rev in pycompat.xrange(numrevs):
2899 for rev in pycompat.xrange(numrevs):
2887 dbase = r.deltaparent(rev)
2900 dbase = r.deltaparent(rev)
2888 if dbase == -1:
2901 if dbase == -1:
2889 dbase = rev
2902 dbase = rev
2890 cbase = r.chainbase(rev)
2903 cbase = r.chainbase(rev)
2891 clen = r.chainlen(rev)
2904 clen = r.chainlen(rev)
2892 p1, p2 = r.parentrevs(rev)
2905 p1, p2 = r.parentrevs(rev)
2893 rs = r.rawsize(rev)
2906 rs = r.rawsize(rev)
2894 ts = ts + rs
2907 ts = ts + rs
2895 heads -= set(r.parentrevs(rev))
2908 heads -= set(r.parentrevs(rev))
2896 heads.add(rev)
2909 heads.add(rev)
2897 try:
2910 try:
2898 compression = ts / r.end(rev)
2911 compression = ts / r.end(rev)
2899 except ZeroDivisionError:
2912 except ZeroDivisionError:
2900 compression = 0
2913 compression = 0
2901 ui.write(
2914 ui.write(
2902 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2915 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2903 b"%11d %5d %8d\n"
2916 b"%11d %5d %8d\n"
2904 % (
2917 % (
2905 rev,
2918 rev,
2906 p1,
2919 p1,
2907 p2,
2920 p2,
2908 r.start(rev),
2921 r.start(rev),
2909 r.end(rev),
2922 r.end(rev),
2910 r.start(dbase),
2923 r.start(dbase),
2911 r.start(cbase),
2924 r.start(cbase),
2912 r.start(p1),
2925 r.start(p1),
2913 r.start(p2),
2926 r.start(p2),
2914 rs,
2927 rs,
2915 ts,
2928 ts,
2916 compression,
2929 compression,
2917 len(heads),
2930 len(heads),
2918 clen,
2931 clen,
2919 )
2932 )
2920 )
2933 )
2921 return 0
2934 return 0
2922
2935
2923 v = r.version
2936 v = r.version
2924 format = v & 0xFFFF
2937 format = v & 0xFFFF
2925 flags = []
2938 flags = []
2926 gdelta = False
2939 gdelta = False
2927 if v & revlog.FLAG_INLINE_DATA:
2940 if v & revlog.FLAG_INLINE_DATA:
2928 flags.append(b'inline')
2941 flags.append(b'inline')
2929 if v & revlog.FLAG_GENERALDELTA:
2942 if v & revlog.FLAG_GENERALDELTA:
2930 gdelta = True
2943 gdelta = True
2931 flags.append(b'generaldelta')
2944 flags.append(b'generaldelta')
2932 if not flags:
2945 if not flags:
2933 flags = [b'(none)']
2946 flags = [b'(none)']
2934
2947
2935 ### tracks merge vs single parent
2948 ### tracks merge vs single parent
2936 nummerges = 0
2949 nummerges = 0
2937
2950
2938 ### tracks ways the "delta" are build
2951 ### tracks ways the "delta" are build
2939 # nodelta
2952 # nodelta
2940 numempty = 0
2953 numempty = 0
2941 numemptytext = 0
2954 numemptytext = 0
2942 numemptydelta = 0
2955 numemptydelta = 0
2943 # full file content
2956 # full file content
2944 numfull = 0
2957 numfull = 0
2945 # intermediate snapshot against a prior snapshot
2958 # intermediate snapshot against a prior snapshot
2946 numsemi = 0
2959 numsemi = 0
2947 # snapshot count per depth
2960 # snapshot count per depth
2948 numsnapdepth = collections.defaultdict(lambda: 0)
2961 numsnapdepth = collections.defaultdict(lambda: 0)
2949 # delta against previous revision
2962 # delta against previous revision
2950 numprev = 0
2963 numprev = 0
2951 # delta against first or second parent (not prev)
2964 # delta against first or second parent (not prev)
2952 nump1 = 0
2965 nump1 = 0
2953 nump2 = 0
2966 nump2 = 0
2954 # delta against neither prev nor parents
2967 # delta against neither prev nor parents
2955 numother = 0
2968 numother = 0
2956 # delta against prev that are also first or second parent
2969 # delta against prev that are also first or second parent
2957 # (details of `numprev`)
2970 # (details of `numprev`)
2958 nump1prev = 0
2971 nump1prev = 0
2959 nump2prev = 0
2972 nump2prev = 0
2960
2973
2961 # data about delta chain of each revs
2974 # data about delta chain of each revs
2962 chainlengths = []
2975 chainlengths = []
2963 chainbases = []
2976 chainbases = []
2964 chainspans = []
2977 chainspans = []
2965
2978
2966 # data about each revision
2979 # data about each revision
2967 datasize = [None, 0, 0]
2980 datasize = [None, 0, 0]
2968 fullsize = [None, 0, 0]
2981 fullsize = [None, 0, 0]
2969 semisize = [None, 0, 0]
2982 semisize = [None, 0, 0]
2970 # snapshot count per depth
2983 # snapshot count per depth
2971 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2984 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2972 deltasize = [None, 0, 0]
2985 deltasize = [None, 0, 0]
2973 chunktypecounts = {}
2986 chunktypecounts = {}
2974 chunktypesizes = {}
2987 chunktypesizes = {}
2975
2988
2976 def addsize(size, l):
2989 def addsize(size, l):
2977 if l[0] is None or size < l[0]:
2990 if l[0] is None or size < l[0]:
2978 l[0] = size
2991 l[0] = size
2979 if size > l[1]:
2992 if size > l[1]:
2980 l[1] = size
2993 l[1] = size
2981 l[2] += size
2994 l[2] += size
2982
2995
2983 numrevs = len(r)
2996 numrevs = len(r)
2984 for rev in pycompat.xrange(numrevs):
2997 for rev in pycompat.xrange(numrevs):
2985 p1, p2 = r.parentrevs(rev)
2998 p1, p2 = r.parentrevs(rev)
2986 delta = r.deltaparent(rev)
2999 delta = r.deltaparent(rev)
2987 if format > 0:
3000 if format > 0:
2988 addsize(r.rawsize(rev), datasize)
3001 addsize(r.rawsize(rev), datasize)
2989 if p2 != nullrev:
3002 if p2 != nullrev:
2990 nummerges += 1
3003 nummerges += 1
2991 size = r.length(rev)
3004 size = r.length(rev)
2992 if delta == nullrev:
3005 if delta == nullrev:
2993 chainlengths.append(0)
3006 chainlengths.append(0)
2994 chainbases.append(r.start(rev))
3007 chainbases.append(r.start(rev))
2995 chainspans.append(size)
3008 chainspans.append(size)
2996 if size == 0:
3009 if size == 0:
2997 numempty += 1
3010 numempty += 1
2998 numemptytext += 1
3011 numemptytext += 1
2999 else:
3012 else:
3000 numfull += 1
3013 numfull += 1
3001 numsnapdepth[0] += 1
3014 numsnapdepth[0] += 1
3002 addsize(size, fullsize)
3015 addsize(size, fullsize)
3003 addsize(size, snapsizedepth[0])
3016 addsize(size, snapsizedepth[0])
3004 else:
3017 else:
3005 chainlengths.append(chainlengths[delta] + 1)
3018 chainlengths.append(chainlengths[delta] + 1)
3006 baseaddr = chainbases[delta]
3019 baseaddr = chainbases[delta]
3007 revaddr = r.start(rev)
3020 revaddr = r.start(rev)
3008 chainbases.append(baseaddr)
3021 chainbases.append(baseaddr)
3009 chainspans.append((revaddr - baseaddr) + size)
3022 chainspans.append((revaddr - baseaddr) + size)
3010 if size == 0:
3023 if size == 0:
3011 numempty += 1
3024 numempty += 1
3012 numemptydelta += 1
3025 numemptydelta += 1
3013 elif r.issnapshot(rev):
3026 elif r.issnapshot(rev):
3014 addsize(size, semisize)
3027 addsize(size, semisize)
3015 numsemi += 1
3028 numsemi += 1
3016 depth = r.snapshotdepth(rev)
3029 depth = r.snapshotdepth(rev)
3017 numsnapdepth[depth] += 1
3030 numsnapdepth[depth] += 1
3018 addsize(size, snapsizedepth[depth])
3031 addsize(size, snapsizedepth[depth])
3019 else:
3032 else:
3020 addsize(size, deltasize)
3033 addsize(size, deltasize)
3021 if delta == rev - 1:
3034 if delta == rev - 1:
3022 numprev += 1
3035 numprev += 1
3023 if delta == p1:
3036 if delta == p1:
3024 nump1prev += 1
3037 nump1prev += 1
3025 elif delta == p2:
3038 elif delta == p2:
3026 nump2prev += 1
3039 nump2prev += 1
3027 elif delta == p1:
3040 elif delta == p1:
3028 nump1 += 1
3041 nump1 += 1
3029 elif delta == p2:
3042 elif delta == p2:
3030 nump2 += 1
3043 nump2 += 1
3031 elif delta != nullrev:
3044 elif delta != nullrev:
3032 numother += 1
3045 numother += 1
3033
3046
3034 # Obtain data on the raw chunks in the revlog.
3047 # Obtain data on the raw chunks in the revlog.
3035 if util.safehasattr(r, b'_getsegmentforrevs'):
3048 if util.safehasattr(r, b'_getsegmentforrevs'):
3036 segment = r._getsegmentforrevs(rev, rev)[1]
3049 segment = r._getsegmentforrevs(rev, rev)[1]
3037 else:
3050 else:
3038 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3051 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3039 if segment:
3052 if segment:
3040 chunktype = bytes(segment[0:1])
3053 chunktype = bytes(segment[0:1])
3041 else:
3054 else:
3042 chunktype = b'empty'
3055 chunktype = b'empty'
3043
3056
3044 if chunktype not in chunktypecounts:
3057 if chunktype not in chunktypecounts:
3045 chunktypecounts[chunktype] = 0
3058 chunktypecounts[chunktype] = 0
3046 chunktypesizes[chunktype] = 0
3059 chunktypesizes[chunktype] = 0
3047
3060
3048 chunktypecounts[chunktype] += 1
3061 chunktypecounts[chunktype] += 1
3049 chunktypesizes[chunktype] += size
3062 chunktypesizes[chunktype] += size
3050
3063
3051 # Adjust size min value for empty cases
3064 # Adjust size min value for empty cases
3052 for size in (datasize, fullsize, semisize, deltasize):
3065 for size in (datasize, fullsize, semisize, deltasize):
3053 if size[0] is None:
3066 if size[0] is None:
3054 size[0] = 0
3067 size[0] = 0
3055
3068
3056 numdeltas = numrevs - numfull - numempty - numsemi
3069 numdeltas = numrevs - numfull - numempty - numsemi
3057 numoprev = numprev - nump1prev - nump2prev
3070 numoprev = numprev - nump1prev - nump2prev
3058 totalrawsize = datasize[2]
3071 totalrawsize = datasize[2]
3059 datasize[2] /= numrevs
3072 datasize[2] /= numrevs
3060 fulltotal = fullsize[2]
3073 fulltotal = fullsize[2]
3061 if numfull == 0:
3074 if numfull == 0:
3062 fullsize[2] = 0
3075 fullsize[2] = 0
3063 else:
3076 else:
3064 fullsize[2] /= numfull
3077 fullsize[2] /= numfull
3065 semitotal = semisize[2]
3078 semitotal = semisize[2]
3066 snaptotal = {}
3079 snaptotal = {}
3067 if numsemi > 0:
3080 if numsemi > 0:
3068 semisize[2] /= numsemi
3081 semisize[2] /= numsemi
3069 for depth in snapsizedepth:
3082 for depth in snapsizedepth:
3070 snaptotal[depth] = snapsizedepth[depth][2]
3083 snaptotal[depth] = snapsizedepth[depth][2]
3071 snapsizedepth[depth][2] /= numsnapdepth[depth]
3084 snapsizedepth[depth][2] /= numsnapdepth[depth]
3072
3085
3073 deltatotal = deltasize[2]
3086 deltatotal = deltasize[2]
3074 if numdeltas > 0:
3087 if numdeltas > 0:
3075 deltasize[2] /= numdeltas
3088 deltasize[2] /= numdeltas
3076 totalsize = fulltotal + semitotal + deltatotal
3089 totalsize = fulltotal + semitotal + deltatotal
3077 avgchainlen = sum(chainlengths) / numrevs
3090 avgchainlen = sum(chainlengths) / numrevs
3078 maxchainlen = max(chainlengths)
3091 maxchainlen = max(chainlengths)
3079 maxchainspan = max(chainspans)
3092 maxchainspan = max(chainspans)
3080 compratio = 1
3093 compratio = 1
3081 if totalsize:
3094 if totalsize:
3082 compratio = totalrawsize / totalsize
3095 compratio = totalrawsize / totalsize
3083
3096
3084 basedfmtstr = b'%%%dd\n'
3097 basedfmtstr = b'%%%dd\n'
3085 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3098 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3086
3099
3087 def dfmtstr(max):
3100 def dfmtstr(max):
3088 return basedfmtstr % len(str(max))
3101 return basedfmtstr % len(str(max))
3089
3102
3090 def pcfmtstr(max, padding=0):
3103 def pcfmtstr(max, padding=0):
3091 return basepcfmtstr % (len(str(max)), b' ' * padding)
3104 return basepcfmtstr % (len(str(max)), b' ' * padding)
3092
3105
3093 def pcfmt(value, total):
3106 def pcfmt(value, total):
3094 if total:
3107 if total:
3095 return (value, 100 * float(value) / total)
3108 return (value, 100 * float(value) / total)
3096 else:
3109 else:
3097 return value, 100.0
3110 return value, 100.0
3098
3111
3099 ui.writenoi18n(b'format : %d\n' % format)
3112 ui.writenoi18n(b'format : %d\n' % format)
3100 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3113 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3101
3114
3102 ui.write(b'\n')
3115 ui.write(b'\n')
3103 fmt = pcfmtstr(totalsize)
3116 fmt = pcfmtstr(totalsize)
3104 fmt2 = dfmtstr(totalsize)
3117 fmt2 = dfmtstr(totalsize)
3105 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3118 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3106 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3119 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3107 ui.writenoi18n(
3120 ui.writenoi18n(
3108 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3121 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3109 )
3122 )
3110 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3123 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3111 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3124 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3112 ui.writenoi18n(
3125 ui.writenoi18n(
3113 b' text : '
3126 b' text : '
3114 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3127 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3115 )
3128 )
3116 ui.writenoi18n(
3129 ui.writenoi18n(
3117 b' delta : '
3130 b' delta : '
3118 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3131 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3119 )
3132 )
3120 ui.writenoi18n(
3133 ui.writenoi18n(
3121 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3134 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3122 )
3135 )
3123 for depth in sorted(numsnapdepth):
3136 for depth in sorted(numsnapdepth):
3124 ui.write(
3137 ui.write(
3125 (b' lvl-%-3d : ' % depth)
3138 (b' lvl-%-3d : ' % depth)
3126 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3139 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3127 )
3140 )
3128 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3141 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3129 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3142 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3130 ui.writenoi18n(
3143 ui.writenoi18n(
3131 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3144 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3132 )
3145 )
3133 for depth in sorted(numsnapdepth):
3146 for depth in sorted(numsnapdepth):
3134 ui.write(
3147 ui.write(
3135 (b' lvl-%-3d : ' % depth)
3148 (b' lvl-%-3d : ' % depth)
3136 + fmt % pcfmt(snaptotal[depth], totalsize)
3149 + fmt % pcfmt(snaptotal[depth], totalsize)
3137 )
3150 )
3138 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3151 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3139
3152
3140 def fmtchunktype(chunktype):
3153 def fmtchunktype(chunktype):
3141 if chunktype == b'empty':
3154 if chunktype == b'empty':
3142 return b' %s : ' % chunktype
3155 return b' %s : ' % chunktype
3143 elif chunktype in pycompat.bytestr(string.ascii_letters):
3156 elif chunktype in pycompat.bytestr(string.ascii_letters):
3144 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3157 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3145 else:
3158 else:
3146 return b' 0x%s : ' % hex(chunktype)
3159 return b' 0x%s : ' % hex(chunktype)
3147
3160
3148 ui.write(b'\n')
3161 ui.write(b'\n')
3149 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3162 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3150 for chunktype in sorted(chunktypecounts):
3163 for chunktype in sorted(chunktypecounts):
3151 ui.write(fmtchunktype(chunktype))
3164 ui.write(fmtchunktype(chunktype))
3152 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3165 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3153 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3166 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3154 for chunktype in sorted(chunktypecounts):
3167 for chunktype in sorted(chunktypecounts):
3155 ui.write(fmtchunktype(chunktype))
3168 ui.write(fmtchunktype(chunktype))
3156 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3169 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3157
3170
3158 ui.write(b'\n')
3171 ui.write(b'\n')
3159 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3172 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3160 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3173 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3161 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3174 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3162 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3175 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3163 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3176 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3164
3177
3165 if format > 0:
3178 if format > 0:
3166 ui.write(b'\n')
3179 ui.write(b'\n')
3167 ui.writenoi18n(
3180 ui.writenoi18n(
3168 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3181 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3169 % tuple(datasize)
3182 % tuple(datasize)
3170 )
3183 )
3171 ui.writenoi18n(
3184 ui.writenoi18n(
3172 b'full revision size (min/max/avg) : %d / %d / %d\n'
3185 b'full revision size (min/max/avg) : %d / %d / %d\n'
3173 % tuple(fullsize)
3186 % tuple(fullsize)
3174 )
3187 )
3175 ui.writenoi18n(
3188 ui.writenoi18n(
3176 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3189 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3177 % tuple(semisize)
3190 % tuple(semisize)
3178 )
3191 )
3179 for depth in sorted(snapsizedepth):
3192 for depth in sorted(snapsizedepth):
3180 if depth == 0:
3193 if depth == 0:
3181 continue
3194 continue
3182 ui.writenoi18n(
3195 ui.writenoi18n(
3183 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3196 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3184 % ((depth,) + tuple(snapsizedepth[depth]))
3197 % ((depth,) + tuple(snapsizedepth[depth]))
3185 )
3198 )
3186 ui.writenoi18n(
3199 ui.writenoi18n(
3187 b'delta size (min/max/avg) : %d / %d / %d\n'
3200 b'delta size (min/max/avg) : %d / %d / %d\n'
3188 % tuple(deltasize)
3201 % tuple(deltasize)
3189 )
3202 )
3190
3203
3191 if numdeltas > 0:
3204 if numdeltas > 0:
3192 ui.write(b'\n')
3205 ui.write(b'\n')
3193 fmt = pcfmtstr(numdeltas)
3206 fmt = pcfmtstr(numdeltas)
3194 fmt2 = pcfmtstr(numdeltas, 4)
3207 fmt2 = pcfmtstr(numdeltas, 4)
3195 ui.writenoi18n(
3208 ui.writenoi18n(
3196 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3209 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3197 )
3210 )
3198 if numprev > 0:
3211 if numprev > 0:
3199 ui.writenoi18n(
3212 ui.writenoi18n(
3200 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3213 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3201 )
3214 )
3202 ui.writenoi18n(
3215 ui.writenoi18n(
3203 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3216 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3204 )
3217 )
3205 ui.writenoi18n(
3218 ui.writenoi18n(
3206 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3219 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3207 )
3220 )
3208 if gdelta:
3221 if gdelta:
3209 ui.writenoi18n(
3222 ui.writenoi18n(
3210 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3223 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3211 )
3224 )
3212 ui.writenoi18n(
3225 ui.writenoi18n(
3213 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3226 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3214 )
3227 )
3215 ui.writenoi18n(
3228 ui.writenoi18n(
3216 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3229 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3217 )
3230 )
3218
3231
3219
3232
3220 @command(
3233 @command(
3221 b'debugrevlogindex',
3234 b'debugrevlogindex',
3222 cmdutil.debugrevlogopts
3235 cmdutil.debugrevlogopts
3223 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3236 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3224 _(b'[-f FORMAT] -c|-m|FILE'),
3237 _(b'[-f FORMAT] -c|-m|FILE'),
3225 optionalrepo=True,
3238 optionalrepo=True,
3226 )
3239 )
3227 def debugrevlogindex(ui, repo, file_=None, **opts):
3240 def debugrevlogindex(ui, repo, file_=None, **opts):
3228 """dump the contents of a revlog index"""
3241 """dump the contents of a revlog index"""
3229 opts = pycompat.byteskwargs(opts)
3242 opts = pycompat.byteskwargs(opts)
3230 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3243 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3231 format = opts.get(b'format', 0)
3244 format = opts.get(b'format', 0)
3232 if format not in (0, 1):
3245 if format not in (0, 1):
3233 raise error.Abort(_(b"unknown format %d") % format)
3246 raise error.Abort(_(b"unknown format %d") % format)
3234
3247
3235 if ui.debugflag:
3248 if ui.debugflag:
3236 shortfn = hex
3249 shortfn = hex
3237 else:
3250 else:
3238 shortfn = short
3251 shortfn = short
3239
3252
3240 # There might not be anything in r, so have a sane default
3253 # There might not be anything in r, so have a sane default
3241 idlen = 12
3254 idlen = 12
3242 for i in r:
3255 for i in r:
3243 idlen = len(shortfn(r.node(i)))
3256 idlen = len(shortfn(r.node(i)))
3244 break
3257 break
3245
3258
3246 if format == 0:
3259 if format == 0:
3247 if ui.verbose:
3260 if ui.verbose:
3248 ui.writenoi18n(
3261 ui.writenoi18n(
3249 b" rev offset length linkrev %s %s p2\n"
3262 b" rev offset length linkrev %s %s p2\n"
3250 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3263 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3251 )
3264 )
3252 else:
3265 else:
3253 ui.writenoi18n(
3266 ui.writenoi18n(
3254 b" rev linkrev %s %s p2\n"
3267 b" rev linkrev %s %s p2\n"
3255 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3268 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3256 )
3269 )
3257 elif format == 1:
3270 elif format == 1:
3258 if ui.verbose:
3271 if ui.verbose:
3259 ui.writenoi18n(
3272 ui.writenoi18n(
3260 (
3273 (
3261 b" rev flag offset length size link p1"
3274 b" rev flag offset length size link p1"
3262 b" p2 %s\n"
3275 b" p2 %s\n"
3263 )
3276 )
3264 % b"nodeid".rjust(idlen)
3277 % b"nodeid".rjust(idlen)
3265 )
3278 )
3266 else:
3279 else:
3267 ui.writenoi18n(
3280 ui.writenoi18n(
3268 b" rev flag size link p1 p2 %s\n"
3281 b" rev flag size link p1 p2 %s\n"
3269 % b"nodeid".rjust(idlen)
3282 % b"nodeid".rjust(idlen)
3270 )
3283 )
3271
3284
3272 for i in r:
3285 for i in r:
3273 node = r.node(i)
3286 node = r.node(i)
3274 if format == 0:
3287 if format == 0:
3275 try:
3288 try:
3276 pp = r.parents(node)
3289 pp = r.parents(node)
3277 except Exception:
3290 except Exception:
3278 pp = [nullid, nullid]
3291 pp = [nullid, nullid]
3279 if ui.verbose:
3292 if ui.verbose:
3280 ui.write(
3293 ui.write(
3281 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3294 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3282 % (
3295 % (
3283 i,
3296 i,
3284 r.start(i),
3297 r.start(i),
3285 r.length(i),
3298 r.length(i),
3286 r.linkrev(i),
3299 r.linkrev(i),
3287 shortfn(node),
3300 shortfn(node),
3288 shortfn(pp[0]),
3301 shortfn(pp[0]),
3289 shortfn(pp[1]),
3302 shortfn(pp[1]),
3290 )
3303 )
3291 )
3304 )
3292 else:
3305 else:
3293 ui.write(
3306 ui.write(
3294 b"% 6d % 7d %s %s %s\n"
3307 b"% 6d % 7d %s %s %s\n"
3295 % (
3308 % (
3296 i,
3309 i,
3297 r.linkrev(i),
3310 r.linkrev(i),
3298 shortfn(node),
3311 shortfn(node),
3299 shortfn(pp[0]),
3312 shortfn(pp[0]),
3300 shortfn(pp[1]),
3313 shortfn(pp[1]),
3301 )
3314 )
3302 )
3315 )
3303 elif format == 1:
3316 elif format == 1:
3304 pr = r.parentrevs(i)
3317 pr = r.parentrevs(i)
3305 if ui.verbose:
3318 if ui.verbose:
3306 ui.write(
3319 ui.write(
3307 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3320 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3308 % (
3321 % (
3309 i,
3322 i,
3310 r.flags(i),
3323 r.flags(i),
3311 r.start(i),
3324 r.start(i),
3312 r.length(i),
3325 r.length(i),
3313 r.rawsize(i),
3326 r.rawsize(i),
3314 r.linkrev(i),
3327 r.linkrev(i),
3315 pr[0],
3328 pr[0],
3316 pr[1],
3329 pr[1],
3317 shortfn(node),
3330 shortfn(node),
3318 )
3331 )
3319 )
3332 )
3320 else:
3333 else:
3321 ui.write(
3334 ui.write(
3322 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3335 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3323 % (
3336 % (
3324 i,
3337 i,
3325 r.flags(i),
3338 r.flags(i),
3326 r.rawsize(i),
3339 r.rawsize(i),
3327 r.linkrev(i),
3340 r.linkrev(i),
3328 pr[0],
3341 pr[0],
3329 pr[1],
3342 pr[1],
3330 shortfn(node),
3343 shortfn(node),
3331 )
3344 )
3332 )
3345 )
3333
3346
3334
3347
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # Ordered pipeline of (stage name, transform) pairs; each transform
    # consumes the tree produced by the previous stage.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {stagename for stagename, fn in stages}

    # Which stages get their tree printed: always, or only when the tree
    # actually changed from the previously printed one.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
        if opts[b'optimize']:
            showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for stagename in opts[b'show_stage']:
            if stagename not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % stagename)
        showalways.update(opts[b'show_stage'])

    # Run the pipeline, remembering each intermediate tree so that
    # --verify-optimized can compare the analyzed and optimized results.
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for stagename, fn in stages:
        treebystage[stagename] = tree = fn(tree)
        if stagename in showalways or (
            stagename in showchanged and tree != printedtree
        ):
            if opts[b'show_stage'] or stagename != b'parsed':
                ui.write(b"* %s:\n" % stagename)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # The two evaluations disagree: emit a unified-diff style listing
        # of the revision sequences and signal failure to the caller.
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        matcher = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in matcher.get_opcodes():
            if tag in ('delete', 'replace'):
                for rev in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % rev, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for rev in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % rev, label=b'diff.inserted')
            if tag == 'equal':
                for rev in arevs[alo:ahi]:
                    ui.write(b' %d\n' % rev)
        return 1

    matchfn = revset.makematcher(tree)
    revs = matchfn(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for rev in revs:
        ui.write(b"%d\n" % rev)
3467
3480
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    # Validate the option combination before opening any log handles.
    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    logfh = None
    if opts[b'logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    server = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    server.serve_forever()
3516
3529
3517
3530
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used.
    Using it will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command only updates the p1 and p2 fields of the dirstate, touching
    nothing else. This is useful for writing repository conversion tools,
    but must be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only use it if you are one of the few
    people who deeply understand both conversion tools and file level
    histories. If you are reading this help, you are not one of those
    people (most of them sailed west from Mithlond anyway).

    So one last time: DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    # Resolve both arguments to binary nodes; the second parent defaults
    # to the null revision when omitted.
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    # The working-copy lock guards the dirstate write.
    with repo.wlock():
        repo.setparents(node1, node2)
3545
3558
3546
3559
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the first positional argument is actually the
    # revision, so shuffle the arguments accordingly.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    storage = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    # Reach through filelog-style wrappers to the underlying revlog.
    storage = getattr(storage, '_revlog', storage)
    try:
        sidedata = storage.sidedata(storage.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        entries = sorted(sidedata.items())
        ui.writenoi18n(b'%d sidedata entries\n' % len(entries))
        for key, value in entries:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b'  %s\n' % stringutil.pprint(value))
3573
3586
3574
3587
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)

    # Only schemes with a well-known default port are supported.
    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme not in defaultport:
        raise error.Abort(_(b"only https and ssh connections are supported"))
    try:
        addr = (url.host, int(url.port or defaultport[url.scheme]))
    except ValueError:
        raise error.Abort(_(b"malformed port number in URL"))

    from . import win32

    # Certificate verification is intentionally disabled: we only want the
    # peer's raw certificate to hand to the Windows chain-building API.
    # NOTE(review): ssl.wrap_socket is deprecated (removed in Python 3.12);
    # an SSLContext-based replacement should be considered — confirm against
    # the Python versions this codebase supports.
    sock = ssl.wrap_socket(
        socket.socket(),
        ssl_version=ssl.PROTOCOL_TLS,
        cert_reqs=ssl.CERT_NONE,
        ca_certs=None,
    )

    try:
        sock.connect(addr)
        cert = sock.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if complete:
            ui.status(_(b'full certificate chain is available\n'))
        else:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            # Second call with build=True asks Windows Update to fetch the
            # missing intermediates/root.
            if win32.checkcertificatechain(cert):
                ui.status(_(b'done.\n'))
            else:
                ui.status(_(b'failed.\n'))
    finally:
        sock.close()
3644
3657
3645
3658
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect the strip backups, most recently modified first.
    backups = [
        path
        for path in glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        if os.path.isfile(path)
    ]
    backups.sort(key=os.path.getmtime, reverse=True)

    opts = pycompat.byteskwargs(opts)
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to `limit` changesets from the bundle, honoring the
        # --newest-first and --no-merges log options.
        if opts.get(b"newest_first"):
            chlist.reverse()
        shown = 0
        for n in chlist:
            if limit is not None and shown >= limit:
                break
            parents = [True for p in other.changelog.parents(n) if p != nullid]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            shown += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = ui.expandpath(os.path.relpath(backup, encoding.getcwd()))
        source, branches = hg.parseurl(source, opts.get(b"branch"))
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Silence the bundle-repo setup chatter while keeping any prior
        # quiet setting intact afterwards.
        quiet = ui.quiet
        try:
            ui.quiet = True
            other, chlist, cleanupfn = bundlerepo.getremotechanges(
                ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
            )
        except error.LookupError:
            continue
        finally:
            ui.quiet = quiet

        try:
            if not chlist:
                continue
            if recovernode:
                # Recovery mode: unbundle the first backup containing the
                # requested changeset, then stop scanning.
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        break
            else:
                # Listing mode: show the backup's timestamp header, then
                # either the bundle path (--verbose) or its changesets.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                    displayer = logcmdutil.changesetdisplayer(
                        ui, other, opts, False
                    )
                    display(other, chlist, displayer)
                    displayer.close()
        finally:
            cleanupfn()
3781
3794
3782
3795
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Print each subrepository entry recorded in the substate of the
    # given (or working-directory) revision: path, source URL and the
    # pinned revision. Entries are listed sorted by path.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source   %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3794
3807
3795
3808
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # Expose the ui and repo objects to the interactive session.
    shell_locals = {
        'ui': ui,
        'repo': repo,
    }

    code.interact(local=shell_locals)
3811
3824
3812
3825
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # Shared across all successorssets() calls so computation done for one
    # revision can be reused for the next.
    cache = {}
    closest = opts['closest']
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % bytes(ctx))
        successors_sets = obsutil.successorssets(
            repo, ctx.node(), closest=closest, cache=cache
        )
        for succsset in successors_sets:
            if succsset:
                # One indented line per set, nodes separated by spaces.
                ui.write(b' ')
                ui.write(short(succsset[0]))
                for node in succsset[1:]:
                    ui.write(b' ')
                    ui.write(short(node))
            ui.write(b'\n')
3867
3880
3868
3881
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    fnodescache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    hgtagslog = repo.file(b'.hgtags')
    for rev in repo:
        node = repo[rev].node()
        fnode = fnodescache.getfnode(node, computemissing=False)
        # Three states: None (never computed), falsy-but-set (invalid entry),
        # or a real filenode that may or may not exist in the filelog.
        if fnode is None:
            display = b'missing'
        elif fnode:
            display = hex(fnode)
            if not hgtagslog.hasnode(fnode):
                display += b' (unknown node)'
        else:
            display = b'invalid'

        ui.write(b'%d %s %s\n' % (rev, hex(node), display))
3887
3900
3888
3901
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    # Resolve --rev first; it requires a repository.
    revs = None
    if opts['rev']:
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = scmutil.revrange(repo, opts['rev'])

    # Collect -D KEY=VALUE definitions; 'ui' is reserved and empty keys
    # are rejected.
    props = {}
    for definition in opts['define']:
        try:
            key, value = (part.strip() for part in definition.split(b'=', 1))
            if not key or key == b'ui':
                raise ValueError
            props[key] = value
        except ValueError:
            raise error.Abort(
                _(b'malformed keyword definition: %s') % definition
            )

    if ui.verbose:
        # Show the parsed tree, and the alias-expanded tree when it differs.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        expanded = templater.expandaliases(tree, aliases)
        if expanded != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(expanded), b'\n'
            )

    if revs is None:
        # Generic template: render once with the default resources.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for rev in revs:
            displayer.show(repo[rev], **pycompat.strkwargs(props))
        displayer.close()
3952
3965
3953
3966
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # getpass() may return None (e.g. non-interactive ui); make that visible.
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
3968
3981
3969
3982
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo whatever ui.prompt() returns so tests can observe prompt handling.
    answer = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % answer)
3982
3995
3983
3996
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the working-directory lock and the store lock so cache
    # files can be rewritten without racing other writers.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(full=True)
3989
4002
3990
4003
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlog will be upgraded. You can restrict this using flag
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # Deduplicate the requested optimizations before delegating; all the
    # real work lives in the upgrade module.
    optimizations = set(optimize)
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=optimizations, backup=backup, **opts
    )
4040
4053
4041
4054
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    wctx = repo[None]
    matcher = scmutil.match(wctx, pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(matcher), b'\n')
    matched = list(wctx.walk(matcher))
    if not matched:
        return

    # Identity by default; normalize path separators when ui.slash is set
    # on platforms whose native separator is not '/'.
    def display(fn):
        return fn

    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':

        def display(fn):
            return util.normpath(fn)

    # Column widths are sized to the longest repo-relative and cwd-relative
    # paths so the output lines up.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(fname) for fname in matched),
        max(len(repo.pathto(fname)) for fname in matched),
    )
    for fname in matched:
        line = fmt % (
            fname,
            display(repo.pathto(fname)),
            b'exact' if matcher.exact(fname) else b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4068
4081
4069
4082
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        # Format any divergent nodes as "hex (phase)" pairs, trailed by a
        # space so they concatenate cleanly with the reason text.
        dnodes = b''
        divergent = entry.get(b'divergentnodes')
        if divergent:
            formatted = [
                b'%s (%s)' % (ctx.hex(), ctx.phasestr()) for ctx in divergent
            ]
            dnodes = b' '.join(formatted) + b' '
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4087
4100
4088
4101
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    try:
        # Drop the generic remote options; only the command-specific ones
        # are forwarded over the wire.
        for remoteopt in cmdutil.remoteopts:
            del opts[remoteopt[1]]
        # Forward only options that were actually set.
        args = {k: v for k, v in pycompat.iteritems(opts) if v}
        args = pycompat.strkwargs(args)
        # run twice to check that we don't mess up the stream for the next command
        res1 = peer.debugwireargs(*vals, **args)
        res2 = peer.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % res1)
        if res1 != res2:
            ui.warn(b"%s\n" % res2)
    finally:
        peer.close()
4119
4132
4120
4133
4121 def _parsewirelangblocks(fh):
4134 def _parsewirelangblocks(fh):
4122 activeaction = None
4135 activeaction = None
4123 blocklines = []
4136 blocklines = []
4124 lastindent = 0
4137 lastindent = 0
4125
4138
4126 for line in fh:
4139 for line in fh:
4127 line = line.rstrip()
4140 line = line.rstrip()
4128 if not line:
4141 if not line:
4129 continue
4142 continue
4130
4143
4131 if line.startswith(b'#'):
4144 if line.startswith(b'#'):
4132 continue
4145 continue
4133
4146
4134 if not line.startswith(b' '):
4147 if not line.startswith(b' '):
4135 # New block. Flush previous one.
4148 # New block. Flush previous one.
4136 if activeaction:
4149 if activeaction:
4137 yield activeaction, blocklines
4150 yield activeaction, blocklines
4138
4151
4139 activeaction = line
4152 activeaction = line
4140 blocklines = []
4153 blocklines = []
4141 lastindent = 0
4154 lastindent = 0
4142 continue
4155 continue
4143
4156
4144 # Else we start with an indent.
4157 # Else we start with an indent.
4145
4158
4146 if not activeaction:
4159 if not activeaction:
4147 raise error.Abort(_(b'indented line outside of block'))
4160 raise error.Abort(_(b'indented line outside of block'))
4148
4161
4149 indent = len(line) - len(line.lstrip())
4162 indent = len(line) - len(line.lstrip())
4150
4163
4151 # If this line is indented more than the last line, concatenate it.
4164 # If this line is indented more than the last line, concatenate it.
4152 if indent > lastindent and blocklines:
4165 if indent > lastindent and blocklines:
4153 blocklines[-1] += line.lstrip()
4166 blocklines[-1] += line.lstrip()
4154 else:
4167 else:
4155 blocklines.append(line)
4168 blocklines.append(line)
4156 lastindent = indent
4169 lastindent = indent
4157
4170
4158 # Flush last block.
4171 # Flush last block.
4159 if activeaction:
4172 if activeaction:
4160 yield activeaction, blocklines
4173 yield activeaction, blocklines
4161
4174
4162
4175
4163 @command(
4176 @command(
4164 b'debugwireproto',
4177 b'debugwireproto',
4165 [
4178 [
4166 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4179 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4167 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4180 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4168 (
4181 (
4169 b'',
4182 b'',
4170 b'noreadstderr',
4183 b'noreadstderr',
4171 False,
4184 False,
4172 _(b'do not read from stderr of the remote'),
4185 _(b'do not read from stderr of the remote'),
4173 ),
4186 ),
4174 (
4187 (
4175 b'',
4188 b'',
4176 b'nologhandshake',
4189 b'nologhandshake',
4177 False,
4190 False,
4178 _(b'do not log I/O related to the peer handshake'),
4191 _(b'do not log I/O related to the peer handshake'),
4179 ),
4192 ),
4180 ]
4193 ]
4181 + cmdutil.remoteopts,
4194 + cmdutil.remoteopts,
4182 _(b'[PATH]'),
4195 _(b'[PATH]'),
4183 optionalrepo=True,
4196 optionalrepo=True,
4184 )
4197 )
4185 def debugwireproto(ui, repo, path=None, **opts):
4198 def debugwireproto(ui, repo, path=None, **opts):
4186 """send wire protocol commands to a server
4199 """send wire protocol commands to a server
4187
4200
4188 This command can be used to issue wire protocol commands to remote
4201 This command can be used to issue wire protocol commands to remote
4189 peers and to debug the raw data being exchanged.
4202 peers and to debug the raw data being exchanged.
4190
4203
4191 ``--localssh`` will start an SSH server against the current repository
4204 ``--localssh`` will start an SSH server against the current repository
4192 and connect to that. By default, the connection will perform a handshake
4205 and connect to that. By default, the connection will perform a handshake
4193 and establish an appropriate peer instance.
4206 and establish an appropriate peer instance.
4194
4207
4195 ``--peer`` can be used to bypass the handshake protocol and construct a
4208 ``--peer`` can be used to bypass the handshake protocol and construct a
4196 peer instance using the specified class type. Valid values are ``raw``,
4209 peer instance using the specified class type. Valid values are ``raw``,
4197 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
4210 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
4198 raw data payloads and don't support higher-level command actions.
4211 raw data payloads and don't support higher-level command actions.
4199
4212
4200 ``--noreadstderr`` can be used to disable automatic reading from stderr
4213 ``--noreadstderr`` can be used to disable automatic reading from stderr
4201 of the peer (for SSH connections only). Disabling automatic reading of
4214 of the peer (for SSH connections only). Disabling automatic reading of
4202 stderr is useful for making output more deterministic.
4215 stderr is useful for making output more deterministic.
4203
4216
4204 Commands are issued via a mini language which is specified via stdin.
4217 Commands are issued via a mini language which is specified via stdin.
4205 The language consists of individual actions to perform. An action is
4218 The language consists of individual actions to perform. An action is
4206 defined by a block. A block is defined as a line with no leading
4219 defined by a block. A block is defined as a line with no leading
4207 space followed by 0 or more lines with leading space. Blocks are
4220 space followed by 0 or more lines with leading space. Blocks are
4208 effectively a high-level command with additional metadata.
4221 effectively a high-level command with additional metadata.
4209
4222
4210 Lines beginning with ``#`` are ignored.
4223 Lines beginning with ``#`` are ignored.
4211
4224
4212 The following sections denote available actions.
4225 The following sections denote available actions.
4213
4226
4214 raw
4227 raw
4215 ---
4228 ---
4216
4229
4217 Send raw data to the server.
4230 Send raw data to the server.
4218
4231
4219 The block payload contains the raw data to send as one atomic send
4232 The block payload contains the raw data to send as one atomic send
4220 operation. The data may not actually be delivered in a single system
4233 operation. The data may not actually be delivered in a single system
4221 call: it depends on the abilities of the transport being used.
4234 call: it depends on the abilities of the transport being used.
4222
4235
4223 Each line in the block is de-indented and concatenated. Then, that
4236 Each line in the block is de-indented and concatenated. Then, that
4224 value is evaluated as a Python b'' literal. This allows the use of
4237 value is evaluated as a Python b'' literal. This allows the use of
4225 backslash escaping, etc.
4238 backslash escaping, etc.
4226
4239
4227 raw+
4240 raw+
4228 ----
4241 ----
4229
4242
4230 Behaves like ``raw`` except flushes output afterwards.
4243 Behaves like ``raw`` except flushes output afterwards.
4231
4244
4232 command <X>
4245 command <X>
4233 -----------
4246 -----------
4234
4247
4235 Send a request to run a named command, whose name follows the ``command``
4248 Send a request to run a named command, whose name follows the ``command``
4236 string.
4249 string.
4237
4250
4238 Arguments to the command are defined as lines in this block. The format of
4251 Arguments to the command are defined as lines in this block. The format of
4239 each line is ``<key> <value>``. e.g.::
4252 each line is ``<key> <value>``. e.g.::
4240
4253
4241 command listkeys
4254 command listkeys
4242 namespace bookmarks
4255 namespace bookmarks
4243
4256
4244 If the value begins with ``eval:``, it will be interpreted as a Python
4257 If the value begins with ``eval:``, it will be interpreted as a Python
4245 literal expression. Otherwise values are interpreted as Python b'' literals.
4258 literal expression. Otherwise values are interpreted as Python b'' literals.
4246 This allows sending complex types and encoding special byte sequences via
4259 This allows sending complex types and encoding special byte sequences via
4247 backslash escaping.
4260 backslash escaping.
4248
4261
4249 The following arguments have special meaning:
4262 The following arguments have special meaning:
4250
4263
4251 ``PUSHFILE``
4264 ``PUSHFILE``
4252 When defined, the *push* mechanism of the peer will be used instead
4265 When defined, the *push* mechanism of the peer will be used instead
4253 of the static request-response mechanism and the content of the
4266 of the static request-response mechanism and the content of the
4254 file specified in the value of this argument will be sent as the
4267 file specified in the value of this argument will be sent as the
4255 command payload.
4268 command payload.
4256
4269
4257 This can be used to submit a local bundle file to the remote.
4270 This can be used to submit a local bundle file to the remote.
4258
4271
4259 batchbegin
4272 batchbegin
4260 ----------
4273 ----------
4261
4274
4262 Instruct the peer to begin a batched send.
4275 Instruct the peer to begin a batched send.
4263
4276
4264 All ``command`` blocks are queued for execution until the next
4277 All ``command`` blocks are queued for execution until the next
4265 ``batchsubmit`` block.
4278 ``batchsubmit`` block.
4266
4279
4267 batchsubmit
4280 batchsubmit
4268 -----------
4281 -----------
4269
4282
4270 Submit previously queued ``command`` blocks as a batch request.
4283 Submit previously queued ``command`` blocks as a batch request.
4271
4284
4272 This action MUST be paired with a ``batchbegin`` action.
4285 This action MUST be paired with a ``batchbegin`` action.
4273
4286
4274 httprequest <method> <path>
4287 httprequest <method> <path>
4275 ---------------------------
4288 ---------------------------
4276
4289
4277 (HTTP peer only)
4290 (HTTP peer only)
4278
4291
4279 Send an HTTP request to the peer.
4292 Send an HTTP request to the peer.
4280
4293
4281 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4294 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4282
4295
4283 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4296 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4284 headers to add to the request. e.g. ``Accept: foo``.
4297 headers to add to the request. e.g. ``Accept: foo``.
4285
4298
4286 The following arguments are special:
4299 The following arguments are special:
4287
4300
4288 ``BODYFILE``
4301 ``BODYFILE``
4289 The content of the file defined as the value to this argument will be
4302 The content of the file defined as the value to this argument will be
4290 transferred verbatim as the HTTP request body.
4303 transferred verbatim as the HTTP request body.
4291
4304
4292 ``frame <type> <flags> <payload>``
4305 ``frame <type> <flags> <payload>``
4293 Send a unified protocol frame as part of the request body.
4306 Send a unified protocol frame as part of the request body.
4294
4307
4295 All frames will be collected and sent as the body to the HTTP
4308 All frames will be collected and sent as the body to the HTTP
4296 request.
4309 request.
4297
4310
4298 close
4311 close
4299 -----
4312 -----
4300
4313
4301 Close the connection to the server.
4314 Close the connection to the server.
4302
4315
4303 flush
4316 flush
4304 -----
4317 -----
4305
4318
4306 Flush data written to the server.
4319 Flush data written to the server.
4307
4320
4308 readavailable
4321 readavailable
4309 -------------
4322 -------------
4310
4323
4311 Close the write end of the connection and read all available data from
4324 Close the write end of the connection and read all available data from
4312 the server.
4325 the server.
4313
4326
4314 If the connection to the server encompasses multiple pipes, we poll both
4327 If the connection to the server encompasses multiple pipes, we poll both
4315 pipes and read available data.
4328 pipes and read available data.
4316
4329
4317 readline
4330 readline
4318 --------
4331 --------
4319
4332
4320 Read a line of output from the server. If there are multiple output
4333 Read a line of output from the server. If there are multiple output
4321 pipes, reads only the main pipe.
4334 pipes, reads only the main pipe.
4322
4335
4323 ereadline
4336 ereadline
4324 ---------
4337 ---------
4325
4338
4326 Like ``readline``, but read from the stderr pipe, if available.
4339 Like ``readline``, but read from the stderr pipe, if available.
4327
4340
4328 read <X>
4341 read <X>
4329 --------
4342 --------
4330
4343
4331 ``read()`` N bytes from the server's main output pipe.
4344 ``read()`` N bytes from the server's main output pipe.
4332
4345
4333 eread <X>
4346 eread <X>
4334 ---------
4347 ---------
4335
4348
4336 ``read()`` N bytes from the server's stderr pipe, if available.
4349 ``read()`` N bytes from the server's stderr pipe, if available.
4337
4350
4338 Specifying Unified Frame-Based Protocol Frames
4351 Specifying Unified Frame-Based Protocol Frames
4339 ----------------------------------------------
4352 ----------------------------------------------
4340
4353
4341 It is possible to emit a *Unified Frame-Based Protocol* by using special
4354 It is possible to emit a *Unified Frame-Based Protocol* by using special
4342 syntax.
4355 syntax.
4343
4356
4344 A frame is composed as a type, flags, and payload. These can be parsed
4357 A frame is composed as a type, flags, and payload. These can be parsed
4345 from a string of the form:
4358 from a string of the form:
4346
4359
4347 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4360 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4348
4361
4349 ``request-id`` and ``stream-id`` are integers defining the request and
4362 ``request-id`` and ``stream-id`` are integers defining the request and
4350 stream identifiers.
4363 stream identifiers.
4351
4364
4352 ``type`` can be an integer value for the frame type or the string name
4365 ``type`` can be an integer value for the frame type or the string name
4353 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4366 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4354 ``command-name``.
4367 ``command-name``.
4355
4368
4356 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4369 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4357 components. Each component (and there can be just one) can be an integer
4370 components. Each component (and there can be just one) can be an integer
4358 or a flag name for stream flags or frame flags, respectively. Values are
4371 or a flag name for stream flags or frame flags, respectively. Values are
4359 resolved to integers and then bitwise OR'd together.
4372 resolved to integers and then bitwise OR'd together.
4360
4373
4361 ``payload`` represents the raw frame payload. If it begins with
4374 ``payload`` represents the raw frame payload. If it begins with
4362 ``cbor:``, the following string is evaluated as Python code and the
4375 ``cbor:``, the following string is evaluated as Python code and the
4363 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4376 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4364 as a Python byte string literal.
4377 as a Python byte string literal.
4365 """
4378 """
4366 opts = pycompat.byteskwargs(opts)
4379 opts = pycompat.byteskwargs(opts)
4367
4380
4368 if opts[b'localssh'] and not repo:
4381 if opts[b'localssh'] and not repo:
4369 raise error.Abort(_(b'--localssh requires a repository'))
4382 raise error.Abort(_(b'--localssh requires a repository'))
4370
4383
4371 if opts[b'peer'] and opts[b'peer'] not in (
4384 if opts[b'peer'] and opts[b'peer'] not in (
4372 b'raw',
4385 b'raw',
4373 b'http2',
4386 b'http2',
4374 b'ssh1',
4387 b'ssh1',
4375 b'ssh2',
4388 b'ssh2',
4376 ):
4389 ):
4377 raise error.Abort(
4390 raise error.Abort(
4378 _(b'invalid value for --peer'),
4391 _(b'invalid value for --peer'),
4379 hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
4392 hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
4380 )
4393 )
4381
4394
4382 if path and opts[b'localssh']:
4395 if path and opts[b'localssh']:
4383 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4396 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4384
4397
4385 if ui.interactive():
4398 if ui.interactive():
4386 ui.write(_(b'(waiting for commands on stdin)\n'))
4399 ui.write(_(b'(waiting for commands on stdin)\n'))
4387
4400
4388 blocks = list(_parsewirelangblocks(ui.fin))
4401 blocks = list(_parsewirelangblocks(ui.fin))
4389
4402
4390 proc = None
4403 proc = None
4391 stdin = None
4404 stdin = None
4392 stdout = None
4405 stdout = None
4393 stderr = None
4406 stderr = None
4394 opener = None
4407 opener = None
4395
4408
4396 if opts[b'localssh']:
4409 if opts[b'localssh']:
4397 # We start the SSH server in its own process so there is process
4410 # We start the SSH server in its own process so there is process
4398 # separation. This prevents a whole class of potential bugs around
4411 # separation. This prevents a whole class of potential bugs around
4399 # shared state from interfering with server operation.
4412 # shared state from interfering with server operation.
4400 args = procutil.hgcmd() + [
4413 args = procutil.hgcmd() + [
4401 b'-R',
4414 b'-R',
4402 repo.root,
4415 repo.root,
4403 b'debugserve',
4416 b'debugserve',
4404 b'--sshstdio',
4417 b'--sshstdio',
4405 ]
4418 ]
4406 proc = subprocess.Popen(
4419 proc = subprocess.Popen(
4407 pycompat.rapply(procutil.tonativestr, args),
4420 pycompat.rapply(procutil.tonativestr, args),
4408 stdin=subprocess.PIPE,
4421 stdin=subprocess.PIPE,
4409 stdout=subprocess.PIPE,
4422 stdout=subprocess.PIPE,
4410 stderr=subprocess.PIPE,
4423 stderr=subprocess.PIPE,
4411 bufsize=0,
4424 bufsize=0,
4412 )
4425 )
4413
4426
4414 stdin = proc.stdin
4427 stdin = proc.stdin
4415 stdout = proc.stdout
4428 stdout = proc.stdout
4416 stderr = proc.stderr
4429 stderr = proc.stderr
4417
4430
4418 # We turn the pipes into observers so we can log I/O.
4431 # We turn the pipes into observers so we can log I/O.
4419 if ui.verbose or opts[b'peer'] == b'raw':
4432 if ui.verbose or opts[b'peer'] == b'raw':
4420 stdin = util.makeloggingfileobject(
4433 stdin = util.makeloggingfileobject(
4421 ui, proc.stdin, b'i', logdata=True
4434 ui, proc.stdin, b'i', logdata=True
4422 )
4435 )
4423 stdout = util.makeloggingfileobject(
4436 stdout = util.makeloggingfileobject(
4424 ui, proc.stdout, b'o', logdata=True
4437 ui, proc.stdout, b'o', logdata=True
4425 )
4438 )
4426 stderr = util.makeloggingfileobject(
4439 stderr = util.makeloggingfileobject(
4427 ui, proc.stderr, b'e', logdata=True
4440 ui, proc.stderr, b'e', logdata=True
4428 )
4441 )
4429
4442
4430 # --localssh also implies the peer connection settings.
4443 # --localssh also implies the peer connection settings.
4431
4444
4432 url = b'ssh://localserver'
4445 url = b'ssh://localserver'
4433 autoreadstderr = not opts[b'noreadstderr']
4446 autoreadstderr = not opts[b'noreadstderr']
4434
4447
4435 if opts[b'peer'] == b'ssh1':
4448 if opts[b'peer'] == b'ssh1':
4436 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4449 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4437 peer = sshpeer.sshv1peer(
4450 peer = sshpeer.sshv1peer(
4438 ui,
4451 ui,
4439 url,
4452 url,
4440 proc,
4453 proc,
4441 stdin,
4454 stdin,
4442 stdout,
4455 stdout,
4443 stderr,
4456 stderr,
4444 None,
4457 None,
4445 autoreadstderr=autoreadstderr,
4458 autoreadstderr=autoreadstderr,
4446 )
4459 )
4447 elif opts[b'peer'] == b'ssh2':
4460 elif opts[b'peer'] == b'ssh2':
4448 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4461 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4449 peer = sshpeer.sshv2peer(
4462 peer = sshpeer.sshv2peer(
4450 ui,
4463 ui,
4451 url,
4464 url,
4452 proc,
4465 proc,
4453 stdin,
4466 stdin,
4454 stdout,
4467 stdout,
4455 stderr,
4468 stderr,
4456 None,
4469 None,
4457 autoreadstderr=autoreadstderr,
4470 autoreadstderr=autoreadstderr,
4458 )
4471 )
4459 elif opts[b'peer'] == b'raw':
4472 elif opts[b'peer'] == b'raw':
4460 ui.write(_(b'using raw connection to peer\n'))
4473 ui.write(_(b'using raw connection to peer\n'))
4461 peer = None
4474 peer = None
4462 else:
4475 else:
4463 ui.write(_(b'creating ssh peer from handshake results\n'))
4476 ui.write(_(b'creating ssh peer from handshake results\n'))
4464 peer = sshpeer.makepeer(
4477 peer = sshpeer.makepeer(
4465 ui,
4478 ui,
4466 url,
4479 url,
4467 proc,
4480 proc,
4468 stdin,
4481 stdin,
4469 stdout,
4482 stdout,
4470 stderr,
4483 stderr,
4471 autoreadstderr=autoreadstderr,
4484 autoreadstderr=autoreadstderr,
4472 )
4485 )
4473
4486
4474 elif path:
4487 elif path:
4475 # We bypass hg.peer() so we can proxy the sockets.
4488 # We bypass hg.peer() so we can proxy the sockets.
4476 # TODO consider not doing this because we skip
4489 # TODO consider not doing this because we skip
4477 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4490 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4478 u = util.url(path)
4491 u = util.url(path)
4479 if u.scheme != b'http':
4492 if u.scheme != b'http':
4480 raise error.Abort(_(b'only http:// paths are currently supported'))
4493 raise error.Abort(_(b'only http:// paths are currently supported'))
4481
4494
4482 url, authinfo = u.authinfo()
4495 url, authinfo = u.authinfo()
4483 openerargs = {
4496 openerargs = {
4484 'useragent': b'Mercurial debugwireproto',
4497 'useragent': b'Mercurial debugwireproto',
4485 }
4498 }
4486
4499
4487 # Turn pipes/sockets into observers so we can log I/O.
4500 # Turn pipes/sockets into observers so we can log I/O.
4488 if ui.verbose:
4501 if ui.verbose:
4489 openerargs.update(
4502 openerargs.update(
4490 {
4503 {
4491 'loggingfh': ui,
4504 'loggingfh': ui,
4492 'loggingname': b's',
4505 'loggingname': b's',
4493 'loggingopts': {
4506 'loggingopts': {
4494 'logdata': True,
4507 'logdata': True,
4495 'logdataapis': False,
4508 'logdataapis': False,
4496 },
4509 },
4497 }
4510 }
4498 )
4511 )
4499
4512
4500 if ui.debugflag:
4513 if ui.debugflag:
4501 openerargs['loggingopts']['logdataapis'] = True
4514 openerargs['loggingopts']['logdataapis'] = True
4502
4515
4503 # Don't send default headers when in raw mode. This allows us to
4516 # Don't send default headers when in raw mode. This allows us to
4504 # bypass most of the behavior of our URL handling code so we can
4517 # bypass most of the behavior of our URL handling code so we can
4505 # have near complete control over what's sent on the wire.
4518 # have near complete control over what's sent on the wire.
4506 if opts[b'peer'] == b'raw':
4519 if opts[b'peer'] == b'raw':
4507 openerargs['sendaccept'] = False
4520 openerargs['sendaccept'] = False
4508
4521
4509 opener = urlmod.opener(ui, authinfo, **openerargs)
4522 opener = urlmod.opener(ui, authinfo, **openerargs)
4510
4523
4511 if opts[b'peer'] == b'http2':
4524 if opts[b'peer'] == b'http2':
4512 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4525 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4513 # We go through makepeer() because we need an API descriptor for
4526 # We go through makepeer() because we need an API descriptor for
4514 # the peer instance to be useful.
4527 # the peer instance to be useful.
4515 with ui.configoverride(
4528 with ui.configoverride(
4516 {(b'experimental', b'httppeer.advertise-v2'): True}
4529 {(b'experimental', b'httppeer.advertise-v2'): True}
4517 ):
4530 ):
4518 if opts[b'nologhandshake']:
4531 if opts[b'nologhandshake']:
4519 ui.pushbuffer()
4532 ui.pushbuffer()
4520
4533
4521 peer = httppeer.makepeer(ui, path, opener=opener)
4534 peer = httppeer.makepeer(ui, path, opener=opener)
4522
4535
4523 if opts[b'nologhandshake']:
4536 if opts[b'nologhandshake']:
4524 ui.popbuffer()
4537 ui.popbuffer()
4525
4538
4526 if not isinstance(peer, httppeer.httpv2peer):
4539 if not isinstance(peer, httppeer.httpv2peer):
4527 raise error.Abort(
4540 raise error.Abort(
4528 _(
4541 _(
4529 b'could not instantiate HTTP peer for '
4542 b'could not instantiate HTTP peer for '
4530 b'wire protocol version 2'
4543 b'wire protocol version 2'
4531 ),
4544 ),
4532 hint=_(
4545 hint=_(
4533 b'the server may not have the feature '
4546 b'the server may not have the feature '
4534 b'enabled or is not allowing this '
4547 b'enabled or is not allowing this '
4535 b'client version'
4548 b'client version'
4536 ),
4549 ),
4537 )
4550 )
4538
4551
4539 elif opts[b'peer'] == b'raw':
4552 elif opts[b'peer'] == b'raw':
4540 ui.write(_(b'using raw connection to peer\n'))
4553 ui.write(_(b'using raw connection to peer\n'))
4541 peer = None
4554 peer = None
4542 elif opts[b'peer']:
4555 elif opts[b'peer']:
4543 raise error.Abort(
4556 raise error.Abort(
4544 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4557 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4545 )
4558 )
4546 else:
4559 else:
4547 peer = httppeer.makepeer(ui, path, opener=opener)
4560 peer = httppeer.makepeer(ui, path, opener=opener)
4548
4561
4549 # We /could/ populate stdin/stdout with sock.makefile()...
4562 # We /could/ populate stdin/stdout with sock.makefile()...
4550 else:
4563 else:
4551 raise error.Abort(_(b'unsupported connection configuration'))
4564 raise error.Abort(_(b'unsupported connection configuration'))
4552
4565
4553 batchedcommands = None
4566 batchedcommands = None
4554
4567
4555 # Now perform actions based on the parsed wire language instructions.
4568 # Now perform actions based on the parsed wire language instructions.
4556 for action, lines in blocks:
4569 for action, lines in blocks:
4557 if action in (b'raw', b'raw+'):
4570 if action in (b'raw', b'raw+'):
4558 if not stdin:
4571 if not stdin:
4559 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4572 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4560
4573
4561 # Concatenate the data together.
4574 # Concatenate the data together.
4562 data = b''.join(l.lstrip() for l in lines)
4575 data = b''.join(l.lstrip() for l in lines)
4563 data = stringutil.unescapestr(data)
4576 data = stringutil.unescapestr(data)
4564 stdin.write(data)
4577 stdin.write(data)
4565
4578
4566 if action == b'raw+':
4579 if action == b'raw+':
4567 stdin.flush()
4580 stdin.flush()
4568 elif action == b'flush':
4581 elif action == b'flush':
4569 if not stdin:
4582 if not stdin:
4570 raise error.Abort(_(b'cannot call flush on this peer'))
4583 raise error.Abort(_(b'cannot call flush on this peer'))
4571 stdin.flush()
4584 stdin.flush()
4572 elif action.startswith(b'command'):
4585 elif action.startswith(b'command'):
4573 if not peer:
4586 if not peer:
4574 raise error.Abort(
4587 raise error.Abort(
4575 _(
4588 _(
4576 b'cannot send commands unless peer instance '
4589 b'cannot send commands unless peer instance '
4577 b'is available'
4590 b'is available'
4578 )
4591 )
4579 )
4592 )
4580
4593
4581 command = action.split(b' ', 1)[1]
4594 command = action.split(b' ', 1)[1]
4582
4595
4583 args = {}
4596 args = {}
4584 for line in lines:
4597 for line in lines:
4585 # We need to allow empty values.
4598 # We need to allow empty values.
4586 fields = line.lstrip().split(b' ', 1)
4599 fields = line.lstrip().split(b' ', 1)
4587 if len(fields) == 1:
4600 if len(fields) == 1:
4588 key = fields[0]
4601 key = fields[0]
4589 value = b''
4602 value = b''
4590 else:
4603 else:
4591 key, value = fields
4604 key, value = fields
4592
4605
4593 if value.startswith(b'eval:'):
4606 if value.startswith(b'eval:'):
4594 value = stringutil.evalpythonliteral(value[5:])
4607 value = stringutil.evalpythonliteral(value[5:])
4595 else:
4608 else:
4596 value = stringutil.unescapestr(value)
4609 value = stringutil.unescapestr(value)
4597
4610
4598 args[key] = value
4611 args[key] = value
4599
4612
4600 if batchedcommands is not None:
4613 if batchedcommands is not None:
4601 batchedcommands.append((command, args))
4614 batchedcommands.append((command, args))
4602 continue
4615 continue
4603
4616
4604 ui.status(_(b'sending %s command\n') % command)
4617 ui.status(_(b'sending %s command\n') % command)
4605
4618
4606 if b'PUSHFILE' in args:
4619 if b'PUSHFILE' in args:
4607 with open(args[b'PUSHFILE'], 'rb') as fh:
4620 with open(args[b'PUSHFILE'], 'rb') as fh:
4608 del args[b'PUSHFILE']
4621 del args[b'PUSHFILE']
4609 res, output = peer._callpush(
4622 res, output = peer._callpush(
4610 command, fh, **pycompat.strkwargs(args)
4623 command, fh, **pycompat.strkwargs(args)
4611 )
4624 )
4612 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4625 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4613 ui.status(
4626 ui.status(
4614 _(b'remote output: %s\n') % stringutil.escapestr(output)
4627 _(b'remote output: %s\n') % stringutil.escapestr(output)
4615 )
4628 )
4616 else:
4629 else:
4617 with peer.commandexecutor() as e:
4630 with peer.commandexecutor() as e:
4618 res = e.callcommand(command, args).result()
4631 res = e.callcommand(command, args).result()
4619
4632
4620 if isinstance(res, wireprotov2peer.commandresponse):
4633 if isinstance(res, wireprotov2peer.commandresponse):
4621 val = res.objects()
4634 val = res.objects()
4622 ui.status(
4635 ui.status(
4623 _(b'response: %s\n')
4636 _(b'response: %s\n')
4624 % stringutil.pprint(val, bprefix=True, indent=2)
4637 % stringutil.pprint(val, bprefix=True, indent=2)
4625 )
4638 )
4626 else:
4639 else:
4627 ui.status(
4640 ui.status(
4628 _(b'response: %s\n')
4641 _(b'response: %s\n')
4629 % stringutil.pprint(res, bprefix=True, indent=2)
4642 % stringutil.pprint(res, bprefix=True, indent=2)
4630 )
4643 )
4631
4644
4632 elif action == b'batchbegin':
4645 elif action == b'batchbegin':
4633 if batchedcommands is not None:
4646 if batchedcommands is not None:
4634 raise error.Abort(_(b'nested batchbegin not allowed'))
4647 raise error.Abort(_(b'nested batchbegin not allowed'))
4635
4648
4636 batchedcommands = []
4649 batchedcommands = []
4637 elif action == b'batchsubmit':
4650 elif action == b'batchsubmit':
4638 # There is a batching API we could go through. But it would be
4651 # There is a batching API we could go through. But it would be
4639 # difficult to normalize requests into function calls. It is easier
4652 # difficult to normalize requests into function calls. It is easier
4640 # to bypass this layer and normalize to commands + args.
4653 # to bypass this layer and normalize to commands + args.
4641 ui.status(
4654 ui.status(
4642 _(b'sending batch with %d sub-commands\n')
4655 _(b'sending batch with %d sub-commands\n')
4643 % len(batchedcommands)
4656 % len(batchedcommands)
4644 )
4657 )
4645 assert peer is not None
4658 assert peer is not None
4646 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4659 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4647 ui.status(
4660 ui.status(
4648 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4661 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4649 )
4662 )
4650
4663
4651 batchedcommands = None
4664 batchedcommands = None
4652
4665
4653 elif action.startswith(b'httprequest '):
4666 elif action.startswith(b'httprequest '):
4654 if not opener:
4667 if not opener:
4655 raise error.Abort(
4668 raise error.Abort(
4656 _(b'cannot use httprequest without an HTTP peer')
4669 _(b'cannot use httprequest without an HTTP peer')
4657 )
4670 )
4658
4671
4659 request = action.split(b' ', 2)
4672 request = action.split(b' ', 2)
4660 if len(request) != 3:
4673 if len(request) != 3:
4661 raise error.Abort(
4674 raise error.Abort(
4662 _(
4675 _(
4663 b'invalid httprequest: expected format is '
4676 b'invalid httprequest: expected format is '
4664 b'"httprequest <method> <path>'
4677 b'"httprequest <method> <path>'
4665 )
4678 )
4666 )
4679 )
4667
4680
4668 method, httppath = request[1:]
4681 method, httppath = request[1:]
4669 headers = {}
4682 headers = {}
4670 body = None
4683 body = None
4671 frames = []
4684 frames = []
4672 for line in lines:
4685 for line in lines:
4673 line = line.lstrip()
4686 line = line.lstrip()
4674 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4687 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4675 if m:
4688 if m:
4676 # Headers need to use native strings.
4689 # Headers need to use native strings.
4677 key = pycompat.strurl(m.group(1))
4690 key = pycompat.strurl(m.group(1))
4678 value = pycompat.strurl(m.group(2))
4691 value = pycompat.strurl(m.group(2))
4679 headers[key] = value
4692 headers[key] = value
4680 continue
4693 continue
4681
4694
4682 if line.startswith(b'BODYFILE '):
4695 if line.startswith(b'BODYFILE '):
4683 with open(line.split(b' ', 1), b'rb') as fh:
4696 with open(line.split(b' ', 1), b'rb') as fh:
4684 body = fh.read()
4697 body = fh.read()
4685 elif line.startswith(b'frame '):
4698 elif line.startswith(b'frame '):
4686 frame = wireprotoframing.makeframefromhumanstring(
4699 frame = wireprotoframing.makeframefromhumanstring(
4687 line[len(b'frame ') :]
4700 line[len(b'frame ') :]
4688 )
4701 )
4689
4702
4690 frames.append(frame)
4703 frames.append(frame)
4691 else:
4704 else:
4692 raise error.Abort(
4705 raise error.Abort(
4693 _(b'unknown argument to httprequest: %s') % line
4706 _(b'unknown argument to httprequest: %s') % line
4694 )
4707 )
4695
4708
4696 url = path + httppath
4709 url = path + httppath
4697
4710
4698 if frames:
4711 if frames:
4699 body = b''.join(bytes(f) for f in frames)
4712 body = b''.join(bytes(f) for f in frames)
4700
4713
4701 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4714 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4702
4715
4703 # urllib.Request insists on using has_data() as a proxy for
4716 # urllib.Request insists on using has_data() as a proxy for
4704 # determining the request method. Override that to use our
4717 # determining the request method. Override that to use our
4705 # explicitly requested method.
4718 # explicitly requested method.
4706 req.get_method = lambda: pycompat.sysstr(method)
4719 req.get_method = lambda: pycompat.sysstr(method)
4707
4720
4708 try:
4721 try:
4709 res = opener.open(req)
4722 res = opener.open(req)
4710 body = res.read()
4723 body = res.read()
4711 except util.urlerr.urlerror as e:
4724 except util.urlerr.urlerror as e:
4712 # read() method must be called, but only exists in Python 2
4725 # read() method must be called, but only exists in Python 2
4713 getattr(e, 'read', lambda: None)()
4726 getattr(e, 'read', lambda: None)()
4714 continue
4727 continue
4715
4728
4716 ct = res.headers.get('Content-Type')
4729 ct = res.headers.get('Content-Type')
4717 if ct == 'application/mercurial-cbor':
4730 if ct == 'application/mercurial-cbor':
4718 ui.write(
4731 ui.write(
4719 _(b'cbor> %s\n')
4732 _(b'cbor> %s\n')
4720 % stringutil.pprint(
4733 % stringutil.pprint(
4721 cborutil.decodeall(body), bprefix=True, indent=2
4734 cborutil.decodeall(body), bprefix=True, indent=2
4722 )
4735 )
4723 )
4736 )
4724
4737
4725 elif action == b'close':
4738 elif action == b'close':
4726 assert peer is not None
4739 assert peer is not None
4727 peer.close()
4740 peer.close()
4728 elif action == b'readavailable':
4741 elif action == b'readavailable':
4729 if not stdout or not stderr:
4742 if not stdout or not stderr:
4730 raise error.Abort(
4743 raise error.Abort(
4731 _(b'readavailable not available on this peer')
4744 _(b'readavailable not available on this peer')
4732 )
4745 )
4733
4746
4734 stdin.close()
4747 stdin.close()
4735 stdout.read()
4748 stdout.read()
4736 stderr.read()
4749 stderr.read()
4737
4750
4738 elif action == b'readline':
4751 elif action == b'readline':
4739 if not stdout:
4752 if not stdout:
4740 raise error.Abort(_(b'readline not available on this peer'))
4753 raise error.Abort(_(b'readline not available on this peer'))
4741 stdout.readline()
4754 stdout.readline()
4742 elif action == b'ereadline':
4755 elif action == b'ereadline':
4743 if not stderr:
4756 if not stderr:
4744 raise error.Abort(_(b'ereadline not available on this peer'))
4757 raise error.Abort(_(b'ereadline not available on this peer'))
4745 stderr.readline()
4758 stderr.readline()
4746 elif action.startswith(b'read '):
4759 elif action.startswith(b'read '):
4747 count = int(action.split(b' ', 1)[1])
4760 count = int(action.split(b' ', 1)[1])
4748 if not stdout:
4761 if not stdout:
4749 raise error.Abort(_(b'read not available on this peer'))
4762 raise error.Abort(_(b'read not available on this peer'))
4750 stdout.read(count)
4763 stdout.read(count)
4751 elif action.startswith(b'eread '):
4764 elif action.startswith(b'eread '):
4752 count = int(action.split(b' ', 1)[1])
4765 count = int(action.split(b' ', 1)[1])
4753 if not stderr:
4766 if not stderr:
4754 raise error.Abort(_(b'eread not available on this peer'))
4767 raise error.Abort(_(b'eread not available on this peer'))
4755 stderr.read(count)
4768 stderr.read(count)
4756 else:
4769 else:
4757 raise error.Abort(_(b'unknown action: %s') % action)
4770 raise error.Abort(_(b'unknown action: %s') % action)
4758
4771
4759 if batchedcommands is not None:
4772 if batchedcommands is not None:
4760 raise error.Abort(_(b'unclosed "batchbegin" request'))
4773 raise error.Abort(_(b'unclosed "batchbegin" request'))
4761
4774
4762 if peer:
4775 if peer:
4763 peer.close()
4776 peer.close()
4764
4777
4765 if proc:
4778 if proc:
4766 proc.kill()
4779 proc.kill()
@@ -1,865 +1,873 b''
1 # formatter.py - generic output formatting for mercurial
1 # formatter.py - generic output formatting for mercurial
2 #
2 #
3 # Copyright 2012 Matt Mackall <mpm@selenic.com>
3 # Copyright 2012 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 """Generic output formatting for Mercurial
8 """Generic output formatting for Mercurial
9
9
10 The formatter provides API to show data in various ways. The following
10 The formatter provides API to show data in various ways. The following
11 functions should be used in place of ui.write():
11 functions should be used in place of ui.write():
12
12
13 - fm.write() for unconditional output
13 - fm.write() for unconditional output
14 - fm.condwrite() to show some extra data conditionally in plain output
14 - fm.condwrite() to show some extra data conditionally in plain output
15 - fm.context() to provide changectx to template output
15 - fm.context() to provide changectx to template output
16 - fm.data() to provide extra data to JSON or template output
16 - fm.data() to provide extra data to JSON or template output
17 - fm.plain() to show raw text that isn't provided to JSON or template output
17 - fm.plain() to show raw text that isn't provided to JSON or template output
18
18
19 To show structured data (e.g. date tuples, dicts, lists), apply fm.format*()
19 To show structured data (e.g. date tuples, dicts, lists), apply fm.format*()
20 beforehand so the data is converted to the appropriate data type. Use
20 beforehand so the data is converted to the appropriate data type. Use
21 fm.isplain() if you need to convert or format data conditionally which isn't
21 fm.isplain() if you need to convert or format data conditionally which isn't
22 supported by the formatter API.
22 supported by the formatter API.
23
23
24 To build nested structure (i.e. a list of dicts), use fm.nested().
24 To build nested structure (i.e. a list of dicts), use fm.nested().
25
25
26 See also https://www.mercurial-scm.org/wiki/GenericTemplatingPlan
26 See also https://www.mercurial-scm.org/wiki/GenericTemplatingPlan
27
27
28 fm.condwrite() vs 'if cond:':
28 fm.condwrite() vs 'if cond:':
29
29
30 In most cases, use fm.condwrite() so users can selectively show the data
30 In most cases, use fm.condwrite() so users can selectively show the data
31 in template output. If it's costly to build data, use plain 'if cond:' with
31 in template output. If it's costly to build data, use plain 'if cond:' with
32 fm.write().
32 fm.write().
33
33
34 fm.nested() vs fm.formatdict() (or fm.formatlist()):
34 fm.nested() vs fm.formatdict() (or fm.formatlist()):
35
35
36 fm.nested() should be used to form a tree structure (a list of dicts of
36 fm.nested() should be used to form a tree structure (a list of dicts of
37 lists of dicts...) which can be accessed through template keywords, e.g.
37 lists of dicts...) which can be accessed through template keywords, e.g.
38 "{foo % "{bar % {...}} {baz % {...}}"}". On the other hand, fm.formatdict()
38 "{foo % "{bar % {...}} {baz % {...}}"}". On the other hand, fm.formatdict()
39 exports a dict-type object to template, which can be accessed by e.g.
39 exports a dict-type object to template, which can be accessed by e.g.
40 "{get(foo, key)}" function.
40 "{get(foo, key)}" function.
41
41
42 Doctest helper:
42 Doctest helper:
43
43
44 >>> def show(fn, verbose=False, **opts):
44 >>> def show(fn, verbose=False, **opts):
45 ... import sys
45 ... import sys
46 ... from . import ui as uimod
46 ... from . import ui as uimod
47 ... ui = uimod.ui()
47 ... ui = uimod.ui()
48 ... ui.verbose = verbose
48 ... ui.verbose = verbose
49 ... ui.pushbuffer()
49 ... ui.pushbuffer()
50 ... try:
50 ... try:
51 ... return fn(ui, ui.formatter(pycompat.sysbytes(fn.__name__),
51 ... return fn(ui, ui.formatter(pycompat.sysbytes(fn.__name__),
52 ... pycompat.byteskwargs(opts)))
52 ... pycompat.byteskwargs(opts)))
53 ... finally:
53 ... finally:
54 ... print(pycompat.sysstr(ui.popbuffer()), end='')
54 ... print(pycompat.sysstr(ui.popbuffer()), end='')
55
55
56 Basic example:
56 Basic example:
57
57
58 >>> def files(ui, fm):
58 >>> def files(ui, fm):
59 ... files = [(b'foo', 123, (0, 0)), (b'bar', 456, (1, 0))]
59 ... files = [(b'foo', 123, (0, 0)), (b'bar', 456, (1, 0))]
60 ... for f in files:
60 ... for f in files:
61 ... fm.startitem()
61 ... fm.startitem()
62 ... fm.write(b'path', b'%s', f[0])
62 ... fm.write(b'path', b'%s', f[0])
63 ... fm.condwrite(ui.verbose, b'date', b' %s',
63 ... fm.condwrite(ui.verbose, b'date', b' %s',
64 ... fm.formatdate(f[2], b'%Y-%m-%d %H:%M:%S'))
64 ... fm.formatdate(f[2], b'%Y-%m-%d %H:%M:%S'))
65 ... fm.data(size=f[1])
65 ... fm.data(size=f[1])
66 ... fm.plain(b'\\n')
66 ... fm.plain(b'\\n')
67 ... fm.end()
67 ... fm.end()
68 >>> show(files)
68 >>> show(files)
69 foo
69 foo
70 bar
70 bar
71 >>> show(files, verbose=True)
71 >>> show(files, verbose=True)
72 foo 1970-01-01 00:00:00
72 foo 1970-01-01 00:00:00
73 bar 1970-01-01 00:00:01
73 bar 1970-01-01 00:00:01
74 >>> show(files, template=b'json')
74 >>> show(files, template=b'json')
75 [
75 [
76 {
76 {
77 "date": [0, 0],
77 "date": [0, 0],
78 "path": "foo",
78 "path": "foo",
79 "size": 123
79 "size": 123
80 },
80 },
81 {
81 {
82 "date": [1, 0],
82 "date": [1, 0],
83 "path": "bar",
83 "path": "bar",
84 "size": 456
84 "size": 456
85 }
85 }
86 ]
86 ]
87 >>> show(files, template=b'path: {path}\\ndate: {date|rfc3339date}\\n')
87 >>> show(files, template=b'path: {path}\\ndate: {date|rfc3339date}\\n')
88 path: foo
88 path: foo
89 date: 1970-01-01T00:00:00+00:00
89 date: 1970-01-01T00:00:00+00:00
90 path: bar
90 path: bar
91 date: 1970-01-01T00:00:01+00:00
91 date: 1970-01-01T00:00:01+00:00
92
92
93 Nested example:
93 Nested example:
94
94
95 >>> def subrepos(ui, fm):
95 >>> def subrepos(ui, fm):
96 ... fm.startitem()
96 ... fm.startitem()
97 ... fm.write(b'reponame', b'[%s]\\n', b'baz')
97 ... fm.write(b'reponame', b'[%s]\\n', b'baz')
98 ... files(ui, fm.nested(b'files', tmpl=b'{reponame}'))
98 ... files(ui, fm.nested(b'files', tmpl=b'{reponame}'))
99 ... fm.end()
99 ... fm.end()
100 >>> show(subrepos)
100 >>> show(subrepos)
101 [baz]
101 [baz]
102 foo
102 foo
103 bar
103 bar
104 >>> show(subrepos, template=b'{reponame}: {join(files % "{path}", ", ")}\\n')
104 >>> show(subrepos, template=b'{reponame}: {join(files % "{path}", ", ")}\\n')
105 baz: foo, bar
105 baz: foo, bar
106 """
106 """
107
107
108 from __future__ import absolute_import, print_function
108 from __future__ import absolute_import, print_function
109
109
110 import contextlib
110 import contextlib
111 import itertools
111 import itertools
112 import os
112 import os
113
113
114 from .i18n import _
114 from .i18n import _
115 from .node import (
115 from .node import (
116 hex,
116 hex,
117 short,
117 short,
118 )
118 )
119 from .thirdparty import attr
119 from .thirdparty import attr
120
120
121 from . import (
121 from . import (
122 error,
122 error,
123 pycompat,
123 pycompat,
124 templatefilters,
124 templatefilters,
125 templatekw,
125 templatekw,
126 templater,
126 templater,
127 templateutil,
127 templateutil,
128 util,
128 util,
129 )
129 )
130 from .utils import (
130 from .utils import (
131 cborutil,
131 cborutil,
132 dateutil,
132 dateutil,
133 stringutil,
133 stringutil,
134 )
134 )
135
135
136 pickle = util.pickle
136 pickle = util.pickle
137
137
138
138
def isprintable(obj):
    """Return True if *obj* can be fed straight into a formatter's
    write() and data() functions.

    Anything else must first be pre-processed by formatdate(),
    formatdict(), or formatlist().
    """
    printable_types = (type(None), bool, int, pycompat.long, float, bytes)
    return isinstance(obj, printable_types)
147
147
148
148
149 class _nullconverter(object):
149 class _nullconverter(object):
150 '''convert non-primitive data types to be processed by formatter'''
150 '''convert non-primitive data types to be processed by formatter'''
151
151
152 # set to True if context object should be stored as item
152 # set to True if context object should be stored as item
153 storecontext = False
153 storecontext = False
154
154
155 @staticmethod
155 @staticmethod
156 def wrapnested(data, tmpl, sep):
156 def wrapnested(data, tmpl, sep):
157 '''wrap nested data by appropriate type'''
157 '''wrap nested data by appropriate type'''
158 return data
158 return data
159
159
160 @staticmethod
160 @staticmethod
161 def formatdate(date, fmt):
161 def formatdate(date, fmt):
162 '''convert date tuple to appropriate format'''
162 '''convert date tuple to appropriate format'''
163 # timestamp can be float, but the canonical form should be int
163 # timestamp can be float, but the canonical form should be int
164 ts, tz = date
164 ts, tz = date
165 return (int(ts), tz)
165 return (int(ts), tz)
166
166
167 @staticmethod
167 @staticmethod
168 def formatdict(data, key, value, fmt, sep):
168 def formatdict(data, key, value, fmt, sep):
169 '''convert dict or key-value pairs to appropriate dict format'''
169 '''convert dict or key-value pairs to appropriate dict format'''
170 # use plain dict instead of util.sortdict so that data can be
170 # use plain dict instead of util.sortdict so that data can be
171 # serialized as a builtin dict in pickle output
171 # serialized as a builtin dict in pickle output
172 return dict(data)
172 return dict(data)
173
173
174 @staticmethod
174 @staticmethod
175 def formatlist(data, name, fmt, sep):
175 def formatlist(data, name, fmt, sep):
176 '''convert iterable to appropriate list format'''
176 '''convert iterable to appropriate list format'''
177 return list(data)
177 return list(data)
178
178
179
179
class baseformatter(object):
    """Base class of all formatters: collects fields into an item dict and
    hands the completed item to _showitem() (overridden by subclasses)."""

    # set to True if the formater output a strict format that does not support
    # arbitrary output in the stream.
    strict_format = False

    def __init__(self, ui, topic, opts, converter):
        self._ui = ui
        self._topic = topic
        self._opts = opts
        # converter turns non-primitive values (dates, dicts, lists) into
        # whatever representation this output format wants
        self._converter = converter
        self._item = None  # current item dict; None until startitem()
        # function to convert node to string suitable for this output
        self.hexfunc = hex

    def __enter__(self):
        return self

    def __exit__(self, exctype, excvalue, traceback):
        # only finalize output on a clean exit; an exception propagates as-is
        if exctype is None:
            self.end()

    def _showitem(self):
        '''show a formatted item once all data is collected'''

    def startitem(self):
        '''begin an item in the format list'''
        # flush the previous item, if any, before starting a fresh dict
        if self._item is not None:
            self._showitem()
        self._item = {}

    def formatdate(self, date, fmt=b'%a %b %d %H:%M:%S %Y %1%2'):
        '''convert date tuple to appropriate format'''
        return self._converter.formatdate(date, fmt)

    def formatdict(self, data, key=b'key', value=b'value', fmt=None, sep=b' '):
        '''convert dict or key-value pairs to appropriate dict format'''
        return self._converter.formatdict(data, key, value, fmt, sep)

    def formatlist(self, data, name, fmt=None, sep=b' '):
        '''convert iterable to appropriate list format'''
        # name is mandatory argument for now, but it could be optional if
        # we have default template keyword, e.g. {item}
        return self._converter.formatlist(data, name, fmt, sep)

    def context(self, **ctxs):
        '''insert context objects to be used to render template keywords'''
        ctxs = pycompat.byteskwargs(ctxs)
        assert all(k in {b'repo', b'ctx', b'fctx'} for k in ctxs)
        if self._converter.storecontext:
            # populate missing resources in fctx -> ctx -> repo order
            if b'fctx' in ctxs and b'ctx' not in ctxs:
                ctxs[b'ctx'] = ctxs[b'fctx'].changectx()
            if b'ctx' in ctxs and b'repo' not in ctxs:
                ctxs[b'repo'] = ctxs[b'ctx'].repo()
            self._item.update(ctxs)

    def datahint(self):
        '''set of field names to be referenced'''
        return set()

    def data(self, **data):
        '''insert data into item that's not shown in default output'''
        data = pycompat.byteskwargs(data)
        self._item.update(data)

    def write(self, fields, deftext, *fielddata, **opts):
        '''do default text output while assigning data to item'''
        # deftext/opts are used only by the plain formatter; here the field
        # values are just recorded into the item
        fieldkeys = fields.split()
        assert len(fieldkeys) == len(fielddata), (fieldkeys, fielddata)
        self._item.update(zip(fieldkeys, fielddata))

    def condwrite(self, cond, fields, deftext, *fielddata, **opts):
        '''do conditional write (primarily for plain formatter)'''
        # structured formats always record the data regardless of cond
        fieldkeys = fields.split()
        assert len(fieldkeys) == len(fielddata)
        self._item.update(zip(fieldkeys, fielddata))

    def plain(self, text, **opts):
        '''show raw text for non-templated mode'''

    def isplain(self):
        '''check for plain formatter usage'''
        return False

    def nested(self, field, tmpl=None, sep=b''):
        '''sub formatter to store nested data in the specified field'''
        # items produced by the sub formatter accumulate in `data`, which is
        # stored (possibly wrapped) under `field` of the current item
        data = []
        self._item[field] = self._converter.wrapnested(data, tmpl, sep)
        return _nestedformatter(self._ui, self._converter, data)

    def end(self):
        '''end output for the formatter'''
        if self._item is not None:
            self._showitem()
270
275
271
276
def nullformatter(ui, topic, opts):
    '''formatter that prints nothing'''
    # the base class with the null converter collects items but never
    # renders them anywhere
    fm = baseformatter(ui, topic, opts, converter=_nullconverter)
    return fm
275
280
276
281
class _nestedformatter(baseformatter):
    '''build sub items and store them in the parent formatter'''

    def __init__(self, ui, converter, data):
        # `data` is the list created by baseformatter.nested(); completed
        # items are appended to it instead of being written out directly
        baseformatter.__init__(
            self, ui, topic=b'', opts={}, converter=converter
        )
        self._data = data

    def _showitem(self):
        self._data.append(self._item)
288
293
289
294
def _iteritems(data):
    '''iterate key-value pairs in stable order'''
    # non-dict inputs are assumed to already be (key, value) pairs and are
    # returned untouched; dicts are sorted by key for deterministic output
    if not isinstance(data, dict):
        return data
    return sorted(pycompat.iteritems(data))
295
300
296
301
class _plainconverter(object):
    '''convert non-primitive data types to text'''

    storecontext = False

    @staticmethod
    def wrapnested(data, tmpl, sep):
        # nested output is written directly by plainformatter.nested(),
        # so reaching this converter is a programming error
        raise error.ProgrammingError(b'plainformatter should never be nested')

    @staticmethod
    def formatdate(date, fmt):
        '''stringify date tuple in the given format'''
        return dateutil.datestr(date, fmt)

    @staticmethod
    def formatdict(data, key, value, fmt, sep):
        '''stringify key-value pairs separated by sep'''
        # without an explicit fmt, fall back to 'k=v' and stringify values
        prefmt = pycompat.identity
        if fmt is None:
            fmt = b'%s=%s'
            prefmt = pycompat.bytestr
        return sep.join(
            fmt % (prefmt(k), prefmt(v)) for k, v in _iteritems(data)
        )

    @staticmethod
    def formatlist(data, name, fmt, sep):
        '''stringify iterable separated by sep'''
        # without an explicit fmt, stringify each element with bytestr
        prefmt = pycompat.identity
        if fmt is None:
            fmt = b'%s'
            prefmt = pycompat.bytestr
        return sep.join(fmt % prefmt(e) for e in data)
330
335
331
336
class plainformatter(baseformatter):
    '''the default text output scheme'''

    def __init__(self, ui, out, topic, opts):
        baseformatter.__init__(self, ui, topic, opts, _plainconverter)
        # full hashes in debug mode, abbreviated ones otherwise
        if ui.debugflag:
            self.hexfunc = hex
        else:
            self.hexfunc = short
        # prefer ui.write (label/flush handling) when writing to the ui itself
        if ui is out:
            self._write = ui.write
        else:
            self._write = lambda s, **opts: out.write(s)

    def startitem(self):
        # plain output has no per-item structure to track
        pass

    def data(self, **data):
        # hidden data has no representation in plain output
        pass

    def write(self, fields, deftext, *fielddata, **opts):
        self._write(deftext % fielddata, **opts)

    def condwrite(self, cond, fields, deftext, *fielddata, **opts):
        '''do conditional write'''
        if cond:
            self._write(deftext % fielddata, **opts)

    def plain(self, text, **opts):
        self._write(text, **opts)

    def isplain(self):
        return True

    def nested(self, field, tmpl=None, sep=b''):
        # nested data will be directly written to ui
        return self

    def end(self):
        pass
372
377
373
378
class debugformatter(baseformatter):
    """formatter emitting items as a Python-literal list of dicts,
    preceded by '<topic> = ['"""

    def __init__(self, ui, out, topic, opts):
        baseformatter.__init__(self, ui, topic, opts, _nullconverter)
        self._out = out
        self._out.write(b"%s = [\n" % self._topic)

    def _showitem(self):
        self._out.write(
            b'    %s,\n' % stringutil.pprint(self._item, indent=4, level=1)
        )

    def end(self):
        baseformatter.end(self)
        self._out.write(b"]\n")
388
393
389
394
class pickleformatter(baseformatter):
    """formatter serializing all items as one pickled list at end()"""

    def __init__(self, ui, out, topic, opts):
        baseformatter.__init__(self, ui, topic, opts, _nullconverter)
        self._out = out
        # items are buffered; nothing is written until end()
        self._data = []

    def _showitem(self):
        self._data.append(self._item)

    def end(self):
        baseformatter.end(self)
        self._out.write(pickle.dumps(self._data))
402
407
403
408
class cborformatter(baseformatter):
    '''serialize items as an indefinite-length CBOR array'''

    def __init__(self, ui, out, topic, opts):
        baseformatter.__init__(self, ui, topic, opts, _nullconverter)
        self._out = out
        # open the indefinite-length array; items stream out one by one
        self._out.write(cborutil.BEGIN_INDEFINITE_ARRAY)

    def _showitem(self):
        self._out.write(b''.join(cborutil.streamencode(self._item)))

    def end(self):
        baseformatter.end(self)
        # terminate the indefinite-length array
        self._out.write(cborutil.BREAK)
418
423
419
424
class jsonformatter(baseformatter):
    """serialize items as a JSON array of objects"""

    # JSON does not tolerate arbitrary text interleaved in the stream
    strict_format = True

    def __init__(self, ui, out, topic, opts):
        baseformatter.__init__(self, ui, topic, opts, _nullconverter)
        self._out = out
        self._out.write(b"[")
        self._first = True

    def _showitem(self):
        # every item after the first is preceded by a comma
        if not self._first:
            self._out.write(b",")
        self._first = False

        serialized = []
        for name, value in sorted(self._item.items()):
            encoded = templatefilters.json(value, paranoid=False)
            serialized.append(b'  "%s": %s' % (name, encoded))
        self._out.write(b"\n {\n")
        self._out.write(b",\n".join(serialized))
        self._out.write(b"\n }")

    def end(self):
        baseformatter.end(self)
        self._out.write(b"\n]\n")
447
455
448
456
class _templateconverter(object):
    '''convert non-primitive data types to be processed by templater'''

    # context objects (repo/ctx/fctx) are kept so keywords can use them
    storecontext = True

    @staticmethod
    def wrapnested(data, tmpl, sep):
        '''wrap nested data by templatable type'''
        return templateutil.mappinglist(data, tmpl=tmpl, sep=sep)

    @staticmethod
    def formatdate(date, fmt):
        '''return date tuple'''
        return templateutil.date(date)

    @staticmethod
    def formatdict(data, key, value, fmt, sep):
        '''build object that can be evaluated as either plain string or dict'''
        data = util.sortdict(_iteritems(data))

        def f():
            # lazy plain-text rendering used when the template treats the
            # value as a string
            yield _plainconverter.formatdict(data, key, value, fmt, sep)

        return templateutil.hybriddict(
            data, key=key, value=value, fmt=fmt, gen=f
        )

    @staticmethod
    def formatlist(data, name, fmt, sep):
        '''build object that can be evaluated as either plain string or list'''
        data = list(data)

        def f():
            # lazy plain-text rendering used when the template treats the
            # value as a string
            yield _plainconverter.formatlist(data, name, fmt, sep)

        return templateutil.hybridlist(data, name=name, fmt=fmt, gen=f)
485
493
486
494
class templateformatter(baseformatter):
    """formatter rendering each item through a user/stock template"""

    def __init__(self, ui, out, topic, opts, spec, overridetemplates=None):
        baseformatter.__init__(self, ui, topic, opts, _templateconverter)
        self._out = out
        self._tref = spec.ref
        self._t = loadtemplater(
            ui,
            spec,
            defaults=templatekw.keywords,
            resources=templateresources(ui),
            cache=templatekw.defaulttempl,
        )
        if overridetemplates:
            self._t.cache.update(overridetemplates)
        # optional docheader/docfooter/separator parts surrounding the items
        self._parts = templatepartsmap(
            spec, self._t, [b'docheader', b'docfooter', b'separator']
        )
        self._counter = itertools.count()
        self._renderitem(b'docheader', {})

    def _showitem(self):
        item = self._item.copy()
        item[b'index'] = index = next(self._counter)
        # a separator is rendered between consecutive items, not before
        # the first one
        if index > 0:
            self._renderitem(b'separator', {})
        self._renderitem(self._tref, item)

    def _renderitem(self, part, item):
        if part not in self._parts:
            return
        ref = self._parts[part]
        # None can't be put in the mapping dict since it means <unset>
        for k, v in item.items():
            if v is None:
                item[k] = templateutil.wrappedvalue(v)
        self._out.write(self._t.render(ref, item))

    @util.propertycache
    def _symbolsused(self):
        return self._t.symbolsused(self._tref)

    def datahint(self):
        '''set of field names to be referenced from the template'''
        return self._symbolsused[0]

    def end(self):
        baseformatter.end(self)
        self._renderitem(b'docfooter', {})
535
543
536
544
@attr.s(frozen=True)
class templatespec(object):
    """Immutable description of where a template comes from (see
    lookuptemplate() for how each field is populated)."""

    # name of a built-in/[templates] reference (b'' for a literal template)
    ref = attr.ib()
    # literal template string, or None
    tmpl = attr.ib()
    # path to a map file defining a template environment, or None
    mapfile = attr.ib()
    # raw argument string of a function-style reference such as json(...)
    refargs = attr.ib(default=None)
    # already-open file object for mapfile, if any
    fp = attr.ib(default=None)
544
552
545
553
def empty_templatespec():
    """Return a spec carrying no reference, no literal, and no map file."""
    return templatespec(ref=None, tmpl=None, mapfile=None)
548
556
549
557
def reference_templatespec(ref, refargs=None):
    """Return a spec pointing at a named built-in or [templates] entry."""
    return templatespec(ref, None, None, refargs=refargs)
552
560
553
561
def literal_templatespec(tmpl):
    """Return a spec wrapping the literal template string *tmpl* (bytes)."""
    # template strings are bytes internally; reject unicode on Python 3
    assert not (pycompat.ispy3 and isinstance(tmpl, str)), (
        b'tmpl must not be a str'
    )
    return templatespec(b'', tmpl, None)
558
566
559
567
def mapfile_templatespec(topic, mapfile, fp=None):
    """Return a spec loading template *topic* from the given map file."""
    return templatespec(topic, tmpl=None, mapfile=mapfile, fp=fp)
562
570
563
571
def lookuptemplate(ui, topic, tmpl):
    """Find the template matching the given -T/--template spec 'tmpl'

    'tmpl' can be any of the following:

    - a literal template (e.g. '{rev}')
    - a reference to built-in template (i.e. formatter)
    - a map-file name or path (e.g. 'changelog')
    - a reference to [templates] in config file
    - a path to raw template file

    A map file defines a stand-alone template environment. If a map file
    selected, all templates defined in the file will be loaded, and the
    template matching the given topic will be rendered. Aliases won't be
    loaded from user config, but from the map file.

    If no map file selected, all templates in [templates] section will be
    available as well as aliases in [templatealias].
    """

    if not tmpl:
        return empty_templatespec()

    # looks like a literal template?
    if b'{' in tmpl:
        return literal_templatespec(tmpl)

    # a reference to built-in (formatter) template
    if tmpl in {b'cbor', b'json', b'pickle', b'debug'}:
        return reference_templatespec(tmpl)

    # a function-style reference to built-in template, e.g. json(...)
    func, fsep, ftail = tmpl.partition(b'(')
    if func in {b'cbor', b'json'} and fsep and ftail.endswith(b')'):
        templater.parseexpr(tmpl)  # make sure syntax errors are confined
        return reference_templatespec(func, refargs=ftail[:-1])

    # perhaps a stock style? (bare names only; a path would have a dirname)
    if not os.path.split(tmpl)[0]:
        (mapname, fp) = templater.try_open_template(
            b'map-cmdline.' + tmpl
        ) or templater.try_open_template(tmpl)
        if mapname:
            return mapfile_templatespec(topic, mapname, fp)

    # perhaps it's a reference to [templates]
    if ui.config(b'templates', tmpl):
        return reference_templatespec(tmpl)

    # 'list' is reserved to show the available styles
    if tmpl == b'list':
        ui.write(_(b"available styles: %s\n") % templater.stylelist())
        raise error.Abort(_(b"specify a template"))

    # perhaps it's a path to a map or a template
    if (b'/' in tmpl or b'\\' in tmpl) and os.path.isfile(tmpl):
        # is it a mapfile for a style?
        if os.path.basename(tmpl).startswith(b"map-"):
            return mapfile_templatespec(topic, os.path.realpath(tmpl))
        with util.posixfile(tmpl, b'rb') as f:
            tmpl = f.read()
        return literal_templatespec(tmpl)

    # constant string?
    return literal_templatespec(tmpl)
628
636
629
637
def templatepartsmap(spec, t, partnames):
    """Create a mapping of {part: ref}"""
    # the main template reference always maps to itself; it must exist in t
    mapping = {spec.ref: spec.ref}
    if spec.mapfile:
        # map files define parts under their plain names
        for part in partnames:
            if part in t:
                mapping[part] = part
    elif spec.ref:
        # config-based templates define parts as '<ref>:<part>' sub-sections
        for part in partnames:
            candidate = b'%s:%s' % (spec.ref, part)
            if candidate in t:
                mapping[part] = candidate
    return mapping
641
649
642
650
def loadtemplater(ui, spec, defaults=None, resources=None, cache=None):
    """Create a templater from either a literal template or loading from
    a map file"""
    # a spec carries either an inline template or a map file, never both
    assert not (spec.tmpl and spec.mapfile)
    if spec.mapfile:
        # a map file defines its own stand-alone template environment
        return templater.templater.frommapfile(
            spec.mapfile,
            spec.fp,
            defaults=defaults,
            resources=resources,
            cache=cache,
        )
    return maketemplater(
        ui, spec.tmpl, defaults=defaults, resources=resources, cache=cache
    )
658
666
659
667
def maketemplater(ui, tmpl, defaults=None, resources=None, cache=None):
    """Create a templater from a string template 'tmpl'"""
    # user-configured aliases and [templates] entries are always available
    aliases = ui.configitems(b'templatealias')
    t = templater.templater(
        defaults=defaults, resources=resources, cache=cache, aliases=aliases
    )
    t.cache.update(
        (k, templater.unquotestring(v)) for k, v in ui.configitems(b'templates')
    )
    if tmpl:
        # the literal template is registered under the empty-name key b''
        t.cache[b''] = tmpl
    return t
672
680
673
681
674 # marker to denote a resource to be loaded on demand based on mapping values
682 # marker to denote a resource to be loaded on demand based on mapping values
675 # (e.g. (ctx, path) -> fctx)
683 # (e.g. (ctx, path) -> fctx)
676 _placeholder = object()
684 _placeholder = object()
677
685
678
686
679 class templateresources(templater.resourcemapper):
687 class templateresources(templater.resourcemapper):
680 """Resource mapper designed for the default templatekw and function"""
688 """Resource mapper designed for the default templatekw and function"""
681
689
682 def __init__(self, ui, repo=None):
690 def __init__(self, ui, repo=None):
683 self._resmap = {
691 self._resmap = {
684 b'cache': {}, # for templatekw/funcs to store reusable data
692 b'cache': {}, # for templatekw/funcs to store reusable data
685 b'repo': repo,
693 b'repo': repo,
686 b'ui': ui,
694 b'ui': ui,
687 }
695 }
688
696
689 def availablekeys(self, mapping):
697 def availablekeys(self, mapping):
690 return {
698 return {
691 k for k in self.knownkeys() if self._getsome(mapping, k) is not None
699 k for k in self.knownkeys() if self._getsome(mapping, k) is not None
692 }
700 }
693
701
694 def knownkeys(self):
702 def knownkeys(self):
695 return {b'cache', b'ctx', b'fctx', b'repo', b'revcache', b'ui'}
703 return {b'cache', b'ctx', b'fctx', b'repo', b'revcache', b'ui'}
696
704
697 def lookup(self, mapping, key):
705 def lookup(self, mapping, key):
698 if key not in self.knownkeys():
706 if key not in self.knownkeys():
699 return None
707 return None
700 v = self._getsome(mapping, key)
708 v = self._getsome(mapping, key)
701 if v is _placeholder:
709 if v is _placeholder:
702 v = mapping[key] = self._loadermap[key](self, mapping)
710 v = mapping[key] = self._loadermap[key](self, mapping)
703 return v
711 return v
704
712
705 def populatemap(self, context, origmapping, newmapping):
713 def populatemap(self, context, origmapping, newmapping):
706 mapping = {}
714 mapping = {}
707 if self._hasnodespec(newmapping):
715 if self._hasnodespec(newmapping):
708 mapping[b'revcache'] = {} # per-ctx cache
716 mapping[b'revcache'] = {} # per-ctx cache
709 if self._hasnodespec(origmapping) and self._hasnodespec(newmapping):
717 if self._hasnodespec(origmapping) and self._hasnodespec(newmapping):
710 orignode = templateutil.runsymbol(context, origmapping, b'node')
718 orignode = templateutil.runsymbol(context, origmapping, b'node')
711 mapping[b'originalnode'] = orignode
719 mapping[b'originalnode'] = orignode
712 # put marker to override 'ctx'/'fctx' in mapping if any, and flag
720 # put marker to override 'ctx'/'fctx' in mapping if any, and flag
713 # its existence to be reported by availablekeys()
721 # its existence to be reported by availablekeys()
714 if b'ctx' not in newmapping and self._hasliteral(newmapping, b'node'):
722 if b'ctx' not in newmapping and self._hasliteral(newmapping, b'node'):
715 mapping[b'ctx'] = _placeholder
723 mapping[b'ctx'] = _placeholder
716 if b'fctx' not in newmapping and self._hasliteral(newmapping, b'path'):
724 if b'fctx' not in newmapping and self._hasliteral(newmapping, b'path'):
717 mapping[b'fctx'] = _placeholder
725 mapping[b'fctx'] = _placeholder
718 return mapping
726 return mapping
719
727
720 def _getsome(self, mapping, key):
728 def _getsome(self, mapping, key):
721 v = mapping.get(key)
729 v = mapping.get(key)
722 if v is not None:
730 if v is not None:
723 return v
731 return v
724 return self._resmap.get(key)
732 return self._resmap.get(key)
725
733
726 def _hasliteral(self, mapping, key):
734 def _hasliteral(self, mapping, key):
727 """Test if a literal value is set or unset in the given mapping"""
735 """Test if a literal value is set or unset in the given mapping"""
728 return key in mapping and not callable(mapping[key])
736 return key in mapping and not callable(mapping[key])
729
737
730 def _getliteral(self, mapping, key):
738 def _getliteral(self, mapping, key):
731 """Return value of the given name if it is a literal"""
739 """Return value of the given name if it is a literal"""
732 v = mapping.get(key)
740 v = mapping.get(key)
733 if callable(v):
741 if callable(v):
734 return None
742 return None
735 return v
743 return v
736
744
737 def _hasnodespec(self, mapping):
745 def _hasnodespec(self, mapping):
738 """Test if context revision is set or unset in the given mapping"""
746 """Test if context revision is set or unset in the given mapping"""
739 return b'node' in mapping or b'ctx' in mapping
747 return b'node' in mapping or b'ctx' in mapping
740
748
741 def _loadctx(self, mapping):
749 def _loadctx(self, mapping):
742 repo = self._getsome(mapping, b'repo')
750 repo = self._getsome(mapping, b'repo')
743 node = self._getliteral(mapping, b'node')
751 node = self._getliteral(mapping, b'node')
744 if repo is None or node is None:
752 if repo is None or node is None:
745 return
753 return
746 try:
754 try:
747 return repo[node]
755 return repo[node]
748 except error.RepoLookupError:
756 except error.RepoLookupError:
749 return None # maybe hidden/non-existent node
757 return None # maybe hidden/non-existent node
750
758
751 def _loadfctx(self, mapping):
759 def _loadfctx(self, mapping):
752 ctx = self._getsome(mapping, b'ctx')
760 ctx = self._getsome(mapping, b'ctx')
753 path = self._getliteral(mapping, b'path')
761 path = self._getliteral(mapping, b'path')
754 if ctx is None or path is None:
762 if ctx is None or path is None:
755 return None
763 return None
756 try:
764 try:
757 return ctx[path]
765 return ctx[path]
758 except error.LookupError:
766 except error.LookupError:
759 return None # maybe removed file?
767 return None # maybe removed file?
760
768
761 _loadermap = {
769 _loadermap = {
762 b'ctx': _loadctx,
770 b'ctx': _loadctx,
763 b'fctx': _loadfctx,
771 b'fctx': _loadfctx,
764 }
772 }
765
773
766
774
767 def _internaltemplateformatter(
775 def _internaltemplateformatter(
768 ui,
776 ui,
769 out,
777 out,
770 topic,
778 topic,
771 opts,
779 opts,
772 spec,
780 spec,
773 tmpl,
781 tmpl,
774 docheader=b'',
782 docheader=b'',
775 docfooter=b'',
783 docfooter=b'',
776 separator=b'',
784 separator=b'',
777 ):
785 ):
778 """Build template formatter that handles customizable built-in templates
786 """Build template formatter that handles customizable built-in templates
779 such as -Tjson(...)"""
787 such as -Tjson(...)"""
780 templates = {spec.ref: tmpl}
788 templates = {spec.ref: tmpl}
781 if docheader:
789 if docheader:
782 templates[b'%s:docheader' % spec.ref] = docheader
790 templates[b'%s:docheader' % spec.ref] = docheader
783 if docfooter:
791 if docfooter:
784 templates[b'%s:docfooter' % spec.ref] = docfooter
792 templates[b'%s:docfooter' % spec.ref] = docfooter
785 if separator:
793 if separator:
786 templates[b'%s:separator' % spec.ref] = separator
794 templates[b'%s:separator' % spec.ref] = separator
787 return templateformatter(
795 return templateformatter(
788 ui, out, topic, opts, spec, overridetemplates=templates
796 ui, out, topic, opts, spec, overridetemplates=templates
789 )
797 )
790
798
791
799
792 def formatter(ui, out, topic, opts):
800 def formatter(ui, out, topic, opts):
793 spec = lookuptemplate(ui, topic, opts.get(b'template', b''))
801 spec = lookuptemplate(ui, topic, opts.get(b'template', b''))
794 if spec.ref == b"cbor" and spec.refargs is not None:
802 if spec.ref == b"cbor" and spec.refargs is not None:
795 return _internaltemplateformatter(
803 return _internaltemplateformatter(
796 ui,
804 ui,
797 out,
805 out,
798 topic,
806 topic,
799 opts,
807 opts,
800 spec,
808 spec,
801 tmpl=b'{dict(%s)|cbor}' % spec.refargs,
809 tmpl=b'{dict(%s)|cbor}' % spec.refargs,
802 docheader=cborutil.BEGIN_INDEFINITE_ARRAY,
810 docheader=cborutil.BEGIN_INDEFINITE_ARRAY,
803 docfooter=cborutil.BREAK,
811 docfooter=cborutil.BREAK,
804 )
812 )
805 elif spec.ref == b"cbor":
813 elif spec.ref == b"cbor":
806 return cborformatter(ui, out, topic, opts)
814 return cborformatter(ui, out, topic, opts)
807 elif spec.ref == b"json" and spec.refargs is not None:
815 elif spec.ref == b"json" and spec.refargs is not None:
808 return _internaltemplateformatter(
816 return _internaltemplateformatter(
809 ui,
817 ui,
810 out,
818 out,
811 topic,
819 topic,
812 opts,
820 opts,
813 spec,
821 spec,
814 tmpl=b'{dict(%s)|json}' % spec.refargs,
822 tmpl=b'{dict(%s)|json}' % spec.refargs,
815 docheader=b'[\n ',
823 docheader=b'[\n ',
816 docfooter=b'\n]\n',
824 docfooter=b'\n]\n',
817 separator=b',\n ',
825 separator=b',\n ',
818 )
826 )
819 elif spec.ref == b"json":
827 elif spec.ref == b"json":
820 return jsonformatter(ui, out, topic, opts)
828 return jsonformatter(ui, out, topic, opts)
821 elif spec.ref == b"pickle":
829 elif spec.ref == b"pickle":
822 assert spec.refargs is None, r'function-style not supported'
830 assert spec.refargs is None, r'function-style not supported'
823 return pickleformatter(ui, out, topic, opts)
831 return pickleformatter(ui, out, topic, opts)
824 elif spec.ref == b"debug":
832 elif spec.ref == b"debug":
825 assert spec.refargs is None, r'function-style not supported'
833 assert spec.refargs is None, r'function-style not supported'
826 return debugformatter(ui, out, topic, opts)
834 return debugformatter(ui, out, topic, opts)
827 elif spec.ref or spec.tmpl or spec.mapfile:
835 elif spec.ref or spec.tmpl or spec.mapfile:
828 assert spec.refargs is None, r'function-style not supported'
836 assert spec.refargs is None, r'function-style not supported'
829 return templateformatter(ui, out, topic, opts, spec)
837 return templateformatter(ui, out, topic, opts, spec)
830 # developer config: ui.formatdebug
838 # developer config: ui.formatdebug
831 elif ui.configbool(b'ui', b'formatdebug'):
839 elif ui.configbool(b'ui', b'formatdebug'):
832 return debugformatter(ui, out, topic, opts)
840 return debugformatter(ui, out, topic, opts)
833 # deprecated config: ui.formatjson
841 # deprecated config: ui.formatjson
834 elif ui.configbool(b'ui', b'formatjson'):
842 elif ui.configbool(b'ui', b'formatjson'):
835 return jsonformatter(ui, out, topic, opts)
843 return jsonformatter(ui, out, topic, opts)
836 return plainformatter(ui, out, topic, opts)
844 return plainformatter(ui, out, topic, opts)
837
845
838
846
839 @contextlib.contextmanager
847 @contextlib.contextmanager
840 def openformatter(ui, filename, topic, opts):
848 def openformatter(ui, filename, topic, opts):
841 """Create a formatter that writes outputs to the specified file
849 """Create a formatter that writes outputs to the specified file
842
850
843 Must be invoked using the 'with' statement.
851 Must be invoked using the 'with' statement.
844 """
852 """
845 with util.posixfile(filename, b'wb') as out:
853 with util.posixfile(filename, b'wb') as out:
846 with formatter(ui, out, topic, opts) as fm:
854 with formatter(ui, out, topic, opts) as fm:
847 yield fm
855 yield fm
848
856
849
857
850 @contextlib.contextmanager
858 @contextlib.contextmanager
851 def _neverending(fm):
859 def _neverending(fm):
852 yield fm
860 yield fm
853
861
854
862
855 def maybereopen(fm, filename):
863 def maybereopen(fm, filename):
856 """Create a formatter backed by file if filename specified, else return
864 """Create a formatter backed by file if filename specified, else return
857 the given formatter
865 the given formatter
858
866
859 Must be invoked using the 'with' statement. This will never call fm.end()
867 Must be invoked using the 'with' statement. This will never call fm.end()
860 of the given formatter.
868 of the given formatter.
861 """
869 """
862 if filename:
870 if filename:
863 return openformatter(fm._ui, filename, fm._topic, fm._opts)
871 return openformatter(fm._ui, filename, fm._topic, fm._opts)
864 else:
872 else:
865 return _neverending(fm)
873 return _neverending(fm)
@@ -1,1768 +1,1762 b''
1
1
2 Function to test discovery between two repos in both directions, using both the local shortcut
2 Function to test discovery between two repos in both directions, using both the local shortcut
3 (which is currently not activated by default) and the full remotable protocol:
3 (which is currently not activated by default) and the full remotable protocol:
4
4
5 $ testdesc() { # revs_a, revs_b, dagdesc
5 $ testdesc() { # revs_a, revs_b, dagdesc
6 > if [ -d foo ]; then rm -rf foo; fi
6 > if [ -d foo ]; then rm -rf foo; fi
7 > hg init foo
7 > hg init foo
8 > cd foo
8 > cd foo
9 > hg debugbuilddag "$3"
9 > hg debugbuilddag "$3"
10 > hg clone . a $1 --quiet
10 > hg clone . a $1 --quiet
11 > hg clone . b $2 --quiet
11 > hg clone . b $2 --quiet
12 > echo
12 > echo
13 > echo "% -- a -> b tree"
13 > echo "% -- a -> b tree"
14 > hg -R a debugdiscovery b --verbose --old
14 > hg -R a debugdiscovery b --verbose --old
15 > echo
15 > echo
16 > echo "% -- a -> b set"
16 > echo "% -- a -> b set"
17 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true
17 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true
18 > echo
18 > echo
19 > echo "% -- a -> b set (tip only)"
19 > echo "% -- a -> b set (tip only)"
20 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true --rev tip
20 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true --rev tip
21 > echo
21 > echo
22 > echo "% -- b -> a tree"
22 > echo "% -- b -> a tree"
23 > hg -R b debugdiscovery a --verbose --old
23 > hg -R b debugdiscovery a --verbose --old
24 > echo
24 > echo
25 > echo "% -- b -> a set"
25 > echo "% -- b -> a set"
26 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true
26 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true
27 > echo
27 > echo
28 > echo "% -- b -> a set (tip only)"
28 > echo "% -- b -> a set (tip only)"
29 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true --rev tip
29 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true --rev tip
30 > cd ..
30 > cd ..
31 > }
31 > }
32
32
33
33
34 Small superset:
34 Small superset:
35
35
36 $ testdesc '-ra1 -ra2' '-rb1 -rb2 -rb3' '
36 $ testdesc '-ra1 -ra2' '-rb1 -rb2 -rb3' '
37 > +2:f +1:a1:b1
37 > +2:f +1:a1:b1
38 > <f +4 :a2
38 > <f +4 :a2
39 > +5 :b2
39 > +5 :b2
40 > <f +3 :b3'
40 > <f +3 :b3'
41
41
42 % -- a -> b tree
42 % -- a -> b tree
43 comparing with b
43 comparing with b
44 searching for changes
44 searching for changes
45 unpruned common: 01241442b3c2 66f7d451a68b b5714e113bc0
45 unpruned common: 01241442b3c2 66f7d451a68b b5714e113bc0
46 elapsed time: * seconds (glob)
46 elapsed time: * seconds (glob)
47 round-trips: 2
47 round-trips: 2
48 heads summary:
48 heads summary:
49 total common heads: 2
49 total common heads: 2
50 also local heads: 2
50 also local heads: 2
51 also remote heads: 1
51 also remote heads: 1
52 both: 1
52 both: 1
53 local heads: 2
53 local heads: 2
54 common: 2
54 common: 2
55 missing: 0
55 missing: 0
56 remote heads: 3
56 remote heads: 3
57 common: 1
57 common: 1
58 unknown: 2
58 unknown: 2
59 local changesets: 7
59 local changesets: 7
60 common: 7
60 common: 7
61 heads: 2
61 heads: 2
62 roots: 1
62 roots: 1
63 missing: 0
63 missing: 0
64 heads: 0
64 heads: 0
65 roots: 0
65 roots: 0
66 first undecided set: 3
66 first undecided set: 3
67 heads: 1
67 heads: 1
68 roots: 1
68 roots: 1
69 common: 3
69 common: 3
70 missing: 0
70 missing: 0
71 common heads: 01241442b3c2 b5714e113bc0
71 common heads: 01241442b3c2 b5714e113bc0
72
72
73 % -- a -> b set
73 % -- a -> b set
74 comparing with b
74 comparing with b
75 query 1; heads
75 query 1; heads
76 searching for changes
76 searching for changes
77 all local changesets known remotely
77 all local changesets known remotely
78 elapsed time: * seconds (glob)
78 elapsed time: * seconds (glob)
79 round-trips: 1
79 round-trips: 1
80 heads summary:
80 heads summary:
81 total common heads: 2
81 total common heads: 2
82 also local heads: 2
82 also local heads: 2
83 also remote heads: 1
83 also remote heads: 1
84 both: 1
84 both: 1
85 local heads: 2
85 local heads: 2
86 common: 2
86 common: 2
87 missing: 0
87 missing: 0
88 remote heads: 3
88 remote heads: 3
89 common: 1
89 common: 1
90 unknown: 2
90 unknown: 2
91 local changesets: 7
91 local changesets: 7
92 common: 7
92 common: 7
93 heads: 2
93 heads: 2
94 roots: 1
94 roots: 1
95 missing: 0
95 missing: 0
96 heads: 0
96 heads: 0
97 roots: 0
97 roots: 0
98 first undecided set: 3
98 first undecided set: 3
99 heads: 1
99 heads: 1
100 roots: 1
100 roots: 1
101 common: 3
101 common: 3
102 missing: 0
102 missing: 0
103 common heads: 01241442b3c2 b5714e113bc0
103 common heads: 01241442b3c2 b5714e113bc0
104
104
105 % -- a -> b set (tip only)
105 % -- a -> b set (tip only)
106 comparing with b
106 comparing with b
107 query 1; heads
107 query 1; heads
108 searching for changes
108 searching for changes
109 all local changesets known remotely
109 all local changesets known remotely
110 elapsed time: * seconds (glob)
110 elapsed time: * seconds (glob)
111 round-trips: 1
111 round-trips: 1
112 heads summary:
112 heads summary:
113 total common heads: 1
113 total common heads: 1
114 also local heads: 1
114 also local heads: 1
115 also remote heads: 0
115 also remote heads: 0
116 both: 0
116 both: 0
117 local heads: 2
117 local heads: 2
118 common: 1
118 common: 1
119 missing: 1
119 missing: 1
120 remote heads: 3
120 remote heads: 3
121 common: 0
121 common: 0
122 unknown: 3
122 unknown: 3
123 local changesets: 7
123 local changesets: 7
124 common: 6
124 common: 6
125 heads: 1
125 heads: 1
126 roots: 1
126 roots: 1
127 missing: 1
127 missing: 1
128 heads: 1
128 heads: 1
129 roots: 1
129 roots: 1
130 first undecided set: 6
130 first undecided set: 6
131 heads: 2
131 heads: 2
132 roots: 1
132 roots: 1
133 common: 5
133 common: 5
134 missing: 1
134 missing: 1
135 common heads: b5714e113bc0
135 common heads: b5714e113bc0
136
136
137 % -- b -> a tree
137 % -- b -> a tree
138 comparing with a
138 comparing with a
139 searching for changes
139 searching for changes
140 unpruned common: 01241442b3c2 b5714e113bc0
140 unpruned common: 01241442b3c2 b5714e113bc0
141 elapsed time: * seconds (glob)
141 elapsed time: * seconds (glob)
142 round-trips: 1
142 round-trips: 1
143 heads summary:
143 heads summary:
144 total common heads: 2
144 total common heads: 2
145 also local heads: 1
145 also local heads: 1
146 also remote heads: 2
146 also remote heads: 2
147 both: 1
147 both: 1
148 local heads: 3
148 local heads: 3
149 common: 1
149 common: 1
150 missing: 2
150 missing: 2
151 remote heads: 2
151 remote heads: 2
152 common: 2
152 common: 2
153 unknown: 0
153 unknown: 0
154 local changesets: 15
154 local changesets: 15
155 common: 7
155 common: 7
156 heads: 2
156 heads: 2
157 roots: 1
157 roots: 1
158 missing: 8
158 missing: 8
159 heads: 2
159 heads: 2
160 roots: 2
160 roots: 2
161 first undecided set: 8
161 first undecided set: 8
162 heads: 2
162 heads: 2
163 roots: 2
163 roots: 2
164 common: 0
164 common: 0
165 missing: 8
165 missing: 8
166 common heads: 01241442b3c2 b5714e113bc0
166 common heads: 01241442b3c2 b5714e113bc0
167
167
168 % -- b -> a set
168 % -- b -> a set
169 comparing with a
169 comparing with a
170 query 1; heads
170 query 1; heads
171 searching for changes
171 searching for changes
172 all remote heads known locally
172 all remote heads known locally
173 elapsed time: * seconds (glob)
173 elapsed time: * seconds (glob)
174 round-trips: 1
174 round-trips: 1
175 heads summary:
175 heads summary:
176 total common heads: 2
176 total common heads: 2
177 also local heads: 1
177 also local heads: 1
178 also remote heads: 2
178 also remote heads: 2
179 both: 1
179 both: 1
180 local heads: 3
180 local heads: 3
181 common: 1
181 common: 1
182 missing: 2
182 missing: 2
183 remote heads: 2
183 remote heads: 2
184 common: 2
184 common: 2
185 unknown: 0
185 unknown: 0
186 local changesets: 15
186 local changesets: 15
187 common: 7
187 common: 7
188 heads: 2
188 heads: 2
189 roots: 1
189 roots: 1
190 missing: 8
190 missing: 8
191 heads: 2
191 heads: 2
192 roots: 2
192 roots: 2
193 first undecided set: 8
193 first undecided set: 8
194 heads: 2
194 heads: 2
195 roots: 2
195 roots: 2
196 common: 0
196 common: 0
197 missing: 8
197 missing: 8
198 common heads: 01241442b3c2 b5714e113bc0
198 common heads: 01241442b3c2 b5714e113bc0
199
199
200 % -- b -> a set (tip only)
200 % -- b -> a set (tip only)
201 comparing with a
201 comparing with a
202 query 1; heads
202 query 1; heads
203 searching for changes
203 searching for changes
204 all remote heads known locally
204 all remote heads known locally
205 elapsed time: * seconds (glob)
205 elapsed time: * seconds (glob)
206 round-trips: 1
206 round-trips: 1
207 heads summary:
207 heads summary:
208 total common heads: 2
208 total common heads: 2
209 also local heads: 1
209 also local heads: 1
210 also remote heads: 2
210 also remote heads: 2
211 both: 1
211 both: 1
212 local heads: 3
212 local heads: 3
213 common: 1
213 common: 1
214 missing: 2
214 missing: 2
215 remote heads: 2
215 remote heads: 2
216 common: 2
216 common: 2
217 unknown: 0
217 unknown: 0
218 local changesets: 15
218 local changesets: 15
219 common: 7
219 common: 7
220 heads: 2
220 heads: 2
221 roots: 1
221 roots: 1
222 missing: 8
222 missing: 8
223 heads: 2
223 heads: 2
224 roots: 2
224 roots: 2
225 first undecided set: 8
225 first undecided set: 8
226 heads: 2
226 heads: 2
227 roots: 2
227 roots: 2
228 common: 0
228 common: 0
229 missing: 8
229 missing: 8
230 common heads: 01241442b3c2 b5714e113bc0
230 common heads: 01241442b3c2 b5714e113bc0
231
231
232
232
233 Many new:
233 Many new:
234
234
235 $ testdesc '-ra1 -ra2' '-rb' '
235 $ testdesc '-ra1 -ra2' '-rb' '
236 > +2:f +3:a1 +3:b
236 > +2:f +3:a1 +3:b
237 > <f +30 :a2'
237 > <f +30 :a2'
238
238
239 % -- a -> b tree
239 % -- a -> b tree
240 comparing with b
240 comparing with b
241 searching for changes
241 searching for changes
242 unpruned common: bebd167eb94d
242 unpruned common: bebd167eb94d
243 elapsed time: * seconds (glob)
243 elapsed time: * seconds (glob)
244 round-trips: 2
244 round-trips: 2
245 heads summary:
245 heads summary:
246 total common heads: 1
246 total common heads: 1
247 also local heads: 1
247 also local heads: 1
248 also remote heads: 0
248 also remote heads: 0
249 both: 0
249 both: 0
250 local heads: 2
250 local heads: 2
251 common: 1
251 common: 1
252 missing: 1
252 missing: 1
253 remote heads: 1
253 remote heads: 1
254 common: 0
254 common: 0
255 unknown: 1
255 unknown: 1
256 local changesets: 35
256 local changesets: 35
257 common: 5
257 common: 5
258 heads: 1
258 heads: 1
259 roots: 1
259 roots: 1
260 missing: 30
260 missing: 30
261 heads: 1
261 heads: 1
262 roots: 1
262 roots: 1
263 first undecided set: 34
263 first undecided set: 34
264 heads: 2
264 heads: 2
265 roots: 1
265 roots: 1
266 common: 4
266 common: 4
267 missing: 30
267 missing: 30
268 common heads: bebd167eb94d
268 common heads: bebd167eb94d
269
269
270 % -- a -> b set
270 % -- a -> b set
271 comparing with b
271 comparing with b
272 query 1; heads
272 query 1; heads
273 searching for changes
273 searching for changes
274 taking initial sample
274 taking initial sample
275 searching: 2 queries
275 searching: 2 queries
276 query 2; still undecided: 29, sample size is: 29
276 query 2; still undecided: 29, sample size is: 29
277 2 total queries in *.????s (glob)
277 2 total queries in *.????s (glob)
278 elapsed time: * seconds (glob)
278 elapsed time: * seconds (glob)
279 round-trips: 2
279 round-trips: 2
280 heads summary:
280 heads summary:
281 total common heads: 1
281 total common heads: 1
282 also local heads: 1
282 also local heads: 1
283 also remote heads: 0
283 also remote heads: 0
284 both: 0
284 both: 0
285 local heads: 2
285 local heads: 2
286 common: 1
286 common: 1
287 missing: 1
287 missing: 1
288 remote heads: 1
288 remote heads: 1
289 common: 0
289 common: 0
290 unknown: 1
290 unknown: 1
291 local changesets: 35
291 local changesets: 35
292 common: 5
292 common: 5
293 heads: 1
293 heads: 1
294 roots: 1
294 roots: 1
295 missing: 30
295 missing: 30
296 heads: 1
296 heads: 1
297 roots: 1
297 roots: 1
298 first undecided set: 34
298 first undecided set: 34
299 heads: 2
299 heads: 2
300 roots: 1
300 roots: 1
301 common: 4
301 common: 4
302 missing: 30
302 missing: 30
303 common heads: bebd167eb94d
303 common heads: bebd167eb94d
304
304
305 % -- a -> b set (tip only)
305 % -- a -> b set (tip only)
306 comparing with b
306 comparing with b
307 query 1; heads
307 query 1; heads
308 searching for changes
308 searching for changes
309 taking quick initial sample
309 taking quick initial sample
310 searching: 2 queries
310 searching: 2 queries
311 query 2; still undecided: 31, sample size is: 31
311 query 2; still undecided: 31, sample size is: 31
312 2 total queries in *.????s (glob)
312 2 total queries in *.????s (glob)
313 elapsed time: * seconds (glob)
313 elapsed time: * seconds (glob)
314 round-trips: 2
314 round-trips: 2
315 heads summary:
315 heads summary:
316 total common heads: 1
316 total common heads: 1
317 also local heads: 0
317 also local heads: 0
318 also remote heads: 0
318 also remote heads: 0
319 both: 0
319 both: 0
320 local heads: 2
320 local heads: 2
321 common: 0
321 common: 0
322 missing: 2
322 missing: 2
323 remote heads: 1
323 remote heads: 1
324 common: 0
324 common: 0
325 unknown: 1
325 unknown: 1
326 local changesets: 35
326 local changesets: 35
327 common: 2
327 common: 2
328 heads: 1
328 heads: 1
329 roots: 1
329 roots: 1
330 missing: 33
330 missing: 33
331 heads: 2
331 heads: 2
332 roots: 2
332 roots: 2
333 first undecided set: 35
333 first undecided set: 35
334 heads: 2
334 heads: 2
335 roots: 1
335 roots: 1
336 common: 2
336 common: 2
337 missing: 33
337 missing: 33
338 common heads: 66f7d451a68b
338 common heads: 66f7d451a68b
339
339
340 % -- b -> a tree
340 % -- b -> a tree
341 comparing with a
341 comparing with a
342 searching for changes
342 searching for changes
343 unpruned common: 66f7d451a68b bebd167eb94d
343 unpruned common: 66f7d451a68b bebd167eb94d
344 elapsed time: * seconds (glob)
344 elapsed time: * seconds (glob)
345 round-trips: 4
345 round-trips: 4
346 heads summary:
346 heads summary:
347 total common heads: 1
347 total common heads: 1
348 also local heads: 0
348 also local heads: 0
349 also remote heads: 1
349 also remote heads: 1
350 both: 0
350 both: 0
351 local heads: 1
351 local heads: 1
352 common: 0
352 common: 0
353 missing: 1
353 missing: 1
354 remote heads: 2
354 remote heads: 2
355 common: 1
355 common: 1
356 unknown: 1
356 unknown: 1
357 local changesets: 8
357 local changesets: 8
358 common: 5
358 common: 5
359 heads: 1
359 heads: 1
360 roots: 1
360 roots: 1
361 missing: 3
361 missing: 3
362 heads: 1
362 heads: 1
363 roots: 1
363 roots: 1
364 first undecided set: 3
364 first undecided set: 3
365 heads: 1
365 heads: 1
366 roots: 1
366 roots: 1
367 common: 0
367 common: 0
368 missing: 3
368 missing: 3
369 common heads: bebd167eb94d
369 common heads: bebd167eb94d
370
370
371 % -- b -> a set
371 % -- b -> a set
372 comparing with a
372 comparing with a
373 query 1; heads
373 query 1; heads
374 searching for changes
374 searching for changes
375 taking initial sample
375 taking initial sample
376 searching: 2 queries
376 searching: 2 queries
377 query 2; still undecided: 2, sample size is: 2
377 query 2; still undecided: 2, sample size is: 2
378 2 total queries in *.????s (glob)
378 2 total queries in *.????s (glob)
379 elapsed time: * seconds (glob)
379 elapsed time: * seconds (glob)
380 round-trips: 2
380 round-trips: 2
381 heads summary:
381 heads summary:
382 total common heads: 1
382 total common heads: 1
383 also local heads: 0
383 also local heads: 0
384 also remote heads: 1
384 also remote heads: 1
385 both: 0
385 both: 0
386 local heads: 1
386 local heads: 1
387 common: 0
387 common: 0
388 missing: 1
388 missing: 1
389 remote heads: 2
389 remote heads: 2
390 common: 1
390 common: 1
391 unknown: 1
391 unknown: 1
392 local changesets: 8
392 local changesets: 8
393 common: 5
393 common: 5
394 heads: 1
394 heads: 1
395 roots: 1
395 roots: 1
396 missing: 3
396 missing: 3
397 heads: 1
397 heads: 1
398 roots: 1
398 roots: 1
399 first undecided set: 3
399 first undecided set: 3
400 heads: 1
400 heads: 1
401 roots: 1
401 roots: 1
402 common: 0
402 common: 0
403 missing: 3
403 missing: 3
404 common heads: bebd167eb94d
404 common heads: bebd167eb94d
405
405
406 % -- b -> a set (tip only)
406 % -- b -> a set (tip only)
407 comparing with a
407 comparing with a
408 query 1; heads
408 query 1; heads
409 searching for changes
409 searching for changes
410 taking initial sample
410 taking initial sample
411 searching: 2 queries
411 searching: 2 queries
412 query 2; still undecided: 2, sample size is: 2
412 query 2; still undecided: 2, sample size is: 2
413 2 total queries in *.????s (glob)
413 2 total queries in *.????s (glob)
414 elapsed time: * seconds (glob)
414 elapsed time: * seconds (glob)
415 round-trips: 2
415 round-trips: 2
416 heads summary:
416 heads summary:
417 total common heads: 1
417 total common heads: 1
418 also local heads: 0
418 also local heads: 0
419 also remote heads: 1
419 also remote heads: 1
420 both: 0
420 both: 0
421 local heads: 1
421 local heads: 1
422 common: 0
422 common: 0
423 missing: 1
423 missing: 1
424 remote heads: 2
424 remote heads: 2
425 common: 1
425 common: 1
426 unknown: 1
426 unknown: 1
427 local changesets: 8
427 local changesets: 8
428 common: 5
428 common: 5
429 heads: 1
429 heads: 1
430 roots: 1
430 roots: 1
431 missing: 3
431 missing: 3
432 heads: 1
432 heads: 1
433 roots: 1
433 roots: 1
434 first undecided set: 3
434 first undecided set: 3
435 heads: 1
435 heads: 1
436 roots: 1
436 roots: 1
437 common: 0
437 common: 0
438 missing: 3
438 missing: 3
439 common heads: bebd167eb94d
439 common heads: bebd167eb94d
440
440
441 Both sides many new with stub:
441 Both sides many new with stub:
442
442
443 $ testdesc '-ra1 -ra2' '-rb' '
443 $ testdesc '-ra1 -ra2' '-rb' '
444 > +2:f +2:a1 +30 :b
444 > +2:f +2:a1 +30 :b
445 > <f +30 :a2'
445 > <f +30 :a2'
446
446
447 % -- a -> b tree
447 % -- a -> b tree
448 comparing with b
448 comparing with b
449 searching for changes
449 searching for changes
450 unpruned common: 2dc09a01254d
450 unpruned common: 2dc09a01254d
451 elapsed time: * seconds (glob)
451 elapsed time: * seconds (glob)
452 round-trips: 4
452 round-trips: 4
453 heads summary:
453 heads summary:
454 total common heads: 1
454 total common heads: 1
455 also local heads: 1
455 also local heads: 1
456 also remote heads: 0
456 also remote heads: 0
457 both: 0
457 both: 0
458 local heads: 2
458 local heads: 2
459 common: 1
459 common: 1
460 missing: 1
460 missing: 1
461 remote heads: 1
461 remote heads: 1
462 common: 0
462 common: 0
463 unknown: 1
463 unknown: 1
464 local changesets: 34
464 local changesets: 34
465 common: 4
465 common: 4
466 heads: 1
466 heads: 1
467 roots: 1
467 roots: 1
468 missing: 30
468 missing: 30
469 heads: 1
469 heads: 1
470 roots: 1
470 roots: 1
471 first undecided set: 33
471 first undecided set: 33
472 heads: 2
472 heads: 2
473 roots: 1
473 roots: 1
474 common: 3
474 common: 3
475 missing: 30
475 missing: 30
476 common heads: 2dc09a01254d
476 common heads: 2dc09a01254d
477
477
478 % -- a -> b set
478 % -- a -> b set
479 comparing with b
479 comparing with b
480 query 1; heads
480 query 1; heads
481 searching for changes
481 searching for changes
482 taking initial sample
482 taking initial sample
483 searching: 2 queries
483 searching: 2 queries
484 query 2; still undecided: 29, sample size is: 29
484 query 2; still undecided: 29, sample size is: 29
485 2 total queries in *.????s (glob)
485 2 total queries in *.????s (glob)
486 elapsed time: * seconds (glob)
486 elapsed time: * seconds (glob)
487 round-trips: 2
487 round-trips: 2
488 heads summary:
488 heads summary:
489 total common heads: 1
489 total common heads: 1
490 also local heads: 1
490 also local heads: 1
491 also remote heads: 0
491 also remote heads: 0
492 both: 0
492 both: 0
493 local heads: 2
493 local heads: 2
494 common: 1
494 common: 1
495 missing: 1
495 missing: 1
496 remote heads: 1
496 remote heads: 1
497 common: 0
497 common: 0
498 unknown: 1
498 unknown: 1
499 local changesets: 34
499 local changesets: 34
500 common: 4
500 common: 4
501 heads: 1
501 heads: 1
502 roots: 1
502 roots: 1
503 missing: 30
503 missing: 30
504 heads: 1
504 heads: 1
505 roots: 1
505 roots: 1
506 first undecided set: 33
506 first undecided set: 33
507 heads: 2
507 heads: 2
508 roots: 1
508 roots: 1
509 common: 3
509 common: 3
510 missing: 30
510 missing: 30
511 common heads: 2dc09a01254d
511 common heads: 2dc09a01254d
512
512
513 % -- a -> b set (tip only)
513 % -- a -> b set (tip only)
514 comparing with b
514 comparing with b
515 query 1; heads
515 query 1; heads
516 searching for changes
516 searching for changes
517 taking quick initial sample
517 taking quick initial sample
518 searching: 2 queries
518 searching: 2 queries
519 query 2; still undecided: 31, sample size is: 31
519 query 2; still undecided: 31, sample size is: 31
520 2 total queries in *.????s (glob)
520 2 total queries in *.????s (glob)
521 elapsed time: * seconds (glob)
521 elapsed time: * seconds (glob)
522 round-trips: 2
522 round-trips: 2
523 heads summary:
523 heads summary:
524 total common heads: 1
524 total common heads: 1
525 also local heads: 0
525 also local heads: 0
526 also remote heads: 0
526 also remote heads: 0
527 both: 0
527 both: 0
528 local heads: 2
528 local heads: 2
529 common: 0
529 common: 0
530 missing: 2
530 missing: 2
531 remote heads: 1
531 remote heads: 1
532 common: 0
532 common: 0
533 unknown: 1
533 unknown: 1
534 local changesets: 34
534 local changesets: 34
535 common: 2
535 common: 2
536 heads: 1
536 heads: 1
537 roots: 1
537 roots: 1
538 missing: 32
538 missing: 32
539 heads: 2
539 heads: 2
540 roots: 2
540 roots: 2
541 first undecided set: 34
541 first undecided set: 34
542 heads: 2
542 heads: 2
543 roots: 1
543 roots: 1
544 common: 2
544 common: 2
545 missing: 32
545 missing: 32
546 common heads: 66f7d451a68b
546 common heads: 66f7d451a68b
547
547
548 % -- b -> a tree
548 % -- b -> a tree
549 comparing with a
549 comparing with a
550 searching for changes
550 searching for changes
551 unpruned common: 2dc09a01254d 66f7d451a68b
551 unpruned common: 2dc09a01254d 66f7d451a68b
552 elapsed time: * seconds (glob)
552 elapsed time: * seconds (glob)
553 round-trips: 4
553 round-trips: 4
554 heads summary:
554 heads summary:
555 total common heads: 1
555 total common heads: 1
556 also local heads: 0
556 also local heads: 0
557 also remote heads: 1
557 also remote heads: 1
558 both: 0
558 both: 0
559 local heads: 1
559 local heads: 1
560 common: 0
560 common: 0
561 missing: 1
561 missing: 1
562 remote heads: 2
562 remote heads: 2
563 common: 1
563 common: 1
564 unknown: 1
564 unknown: 1
565 local changesets: 34
565 local changesets: 34
566 common: 4
566 common: 4
567 heads: 1
567 heads: 1
568 roots: 1
568 roots: 1
569 missing: 30
569 missing: 30
570 heads: 1
570 heads: 1
571 roots: 1
571 roots: 1
572 first undecided set: 30
572 first undecided set: 30
573 heads: 1
573 heads: 1
574 roots: 1
574 roots: 1
575 common: 0
575 common: 0
576 missing: 30
576 missing: 30
577 common heads: 2dc09a01254d
577 common heads: 2dc09a01254d
578
578
579 % -- b -> a set
579 % -- b -> a set
580 comparing with a
580 comparing with a
581 query 1; heads
581 query 1; heads
582 searching for changes
582 searching for changes
583 taking initial sample
583 taking initial sample
584 searching: 2 queries
584 searching: 2 queries
585 query 2; still undecided: 29, sample size is: 29
585 query 2; still undecided: 29, sample size is: 29
586 2 total queries in *.????s (glob)
586 2 total queries in *.????s (glob)
587 elapsed time: * seconds (glob)
587 elapsed time: * seconds (glob)
588 round-trips: 2
588 round-trips: 2
589 heads summary:
589 heads summary:
590 total common heads: 1
590 total common heads: 1
591 also local heads: 0
591 also local heads: 0
592 also remote heads: 1
592 also remote heads: 1
593 both: 0
593 both: 0
594 local heads: 1
594 local heads: 1
595 common: 0
595 common: 0
596 missing: 1
596 missing: 1
597 remote heads: 2
597 remote heads: 2
598 common: 1
598 common: 1
599 unknown: 1
599 unknown: 1
600 local changesets: 34
600 local changesets: 34
601 common: 4
601 common: 4
602 heads: 1
602 heads: 1
603 roots: 1
603 roots: 1
604 missing: 30
604 missing: 30
605 heads: 1
605 heads: 1
606 roots: 1
606 roots: 1
607 first undecided set: 30
607 first undecided set: 30
608 heads: 1
608 heads: 1
609 roots: 1
609 roots: 1
610 common: 0
610 common: 0
611 missing: 30
611 missing: 30
612 common heads: 2dc09a01254d
612 common heads: 2dc09a01254d
613
613
614 % -- b -> a set (tip only)
614 % -- b -> a set (tip only)
615 comparing with a
615 comparing with a
616 query 1; heads
616 query 1; heads
617 searching for changes
617 searching for changes
618 taking initial sample
618 taking initial sample
619 searching: 2 queries
619 searching: 2 queries
620 query 2; still undecided: 29, sample size is: 29
620 query 2; still undecided: 29, sample size is: 29
621 2 total queries in *.????s (glob)
621 2 total queries in *.????s (glob)
622 elapsed time: * seconds (glob)
622 elapsed time: * seconds (glob)
623 round-trips: 2
623 round-trips: 2
624 heads summary:
624 heads summary:
625 total common heads: 1
625 total common heads: 1
626 also local heads: 0
626 also local heads: 0
627 also remote heads: 1
627 also remote heads: 1
628 both: 0
628 both: 0
629 local heads: 1
629 local heads: 1
630 common: 0
630 common: 0
631 missing: 1
631 missing: 1
632 remote heads: 2
632 remote heads: 2
633 common: 1
633 common: 1
634 unknown: 1
634 unknown: 1
635 local changesets: 34
635 local changesets: 34
636 common: 4
636 common: 4
637 heads: 1
637 heads: 1
638 roots: 1
638 roots: 1
639 missing: 30
639 missing: 30
640 heads: 1
640 heads: 1
641 roots: 1
641 roots: 1
642 first undecided set: 30
642 first undecided set: 30
643 heads: 1
643 heads: 1
644 roots: 1
644 roots: 1
645 common: 0
645 common: 0
646 missing: 30
646 missing: 30
647 common heads: 2dc09a01254d
647 common heads: 2dc09a01254d
648
648
649
649
650 Both many new:
650 Both many new:
651
651
652 $ testdesc '-ra' '-rb' '
652 $ testdesc '-ra' '-rb' '
653 > +2:f +30 :b
653 > +2:f +30 :b
654 > <f +30 :a'
654 > <f +30 :a'
655
655
656 % -- a -> b tree
656 % -- a -> b tree
657 comparing with b
657 comparing with b
658 searching for changes
658 searching for changes
659 unpruned common: 66f7d451a68b
659 unpruned common: 66f7d451a68b
660 elapsed time: * seconds (glob)
660 elapsed time: * seconds (glob)
661 round-trips: 4
661 round-trips: 4
662 heads summary:
662 heads summary:
663 total common heads: 1
663 total common heads: 1
664 also local heads: 0
664 also local heads: 0
665 also remote heads: 0
665 also remote heads: 0
666 both: 0
666 both: 0
667 local heads: 1
667 local heads: 1
668 common: 0
668 common: 0
669 missing: 1
669 missing: 1
670 remote heads: 1
670 remote heads: 1
671 common: 0
671 common: 0
672 unknown: 1
672 unknown: 1
673 local changesets: 32
673 local changesets: 32
674 common: 2
674 common: 2
675 heads: 1
675 heads: 1
676 roots: 1
676 roots: 1
677 missing: 30
677 missing: 30
678 heads: 1
678 heads: 1
679 roots: 1
679 roots: 1
680 first undecided set: 32
680 first undecided set: 32
681 heads: 1
681 heads: 1
682 roots: 1
682 roots: 1
683 common: 2
683 common: 2
684 missing: 30
684 missing: 30
685 common heads: 66f7d451a68b
685 common heads: 66f7d451a68b
686
686
687 % -- a -> b set
687 % -- a -> b set
688 comparing with b
688 comparing with b
689 query 1; heads
689 query 1; heads
690 searching for changes
690 searching for changes
691 taking quick initial sample
691 taking quick initial sample
692 searching: 2 queries
692 searching: 2 queries
693 query 2; still undecided: 31, sample size is: 31
693 query 2; still undecided: 31, sample size is: 31
694 2 total queries in *.????s (glob)
694 2 total queries in *.????s (glob)
695 elapsed time: * seconds (glob)
695 elapsed time: * seconds (glob)
696 round-trips: 2
696 round-trips: 2
697 heads summary:
697 heads summary:
698 total common heads: 1
698 total common heads: 1
699 also local heads: 0
699 also local heads: 0
700 also remote heads: 0
700 also remote heads: 0
701 both: 0
701 both: 0
702 local heads: 1
702 local heads: 1
703 common: 0
703 common: 0
704 missing: 1
704 missing: 1
705 remote heads: 1
705 remote heads: 1
706 common: 0
706 common: 0
707 unknown: 1
707 unknown: 1
708 local changesets: 32
708 local changesets: 32
709 common: 2
709 common: 2
710 heads: 1
710 heads: 1
711 roots: 1
711 roots: 1
712 missing: 30
712 missing: 30
713 heads: 1
713 heads: 1
714 roots: 1
714 roots: 1
715 first undecided set: 32
715 first undecided set: 32
716 heads: 1
716 heads: 1
717 roots: 1
717 roots: 1
718 common: 2
718 common: 2
719 missing: 30
719 missing: 30
720 common heads: 66f7d451a68b
720 common heads: 66f7d451a68b
721
721
722 % -- a -> b set (tip only)
722 % -- a -> b set (tip only)
723 comparing with b
723 comparing with b
724 query 1; heads
724 query 1; heads
725 searching for changes
725 searching for changes
726 taking quick initial sample
726 taking quick initial sample
727 searching: 2 queries
727 searching: 2 queries
728 query 2; still undecided: 31, sample size is: 31
728 query 2; still undecided: 31, sample size is: 31
729 2 total queries in *.????s (glob)
729 2 total queries in *.????s (glob)
730 elapsed time: * seconds (glob)
730 elapsed time: * seconds (glob)
731 round-trips: 2
731 round-trips: 2
732 heads summary:
732 heads summary:
733 total common heads: 1
733 total common heads: 1
734 also local heads: 0
734 also local heads: 0
735 also remote heads: 0
735 also remote heads: 0
736 both: 0
736 both: 0
737 local heads: 1
737 local heads: 1
738 common: 0
738 common: 0
739 missing: 1
739 missing: 1
740 remote heads: 1
740 remote heads: 1
741 common: 0
741 common: 0
742 unknown: 1
742 unknown: 1
743 local changesets: 32
743 local changesets: 32
744 common: 2
744 common: 2
745 heads: 1
745 heads: 1
746 roots: 1
746 roots: 1
747 missing: 30
747 missing: 30
748 heads: 1
748 heads: 1
749 roots: 1
749 roots: 1
750 first undecided set: 32
750 first undecided set: 32
751 heads: 1
751 heads: 1
752 roots: 1
752 roots: 1
753 common: 2
753 common: 2
754 missing: 30
754 missing: 30
755 common heads: 66f7d451a68b
755 common heads: 66f7d451a68b
756
756
757 % -- b -> a tree
757 % -- b -> a tree
758 comparing with a
758 comparing with a
759 searching for changes
759 searching for changes
760 unpruned common: 66f7d451a68b
760 unpruned common: 66f7d451a68b
761 elapsed time: * seconds (glob)
761 elapsed time: * seconds (glob)
762 round-trips: 4
762 round-trips: 4
763 heads summary:
763 heads summary:
764 total common heads: 1
764 total common heads: 1
765 also local heads: 0
765 also local heads: 0
766 also remote heads: 0
766 also remote heads: 0
767 both: 0
767 both: 0
768 local heads: 1
768 local heads: 1
769 common: 0
769 common: 0
770 missing: 1
770 missing: 1
771 remote heads: 1
771 remote heads: 1
772 common: 0
772 common: 0
773 unknown: 1
773 unknown: 1
774 local changesets: 32
774 local changesets: 32
775 common: 2
775 common: 2
776 heads: 1
776 heads: 1
777 roots: 1
777 roots: 1
778 missing: 30
778 missing: 30
779 heads: 1
779 heads: 1
780 roots: 1
780 roots: 1
781 first undecided set: 32
781 first undecided set: 32
782 heads: 1
782 heads: 1
783 roots: 1
783 roots: 1
784 common: 2
784 common: 2
785 missing: 30
785 missing: 30
786 common heads: 66f7d451a68b
786 common heads: 66f7d451a68b
787
787
788 % -- b -> a set
788 % -- b -> a set
789 comparing with a
789 comparing with a
790 query 1; heads
790 query 1; heads
791 searching for changes
791 searching for changes
792 taking quick initial sample
792 taking quick initial sample
793 searching: 2 queries
793 searching: 2 queries
794 query 2; still undecided: 31, sample size is: 31
794 query 2; still undecided: 31, sample size is: 31
795 2 total queries in *.????s (glob)
795 2 total queries in *.????s (glob)
796 elapsed time: * seconds (glob)
796 elapsed time: * seconds (glob)
797 round-trips: 2
797 round-trips: 2
798 heads summary:
798 heads summary:
799 total common heads: 1
799 total common heads: 1
800 also local heads: 0
800 also local heads: 0
801 also remote heads: 0
801 also remote heads: 0
802 both: 0
802 both: 0
803 local heads: 1
803 local heads: 1
804 common: 0
804 common: 0
805 missing: 1
805 missing: 1
806 remote heads: 1
806 remote heads: 1
807 common: 0
807 common: 0
808 unknown: 1
808 unknown: 1
809 local changesets: 32
809 local changesets: 32
810 common: 2
810 common: 2
811 heads: 1
811 heads: 1
812 roots: 1
812 roots: 1
813 missing: 30
813 missing: 30
814 heads: 1
814 heads: 1
815 roots: 1
815 roots: 1
816 first undecided set: 32
816 first undecided set: 32
817 heads: 1
817 heads: 1
818 roots: 1
818 roots: 1
819 common: 2
819 common: 2
820 missing: 30
820 missing: 30
821 common heads: 66f7d451a68b
821 common heads: 66f7d451a68b
822
822
823 % -- b -> a set (tip only)
823 % -- b -> a set (tip only)
824 comparing with a
824 comparing with a
825 query 1; heads
825 query 1; heads
826 searching for changes
826 searching for changes
827 taking quick initial sample
827 taking quick initial sample
828 searching: 2 queries
828 searching: 2 queries
829 query 2; still undecided: 31, sample size is: 31
829 query 2; still undecided: 31, sample size is: 31
830 2 total queries in *.????s (glob)
830 2 total queries in *.????s (glob)
831 elapsed time: * seconds (glob)
831 elapsed time: * seconds (glob)
832 round-trips: 2
832 round-trips: 2
833 heads summary:
833 heads summary:
834 total common heads: 1
834 total common heads: 1
835 also local heads: 0
835 also local heads: 0
836 also remote heads: 0
836 also remote heads: 0
837 both: 0
837 both: 0
838 local heads: 1
838 local heads: 1
839 common: 0
839 common: 0
840 missing: 1
840 missing: 1
841 remote heads: 1
841 remote heads: 1
842 common: 0
842 common: 0
843 unknown: 1
843 unknown: 1
844 local changesets: 32
844 local changesets: 32
845 common: 2
845 common: 2
846 heads: 1
846 heads: 1
847 roots: 1
847 roots: 1
848 missing: 30
848 missing: 30
849 heads: 1
849 heads: 1
850 roots: 1
850 roots: 1
851 first undecided set: 32
851 first undecided set: 32
852 heads: 1
852 heads: 1
853 roots: 1
853 roots: 1
854 common: 2
854 common: 2
855 missing: 30
855 missing: 30
856 common heads: 66f7d451a68b
856 common heads: 66f7d451a68b
857
857
858
858
859 Both many new skewed:
859 Both many new skewed:
860
860
861 $ testdesc '-ra' '-rb' '
861 $ testdesc '-ra' '-rb' '
862 > +2:f +30 :b
862 > +2:f +30 :b
863 > <f +50 :a'
863 > <f +50 :a'
864
864
865 % -- a -> b tree
865 % -- a -> b tree
866 comparing with b
866 comparing with b
867 searching for changes
867 searching for changes
868 unpruned common: 66f7d451a68b
868 unpruned common: 66f7d451a68b
869 elapsed time: * seconds (glob)
869 elapsed time: * seconds (glob)
870 round-trips: 4
870 round-trips: 4
871 heads summary:
871 heads summary:
872 total common heads: 1
872 total common heads: 1
873 also local heads: 0
873 also local heads: 0
874 also remote heads: 0
874 also remote heads: 0
875 both: 0
875 both: 0
876 local heads: 1
876 local heads: 1
877 common: 0
877 common: 0
878 missing: 1
878 missing: 1
879 remote heads: 1
879 remote heads: 1
880 common: 0
880 common: 0
881 unknown: 1
881 unknown: 1
882 local changesets: 52
882 local changesets: 52
883 common: 2
883 common: 2
884 heads: 1
884 heads: 1
885 roots: 1
885 roots: 1
886 missing: 50
886 missing: 50
887 heads: 1
887 heads: 1
888 roots: 1
888 roots: 1
889 first undecided set: 52
889 first undecided set: 52
890 heads: 1
890 heads: 1
891 roots: 1
891 roots: 1
892 common: 2
892 common: 2
893 missing: 50
893 missing: 50
894 common heads: 66f7d451a68b
894 common heads: 66f7d451a68b
895
895
896 % -- a -> b set
896 % -- a -> b set
897 comparing with b
897 comparing with b
898 query 1; heads
898 query 1; heads
899 searching for changes
899 searching for changes
900 taking quick initial sample
900 taking quick initial sample
901 searching: 2 queries
901 searching: 2 queries
902 query 2; still undecided: 51, sample size is: 51
902 query 2; still undecided: 51, sample size is: 51
903 2 total queries in *.????s (glob)
903 2 total queries in *.????s (glob)
904 elapsed time: * seconds (glob)
904 elapsed time: * seconds (glob)
905 round-trips: 2
905 round-trips: 2
906 heads summary:
906 heads summary:
907 total common heads: 1
907 total common heads: 1
908 also local heads: 0
908 also local heads: 0
909 also remote heads: 0
909 also remote heads: 0
910 both: 0
910 both: 0
911 local heads: 1
911 local heads: 1
912 common: 0
912 common: 0
913 missing: 1
913 missing: 1
914 remote heads: 1
914 remote heads: 1
915 common: 0
915 common: 0
916 unknown: 1
916 unknown: 1
917 local changesets: 52
917 local changesets: 52
918 common: 2
918 common: 2
919 heads: 1
919 heads: 1
920 roots: 1
920 roots: 1
921 missing: 50
921 missing: 50
922 heads: 1
922 heads: 1
923 roots: 1
923 roots: 1
924 first undecided set: 52
924 first undecided set: 52
925 heads: 1
925 heads: 1
926 roots: 1
926 roots: 1
927 common: 2
927 common: 2
928 missing: 50
928 missing: 50
929 common heads: 66f7d451a68b
929 common heads: 66f7d451a68b
930
930
931 % -- a -> b set (tip only)
931 % -- a -> b set (tip only)
932 comparing with b
932 comparing with b
933 query 1; heads
933 query 1; heads
934 searching for changes
934 searching for changes
935 taking quick initial sample
935 taking quick initial sample
936 searching: 2 queries
936 searching: 2 queries
937 query 2; still undecided: 51, sample size is: 51
937 query 2; still undecided: 51, sample size is: 51
938 2 total queries in *.????s (glob)
938 2 total queries in *.????s (glob)
939 elapsed time: * seconds (glob)
939 elapsed time: * seconds (glob)
940 round-trips: 2
940 round-trips: 2
941 heads summary:
941 heads summary:
942 total common heads: 1
942 total common heads: 1
943 also local heads: 0
943 also local heads: 0
944 also remote heads: 0
944 also remote heads: 0
945 both: 0
945 both: 0
946 local heads: 1
946 local heads: 1
947 common: 0
947 common: 0
948 missing: 1
948 missing: 1
949 remote heads: 1
949 remote heads: 1
950 common: 0
950 common: 0
951 unknown: 1
951 unknown: 1
952 local changesets: 52
952 local changesets: 52
953 common: 2
953 common: 2
954 heads: 1
954 heads: 1
955 roots: 1
955 roots: 1
956 missing: 50
956 missing: 50
957 heads: 1
957 heads: 1
958 roots: 1
958 roots: 1
959 first undecided set: 52
959 first undecided set: 52
960 heads: 1
960 heads: 1
961 roots: 1
961 roots: 1
962 common: 2
962 common: 2
963 missing: 50
963 missing: 50
964 common heads: 66f7d451a68b
964 common heads: 66f7d451a68b
965
965
966 % -- b -> a tree
966 % -- b -> a tree
967 comparing with a
967 comparing with a
968 searching for changes
968 searching for changes
969 unpruned common: 66f7d451a68b
969 unpruned common: 66f7d451a68b
970 elapsed time: * seconds (glob)
970 elapsed time: * seconds (glob)
971 round-trips: 3
971 round-trips: 3
972 heads summary:
972 heads summary:
973 total common heads: 1
973 total common heads: 1
974 also local heads: 0
974 also local heads: 0
975 also remote heads: 0
975 also remote heads: 0
976 both: 0
976 both: 0
977 local heads: 1
977 local heads: 1
978 common: 0
978 common: 0
979 missing: 1
979 missing: 1
980 remote heads: 1
980 remote heads: 1
981 common: 0
981 common: 0
982 unknown: 1
982 unknown: 1
983 local changesets: 32
983 local changesets: 32
984 common: 2
984 common: 2
985 heads: 1
985 heads: 1
986 roots: 1
986 roots: 1
987 missing: 30
987 missing: 30
988 heads: 1
988 heads: 1
989 roots: 1
989 roots: 1
990 first undecided set: 32
990 first undecided set: 32
991 heads: 1
991 heads: 1
992 roots: 1
992 roots: 1
993 common: 2
993 common: 2
994 missing: 30
994 missing: 30
995 common heads: 66f7d451a68b
995 common heads: 66f7d451a68b
996
996
997 % -- b -> a set
997 % -- b -> a set
998 comparing with a
998 comparing with a
999 query 1; heads
999 query 1; heads
1000 searching for changes
1000 searching for changes
1001 taking quick initial sample
1001 taking quick initial sample
1002 searching: 2 queries
1002 searching: 2 queries
1003 query 2; still undecided: 31, sample size is: 31
1003 query 2; still undecided: 31, sample size is: 31
1004 2 total queries in *.????s (glob)
1004 2 total queries in *.????s (glob)
1005 elapsed time: * seconds (glob)
1005 elapsed time: * seconds (glob)
1006 round-trips: 2
1006 round-trips: 2
1007 heads summary:
1007 heads summary:
1008 total common heads: 1
1008 total common heads: 1
1009 also local heads: 0
1009 also local heads: 0
1010 also remote heads: 0
1010 also remote heads: 0
1011 both: 0
1011 both: 0
1012 local heads: 1
1012 local heads: 1
1013 common: 0
1013 common: 0
1014 missing: 1
1014 missing: 1
1015 remote heads: 1
1015 remote heads: 1
1016 common: 0
1016 common: 0
1017 unknown: 1
1017 unknown: 1
1018 local changesets: 32
1018 local changesets: 32
1019 common: 2
1019 common: 2
1020 heads: 1
1020 heads: 1
1021 roots: 1
1021 roots: 1
1022 missing: 30
1022 missing: 30
1023 heads: 1
1023 heads: 1
1024 roots: 1
1024 roots: 1
1025 first undecided set: 32
1025 first undecided set: 32
1026 heads: 1
1026 heads: 1
1027 roots: 1
1027 roots: 1
1028 common: 2
1028 common: 2
1029 missing: 30
1029 missing: 30
1030 common heads: 66f7d451a68b
1030 common heads: 66f7d451a68b
1031
1031
1032 % -- b -> a set (tip only)
1032 % -- b -> a set (tip only)
1033 comparing with a
1033 comparing with a
1034 query 1; heads
1034 query 1; heads
1035 searching for changes
1035 searching for changes
1036 taking quick initial sample
1036 taking quick initial sample
1037 searching: 2 queries
1037 searching: 2 queries
1038 query 2; still undecided: 31, sample size is: 31
1038 query 2; still undecided: 31, sample size is: 31
1039 2 total queries in *.????s (glob)
1039 2 total queries in *.????s (glob)
1040 elapsed time: * seconds (glob)
1040 elapsed time: * seconds (glob)
1041 round-trips: 2
1041 round-trips: 2
1042 heads summary:
1042 heads summary:
1043 total common heads: 1
1043 total common heads: 1
1044 also local heads: 0
1044 also local heads: 0
1045 also remote heads: 0
1045 also remote heads: 0
1046 both: 0
1046 both: 0
1047 local heads: 1
1047 local heads: 1
1048 common: 0
1048 common: 0
1049 missing: 1
1049 missing: 1
1050 remote heads: 1
1050 remote heads: 1
1051 common: 0
1051 common: 0
1052 unknown: 1
1052 unknown: 1
1053 local changesets: 32
1053 local changesets: 32
1054 common: 2
1054 common: 2
1055 heads: 1
1055 heads: 1
1056 roots: 1
1056 roots: 1
1057 missing: 30
1057 missing: 30
1058 heads: 1
1058 heads: 1
1059 roots: 1
1059 roots: 1
1060 first undecided set: 32
1060 first undecided set: 32
1061 heads: 1
1061 heads: 1
1062 roots: 1
1062 roots: 1
1063 common: 2
1063 common: 2
1064 missing: 30
1064 missing: 30
1065 common heads: 66f7d451a68b
1065 common heads: 66f7d451a68b
1066
1066
1067
1067
1068 Both many new on top of long history:
1068 Both many new on top of long history:
1069
1069
1070 $ testdesc '-ra' '-rb' '
1070 $ testdesc '-ra' '-rb' '
1071 > +1000:f +30 :b
1071 > +1000:f +30 :b
1072 > <f +50 :a'
1072 > <f +50 :a'
1073
1073
1074 % -- a -> b tree
1074 % -- a -> b tree
1075 comparing with b
1075 comparing with b
1076 searching for changes
1076 searching for changes
1077 unpruned common: 7ead0cba2838
1077 unpruned common: 7ead0cba2838
1078 elapsed time: * seconds (glob)
1078 elapsed time: * seconds (glob)
1079 round-trips: 4
1079 round-trips: 4
1080 heads summary:
1080 heads summary:
1081 total common heads: 1
1081 total common heads: 1
1082 also local heads: 0
1082 also local heads: 0
1083 also remote heads: 0
1083 also remote heads: 0
1084 both: 0
1084 both: 0
1085 local heads: 1
1085 local heads: 1
1086 common: 0
1086 common: 0
1087 missing: 1
1087 missing: 1
1088 remote heads: 1
1088 remote heads: 1
1089 common: 0
1089 common: 0
1090 unknown: 1
1090 unknown: 1
1091 local changesets: 1050
1091 local changesets: 1050
1092 common: 1000
1092 common: 1000
1093 heads: 1
1093 heads: 1
1094 roots: 1
1094 roots: 1
1095 missing: 50
1095 missing: 50
1096 heads: 1
1096 heads: 1
1097 roots: 1
1097 roots: 1
1098 first undecided set: 1050
1098 first undecided set: 1050
1099 heads: 1
1099 heads: 1
1100 roots: 1
1100 roots: 1
1101 common: 1000
1101 common: 1000
1102 missing: 50
1102 missing: 50
1103 common heads: 7ead0cba2838
1103 common heads: 7ead0cba2838
1104
1104
1105 % -- a -> b set
1105 % -- a -> b set
1106 comparing with b
1106 comparing with b
1107 query 1; heads
1107 query 1; heads
1108 searching for changes
1108 searching for changes
1109 taking quick initial sample
1109 taking quick initial sample
1110 searching: 2 queries
1110 searching: 2 queries
1111 query 2; still undecided: 1049, sample size is: 11
1111 query 2; still undecided: 1049, sample size is: 11
1112 sampling from both directions
1112 sampling from both directions
1113 searching: 3 queries
1113 searching: 3 queries
1114 query 3; still undecided: 31, sample size is: 31
1114 query 3; still undecided: 31, sample size is: 31
1115 3 total queries in *.????s (glob)
1115 3 total queries in *.????s (glob)
1116 elapsed time: * seconds (glob)
1116 elapsed time: * seconds (glob)
1117 round-trips: 3
1117 round-trips: 3
1118 heads summary:
1118 heads summary:
1119 total common heads: 1
1119 total common heads: 1
1120 also local heads: 0
1120 also local heads: 0
1121 also remote heads: 0
1121 also remote heads: 0
1122 both: 0
1122 both: 0
1123 local heads: 1
1123 local heads: 1
1124 common: 0
1124 common: 0
1125 missing: 1
1125 missing: 1
1126 remote heads: 1
1126 remote heads: 1
1127 common: 0
1127 common: 0
1128 unknown: 1
1128 unknown: 1
1129 local changesets: 1050
1129 local changesets: 1050
1130 common: 1000
1130 common: 1000
1131 heads: 1
1131 heads: 1
1132 roots: 1
1132 roots: 1
1133 missing: 50
1133 missing: 50
1134 heads: 1
1134 heads: 1
1135 roots: 1
1135 roots: 1
1136 first undecided set: 1050
1136 first undecided set: 1050
1137 heads: 1
1137 heads: 1
1138 roots: 1
1138 roots: 1
1139 common: 1000
1139 common: 1000
1140 missing: 50
1140 missing: 50
1141 common heads: 7ead0cba2838
1141 common heads: 7ead0cba2838
1142
1142
1143 % -- a -> b set (tip only)
1143 % -- a -> b set (tip only)
1144 comparing with b
1144 comparing with b
1145 query 1; heads
1145 query 1; heads
1146 searching for changes
1146 searching for changes
1147 taking quick initial sample
1147 taking quick initial sample
1148 searching: 2 queries
1148 searching: 2 queries
1149 query 2; still undecided: 1049, sample size is: 11
1149 query 2; still undecided: 1049, sample size is: 11
1150 sampling from both directions
1150 sampling from both directions
1151 searching: 3 queries
1151 searching: 3 queries
1152 query 3; still undecided: 31, sample size is: 31
1152 query 3; still undecided: 31, sample size is: 31
1153 3 total queries in *.????s (glob)
1153 3 total queries in *.????s (glob)
1154 elapsed time: * seconds (glob)
1154 elapsed time: * seconds (glob)
1155 round-trips: 3
1155 round-trips: 3
1156 heads summary:
1156 heads summary:
1157 total common heads: 1
1157 total common heads: 1
1158 also local heads: 0
1158 also local heads: 0
1159 also remote heads: 0
1159 also remote heads: 0
1160 both: 0
1160 both: 0
1161 local heads: 1
1161 local heads: 1
1162 common: 0
1162 common: 0
1163 missing: 1
1163 missing: 1
1164 remote heads: 1
1164 remote heads: 1
1165 common: 0
1165 common: 0
1166 unknown: 1
1166 unknown: 1
1167 local changesets: 1050
1167 local changesets: 1050
1168 common: 1000
1168 common: 1000
1169 heads: 1
1169 heads: 1
1170 roots: 1
1170 roots: 1
1171 missing: 50
1171 missing: 50
1172 heads: 1
1172 heads: 1
1173 roots: 1
1173 roots: 1
1174 first undecided set: 1050
1174 first undecided set: 1050
1175 heads: 1
1175 heads: 1
1176 roots: 1
1176 roots: 1
1177 common: 1000
1177 common: 1000
1178 missing: 50
1178 missing: 50
1179 common heads: 7ead0cba2838
1179 common heads: 7ead0cba2838
1180
1180
1181 % -- b -> a tree
1181 % -- b -> a tree
1182 comparing with a
1182 comparing with a
1183 searching for changes
1183 searching for changes
1184 unpruned common: 7ead0cba2838
1184 unpruned common: 7ead0cba2838
1185 elapsed time: * seconds (glob)
1185 elapsed time: * seconds (glob)
1186 round-trips: 3
1186 round-trips: 3
1187 heads summary:
1187 heads summary:
1188 total common heads: 1
1188 total common heads: 1
1189 also local heads: 0
1189 also local heads: 0
1190 also remote heads: 0
1190 also remote heads: 0
1191 both: 0
1191 both: 0
1192 local heads: 1
1192 local heads: 1
1193 common: 0
1193 common: 0
1194 missing: 1
1194 missing: 1
1195 remote heads: 1
1195 remote heads: 1
1196 common: 0
1196 common: 0
1197 unknown: 1
1197 unknown: 1
1198 local changesets: 1030
1198 local changesets: 1030
1199 common: 1000
1199 common: 1000
1200 heads: 1
1200 heads: 1
1201 roots: 1
1201 roots: 1
1202 missing: 30
1202 missing: 30
1203 heads: 1
1203 heads: 1
1204 roots: 1
1204 roots: 1
1205 first undecided set: 1030
1205 first undecided set: 1030
1206 heads: 1
1206 heads: 1
1207 roots: 1
1207 roots: 1
1208 common: 1000
1208 common: 1000
1209 missing: 30
1209 missing: 30
1210 common heads: 7ead0cba2838
1210 common heads: 7ead0cba2838
1211
1211
1212 % -- b -> a set
1212 % -- b -> a set
1213 comparing with a
1213 comparing with a
1214 query 1; heads
1214 query 1; heads
1215 searching for changes
1215 searching for changes
1216 taking quick initial sample
1216 taking quick initial sample
1217 searching: 2 queries
1217 searching: 2 queries
1218 query 2; still undecided: 1029, sample size is: 11
1218 query 2; still undecided: 1029, sample size is: 11
1219 sampling from both directions
1219 sampling from both directions
1220 searching: 3 queries
1220 searching: 3 queries
1221 query 3; still undecided: 15, sample size is: 15
1221 query 3; still undecided: 15, sample size is: 15
1222 3 total queries in *.????s (glob)
1222 3 total queries in *.????s (glob)
1223 elapsed time: * seconds (glob)
1223 elapsed time: * seconds (glob)
1224 round-trips: 3
1224 round-trips: 3
1225 heads summary:
1225 heads summary:
1226 total common heads: 1
1226 total common heads: 1
1227 also local heads: 0
1227 also local heads: 0
1228 also remote heads: 0
1228 also remote heads: 0
1229 both: 0
1229 both: 0
1230 local heads: 1
1230 local heads: 1
1231 common: 0
1231 common: 0
1232 missing: 1
1232 missing: 1
1233 remote heads: 1
1233 remote heads: 1
1234 common: 0
1234 common: 0
1235 unknown: 1
1235 unknown: 1
1236 local changesets: 1030
1236 local changesets: 1030
1237 common: 1000
1237 common: 1000
1238 heads: 1
1238 heads: 1
1239 roots: 1
1239 roots: 1
1240 missing: 30
1240 missing: 30
1241 heads: 1
1241 heads: 1
1242 roots: 1
1242 roots: 1
1243 first undecided set: 1030
1243 first undecided set: 1030
1244 heads: 1
1244 heads: 1
1245 roots: 1
1245 roots: 1
1246 common: 1000
1246 common: 1000
1247 missing: 30
1247 missing: 30
1248 common heads: 7ead0cba2838
1248 common heads: 7ead0cba2838
1249
1249
1250 % -- b -> a set (tip only)
1250 % -- b -> a set (tip only)
1251 comparing with a
1251 comparing with a
1252 query 1; heads
1252 query 1; heads
1253 searching for changes
1253 searching for changes
1254 taking quick initial sample
1254 taking quick initial sample
1255 searching: 2 queries
1255 searching: 2 queries
1256 query 2; still undecided: 1029, sample size is: 11
1256 query 2; still undecided: 1029, sample size is: 11
1257 sampling from both directions
1257 sampling from both directions
1258 searching: 3 queries
1258 searching: 3 queries
1259 query 3; still undecided: 15, sample size is: 15
1259 query 3; still undecided: 15, sample size is: 15
1260 3 total queries in *.????s (glob)
1260 3 total queries in *.????s (glob)
1261 elapsed time: * seconds (glob)
1261 elapsed time: * seconds (glob)
1262 round-trips: 3
1262 round-trips: 3
1263 heads summary:
1263 heads summary:
1264 total common heads: 1
1264 total common heads: 1
1265 also local heads: 0
1265 also local heads: 0
1266 also remote heads: 0
1266 also remote heads: 0
1267 both: 0
1267 both: 0
1268 local heads: 1
1268 local heads: 1
1269 common: 0
1269 common: 0
1270 missing: 1
1270 missing: 1
1271 remote heads: 1
1271 remote heads: 1
1272 common: 0
1272 common: 0
1273 unknown: 1
1273 unknown: 1
1274 local changesets: 1030
1274 local changesets: 1030
1275 common: 1000
1275 common: 1000
1276 heads: 1
1276 heads: 1
1277 roots: 1
1277 roots: 1
1278 missing: 30
1278 missing: 30
1279 heads: 1
1279 heads: 1
1280 roots: 1
1280 roots: 1
1281 first undecided set: 1030
1281 first undecided set: 1030
1282 heads: 1
1282 heads: 1
1283 roots: 1
1283 roots: 1
1284 common: 1000
1284 common: 1000
1285 missing: 30
1285 missing: 30
1286 common heads: 7ead0cba2838
1286 common heads: 7ead0cba2838
1287
1287
1288
1288
1289 One with >200 heads. We now switch to send them all in the initial roundtrip, but still do sampling for the later request.
1289 One with >200 heads. We now switch to send them all in the initial roundtrip, but still do sampling for the later request.
1290
1290
1291 $ hg init manyheads
1291 $ hg init manyheads
1292 $ cd manyheads
1292 $ cd manyheads
1293 $ echo "+300:r @a" >dagdesc
1293 $ echo "+300:r @a" >dagdesc
1294 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1294 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1295 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1295 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1296 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1296 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1297 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1297 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1298 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1298 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1299 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1299 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1300 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1300 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1301 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1301 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1302 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1302 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1303 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1303 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1304 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1304 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1305 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1305 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1306 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1306 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1307 $ echo "@b *r+3" >>dagdesc # one more head
1307 $ echo "@b *r+3" >>dagdesc # one more head
1308 $ hg debugbuilddag <dagdesc
1308 $ hg debugbuilddag <dagdesc
1309 reading DAG from stdin
1309 reading DAG from stdin
1310
1310
1311 $ hg heads -t --template . | wc -c
1311 $ hg heads -t --template . | wc -c
1312 \s*261 (re)
1312 \s*261 (re)
1313
1313
1314 $ hg clone -b a . a
1314 $ hg clone -b a . a
1315 adding changesets
1315 adding changesets
1316 adding manifests
1316 adding manifests
1317 adding file changes
1317 adding file changes
1318 added 1340 changesets with 0 changes to 0 files (+259 heads)
1318 added 1340 changesets with 0 changes to 0 files (+259 heads)
1319 new changesets 1ea73414a91b:1c51e2c80832
1319 new changesets 1ea73414a91b:1c51e2c80832
1320 updating to branch a
1320 updating to branch a
1321 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1321 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1322 $ hg clone -b b . b
1322 $ hg clone -b b . b
1323 adding changesets
1323 adding changesets
1324 adding manifests
1324 adding manifests
1325 adding file changes
1325 adding file changes
1326 added 304 changesets with 0 changes to 0 files
1326 added 304 changesets with 0 changes to 0 files
1327 new changesets 1ea73414a91b:513314ca8b3a
1327 new changesets 1ea73414a91b:513314ca8b3a
1328 updating to branch b
1328 updating to branch b
1329 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1329 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1330
1330
1331 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --config devel.discovery.randomize=false --config devel.discovery.sample-size.initial=50
1331 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --config devel.discovery.randomize=false --config devel.discovery.sample-size.initial=50
1332 comparing with b
1332 comparing with b
1333 query 1; heads
1333 query 1; heads
1334 searching for changes
1334 searching for changes
1335 taking quick initial sample
1335 taking quick initial sample
1336 searching: 2 queries
1336 searching: 2 queries
1337 query 2; still undecided: 1080, sample size is: 50
1337 query 2; still undecided: 1080, sample size is: 50
1338 sampling from both directions
1338 sampling from both directions
1339 searching: 3 queries
1339 searching: 3 queries
1340 query 3; still undecided: 1030, sample size is: 200
1340 query 3; still undecided: 1030, sample size is: 200
1341 sampling from both directions
1341 sampling from both directions
1342 searching: 4 queries
1342 searching: 4 queries
1343 query 4; still undecided: 547, sample size is: 210
1343 query 4; still undecided: 547, sample size is: 210
1344 sampling from both directions
1344 sampling from both directions
1345 searching: 5 queries
1345 searching: 5 queries
1346 query 5; still undecided: 336, sample size is: 220
1346 query 5; still undecided: 336, sample size is: 220
1347 sampling from both directions
1347 sampling from both directions
1348 searching: 6 queries
1348 searching: 6 queries
1349 query 6; still undecided: 114, sample size is: 114
1349 query 6; still undecided: 114, sample size is: 114
1350 6 total queries in *.????s (glob)
1350 6 total queries in *.????s (glob)
1351 elapsed time: * seconds (glob)
1351 elapsed time: * seconds (glob)
1352 round-trips: 6
1352 round-trips: 6
1353 heads summary:
1353 heads summary:
1354 total common heads: 1
1354 total common heads: 1
1355 also local heads: 0
1355 also local heads: 0
1356 also remote heads: 0
1356 also remote heads: 0
1357 both: 0
1357 both: 0
1358 local heads: 260
1358 local heads: 260
1359 common: 0
1359 common: 0
1360 missing: 260
1360 missing: 260
1361 remote heads: 1
1361 remote heads: 1
1362 common: 0
1362 common: 0
1363 unknown: 1
1363 unknown: 1
1364 local changesets: 1340
1364 local changesets: 1340
1365 common: 300
1365 common: 300
1366 heads: 1
1366 heads: 1
1367 roots: 1
1367 roots: 1
1368 missing: 1040
1368 missing: 1040
1369 heads: 260
1369 heads: 260
1370 roots: 260
1370 roots: 260
1371 first undecided set: 1340
1371 first undecided set: 1340
1372 heads: 260
1372 heads: 260
1373 roots: 1
1373 roots: 1
1374 common: 300
1374 common: 300
1375 missing: 1040
1375 missing: 1040
1376 common heads: 3ee37d65064a
1376 common heads: 3ee37d65064a
1377 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --rev tip
1377 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --rev tip
1378 comparing with b
1378 comparing with b
1379 query 1; heads
1379 query 1; heads
1380 searching for changes
1380 searching for changes
1381 taking quick initial sample
1381 taking quick initial sample
1382 searching: 2 queries
1382 searching: 2 queries
1383 query 2; still undecided: 303, sample size is: 9
1383 query 2; still undecided: 303, sample size is: 9
1384 sampling from both directions
1384 sampling from both directions
1385 searching: 3 queries
1385 searching: 3 queries
1386 query 3; still undecided: 3, sample size is: 3
1386 query 3; still undecided: 3, sample size is: 3
1387 3 total queries in *.????s (glob)
1387 3 total queries in *.????s (glob)
1388 elapsed time: * seconds (glob)
1388 elapsed time: * seconds (glob)
1389 round-trips: 3
1389 round-trips: 3
1390 heads summary:
1390 heads summary:
1391 total common heads: 1
1391 total common heads: 1
1392 also local heads: 0
1392 also local heads: 0
1393 also remote heads: 0
1393 also remote heads: 0
1394 both: 0
1394 both: 0
1395 local heads: 260
1395 local heads: 260
1396 common: 0
1396 common: 0
1397 missing: 260
1397 missing: 260
1398 remote heads: 1
1398 remote heads: 1
1399 common: 0
1399 common: 0
1400 unknown: 1
1400 unknown: 1
1401 local changesets: 1340
1401 local changesets: 1340
1402 common: 300
1402 common: 300
1403 heads: 1
1403 heads: 1
1404 roots: 1
1404 roots: 1
1405 missing: 1040
1405 missing: 1040
1406 heads: 260
1406 heads: 260
1407 roots: 260
1407 roots: 260
1408 first undecided set: 1340
1408 first undecided set: 1340
1409 heads: 260
1409 heads: 260
1410 roots: 1
1410 roots: 1
1411 common: 300
1411 common: 300
1412 missing: 1040
1412 missing: 1040
1413 common heads: 3ee37d65064a
1413 common heads: 3ee37d65064a
1414
1414
1415 $ hg -R a debugdiscovery b --debug --config devel.discovery.exchange-heads=false --config devel.discovery.randomize=false --config devel.discovery.grow-sample.rate=1.20 --config devel.discovery.sample-size=50
1415 $ hg -R a debugdiscovery b --debug --config devel.discovery.exchange-heads=false --config devel.discovery.randomize=false --config devel.discovery.grow-sample.rate=1.20 --config devel.discovery.sample-size=50
1416 comparing with b
1416 comparing with b
1417 searching for changes
1417 searching for changes
1418 sampling from both directions
1418 sampling from both directions
1419 query 1; still undecided: 1340, sample size is: 50
1419 query 1; still undecided: 1340, sample size is: 50
1420 sampling from both directions
1420 sampling from both directions
1421 query 2; still undecided: 995, sample size is: 60
1421 query 2; still undecided: 995, sample size is: 60
1422 sampling from both directions
1422 sampling from both directions
1423 query 3; still undecided: 913, sample size is: 72
1423 query 3; still undecided: 913, sample size is: 72
1424 sampling from both directions
1424 sampling from both directions
1425 query 4; still undecided: 816, sample size is: 204
1425 query 4; still undecided: 816, sample size is: 204
1426 sampling from both directions
1426 sampling from both directions
1427 query 5; still undecided: 612, sample size is: 153
1427 query 5; still undecided: 612, sample size is: 153
1428 sampling from both directions
1428 sampling from both directions
1429 query 6; still undecided: 456, sample size is: 123
1429 query 6; still undecided: 456, sample size is: 123
1430 sampling from both directions
1430 sampling from both directions
1431 query 7; still undecided: 332, sample size is: 147
1431 query 7; still undecided: 332, sample size is: 147
1432 sampling from both directions
1432 sampling from both directions
1433 query 8; still undecided: 184, sample size is: 176
1433 query 8; still undecided: 184, sample size is: 176
1434 sampling from both directions
1434 sampling from both directions
1435 query 9; still undecided: 8, sample size is: 8
1435 query 9; still undecided: 8, sample size is: 8
1436 9 total queries in *s (glob)
1436 9 total queries in *s (glob)
1437 elapsed time: * seconds (glob)
1437 elapsed time: * seconds (glob)
1438 round-trips: 9
1438 round-trips: 9
1439 heads summary:
1439 heads summary:
1440 total common heads: 1
1440 total common heads: 1
1441 also local heads: 0
1441 also local heads: 0
1442 also remote heads: 0
1442 also remote heads: 0
1443 both: 0
1443 both: 0
1444 local heads: 260
1444 local heads: 260
1445 common: 0
1445 common: 0
1446 missing: 260
1446 missing: 260
1447 remote heads: 1
1447 remote heads: 1
1448 common: 0
1448 common: 0
1449 unknown: 1
1449 unknown: 1
1450 local changesets: 1340
1450 local changesets: 1340
1451 common: 300
1451 common: 300
1452 heads: 1
1452 heads: 1
1453 roots: 1
1453 roots: 1
1454 missing: 1040
1454 missing: 1040
1455 heads: 260
1455 heads: 260
1456 roots: 260
1456 roots: 260
1457 first undecided set: 1340
1457 first undecided set: 1340
1458 heads: 260
1458 heads: 260
1459 roots: 1
1459 roots: 1
1460 common: 300
1460 common: 300
1461 missing: 1040
1461 missing: 1040
1462 common heads: 3ee37d65064a
1462 common heads: 3ee37d65064a
1463
1463
1464 Test actual protocol when pulling one new head in addition to common heads
1464 Test actual protocol when pulling one new head in addition to common heads
1465
1465
1466 $ hg clone -U b c
1466 $ hg clone -U b c
1467 $ hg -R c id -ir tip
1467 $ hg -R c id -ir tip
1468 513314ca8b3a
1468 513314ca8b3a
1469 $ hg -R c up -qr default
1469 $ hg -R c up -qr default
1470 $ touch c/f
1470 $ touch c/f
1471 $ hg -R c ci -Aqm "extra head"
1471 $ hg -R c ci -Aqm "extra head"
1472 $ hg -R c id -i
1472 $ hg -R c id -i
1473 e64a39e7da8b
1473 e64a39e7da8b
1474
1474
1475 $ hg serve -R c -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
1475 $ hg serve -R c -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
1476 $ cat hg.pid >> $DAEMON_PIDS
1476 $ cat hg.pid >> $DAEMON_PIDS
1477
1477
1478 $ hg -R b incoming http://localhost:$HGPORT/ -T '{node|short}\n'
1478 $ hg -R b incoming http://localhost:$HGPORT/ -T '{node|short}\n'
1479 comparing with http://localhost:$HGPORT/
1479 comparing with http://localhost:$HGPORT/
1480 searching for changes
1480 searching for changes
1481 e64a39e7da8b
1481 e64a39e7da8b
1482
1482
1483 $ killdaemons.py
1483 $ killdaemons.py
1484 $ cut -d' ' -f6- access.log | grep -v cmd=known # cmd=known uses random sampling
1484 $ cut -d' ' -f6- access.log | grep -v cmd=known # cmd=known uses random sampling
1485 "GET /?cmd=capabilities HTTP/1.1" 200 -
1485 "GET /?cmd=capabilities HTTP/1.1" 200 -
1486 "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D513314ca8b3ae4dac8eec56966265b00fcf866db x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1486 "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D513314ca8b3ae4dac8eec56966265b00fcf866db x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1487 "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:$USUAL_BUNDLE_CAPS$&cg=1&common=513314ca8b3ae4dac8eec56966265b00fcf866db&heads=e64a39e7da8b0d54bc63e81169aff001c13b3477 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1487 "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:$USUAL_BUNDLE_CAPS$&cg=1&common=513314ca8b3ae4dac8eec56966265b00fcf866db&heads=e64a39e7da8b0d54bc63e81169aff001c13b3477 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1488 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1488 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1489 $ cat errors.log
1489 $ cat errors.log
1490
1490
1491 $ cd ..
1491 $ cd ..
1492
1492
1493
1493
1494 Issue 4438 - test coverage for 3ef893520a85 issues.
1494 Issue 4438 - test coverage for 3ef893520a85 issues.
1495
1495
1496 $ mkdir issue4438
1496 $ mkdir issue4438
1497 $ cd issue4438
1497 $ cd issue4438
1498 #if false
1498 #if false
1499 generate new bundles:
1499 generate new bundles:
1500 $ hg init r1
1500 $ hg init r1
1501 $ for i in `"$PYTHON" $TESTDIR/seq.py 101`; do hg -R r1 up -qr null && hg -R r1 branch -q b$i && hg -R r1 ci -qmb$i; done
1501 $ for i in `"$PYTHON" $TESTDIR/seq.py 101`; do hg -R r1 up -qr null && hg -R r1 branch -q b$i && hg -R r1 ci -qmb$i; done
1502 $ hg clone -q r1 r2
1502 $ hg clone -q r1 r2
1503 $ for i in `"$PYTHON" $TESTDIR/seq.py 10`; do hg -R r1 up -qr null && hg -R r1 branch -q c$i && hg -R r1 ci -qmc$i; done
1503 $ for i in `"$PYTHON" $TESTDIR/seq.py 10`; do hg -R r1 up -qr null && hg -R r1 branch -q c$i && hg -R r1 ci -qmc$i; done
1504 $ hg -R r2 branch -q r2change && hg -R r2 ci -qmr2change
1504 $ hg -R r2 branch -q r2change && hg -R r2 ci -qmr2change
1505 $ hg -R r1 bundle -qa $TESTDIR/bundles/issue4438-r1.hg
1505 $ hg -R r1 bundle -qa $TESTDIR/bundles/issue4438-r1.hg
1506 $ hg -R r2 bundle -qa $TESTDIR/bundles/issue4438-r2.hg
1506 $ hg -R r2 bundle -qa $TESTDIR/bundles/issue4438-r2.hg
1507 #else
1507 #else
1508 use existing bundles:
1508 use existing bundles:
1509 $ hg init r1
1509 $ hg init r1
1510 $ hg -R r1 -q unbundle $TESTDIR/bundles/issue4438-r1.hg
1510 $ hg -R r1 -q unbundle $TESTDIR/bundles/issue4438-r1.hg
1511 $ hg -R r1 -q up
1511 $ hg -R r1 -q up
1512 $ hg init r2
1512 $ hg init r2
1513 $ hg -R r2 -q unbundle $TESTDIR/bundles/issue4438-r2.hg
1513 $ hg -R r2 -q unbundle $TESTDIR/bundles/issue4438-r2.hg
1514 $ hg -R r2 -q up
1514 $ hg -R r2 -q up
1515 #endif
1515 #endif
1516
1516
1517 Set iteration order could cause wrong and unstable results - fixed in 73cfaa348650:
1517 Set iteration order could cause wrong and unstable results - fixed in 73cfaa348650:
1518
1518
1519 $ hg -R r1 outgoing r2 -T'{rev} '
1519 $ hg -R r1 outgoing r2 -T'{rev} '
1520 comparing with r2
1520 comparing with r2
1521 searching for changes
1521 searching for changes
1522 101 102 103 104 105 106 107 108 109 110 (no-eol)
1522 101 102 103 104 105 106 107 108 109 110 (no-eol)
1523
1523
1524 The case where all the 'initialsamplesize' samples already were common would
1524 The case where all the 'initialsamplesize' samples already were common would
1525 give 'all remote heads known locally' without checking the remaining heads -
1525 give 'all remote heads known locally' without checking the remaining heads -
1526 fixed in 86c35b7ae300:
1526 fixed in 86c35b7ae300:
1527
1527
1528 $ cat >> r1/.hg/hgrc << EOF
1528 $ cat >> r1/.hg/hgrc << EOF
1529 > [devel]
1529 > [devel]
1530 > discovery.randomize = False
1530 > discovery.randomize = False
1531 > EOF
1531 > EOF
1532
1532
1533 $ hg -R r1 outgoing r2 -T'{rev} ' --config extensions.blackbox= \
1533 $ hg -R r1 outgoing r2 -T'{rev} ' --config extensions.blackbox= \
1534 > --config blackbox.track='command commandfinish discovery'
1534 > --config blackbox.track='command commandfinish discovery'
1535 comparing with r2
1535 comparing with r2
1536 searching for changes
1536 searching for changes
1537 101 102 103 104 105 106 107 108 109 110 (no-eol)
1537 101 102 103 104 105 106 107 108 109 110 (no-eol)
1538 $ hg -R r1 --config extensions.blackbox= blackbox --config blackbox.track=
1538 $ hg -R r1 --config extensions.blackbox= blackbox --config blackbox.track=
1539 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> serve --cmdserver chgunix * (glob) (chg !)
1539 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> serve --cmdserver chgunix * (glob) (chg !)
1540 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* (glob)
1540 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* (glob)
1541 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> found 101 common and 1 unknown server heads, 1 roundtrips in *.????s (glob)
1541 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> found 101 common and 1 unknown server heads, 1 roundtrips in *.????s (glob)
1542 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* exited 0 after *.?? seconds (glob)
1542 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* exited 0 after *.?? seconds (glob)
1543 $ cd ..
1543 $ cd ..
1544
1544
1545 Even if the set of revs to discover is restricted, unrelated revs may be
1545 Even if the set of revs to discover is restricted, unrelated revs may be
1546 returned as common heads.
1546 returned as common heads.
1547
1547
1548 $ mkdir ancestorsof
1548 $ mkdir ancestorsof
1549 $ cd ancestorsof
1549 $ cd ancestorsof
1550 $ hg init a
1550 $ hg init a
1551 $ hg clone a b -q
1551 $ hg clone a b -q
1552 $ cd b
1552 $ cd b
1553 $ hg debugbuilddag '.:root *root *root'
1553 $ hg debugbuilddag '.:root *root *root'
1554 $ hg log -G -T '{node|short}'
1554 $ hg log -G -T '{node|short}'
1555 o fa942426a6fd
1555 o fa942426a6fd
1556 |
1556 |
1557 | o 66f7d451a68b
1557 | o 66f7d451a68b
1558 |/
1558 |/
1559 o 1ea73414a91b
1559 o 1ea73414a91b
1560
1560
1561 $ hg push -r 66f7d451a68b -q
1561 $ hg push -r 66f7d451a68b -q
1562 $ hg debugdiscovery --verbose --rev fa942426a6fd
1562 $ hg debugdiscovery --verbose --rev fa942426a6fd
1563 comparing with $TESTTMP/ancestorsof/a
1563 comparing with $TESTTMP/ancestorsof/a
1564 searching for changes
1564 searching for changes
1565 elapsed time: * seconds (glob)
1565 elapsed time: * seconds (glob)
1566 round-trips: 1
1566 round-trips: 1
1567 heads summary:
1567 heads summary:
1568 total common heads: 1
1568 total common heads: 1
1569 also local heads: 1
1569 also local heads: 1
1570 also remote heads: 1
1570 also remote heads: 1
1571 both: 1
1571 both: 1
1572 local heads: 2
1572 local heads: 2
1573 common: 1
1573 common: 1
1574 missing: 1
1574 missing: 1
1575 remote heads: 1
1575 remote heads: 1
1576 common: 1
1576 common: 1
1577 unknown: 0
1577 unknown: 0
1578 local changesets: 3
1578 local changesets: 3
1579 common: 2
1579 common: 2
1580 heads: 1
1580 heads: 1
1581 roots: 1
1581 roots: 1
1582 missing: 1
1582 missing: 1
1583 heads: 1
1583 heads: 1
1584 roots: 1
1584 roots: 1
1585 first undecided set: 1
1585 first undecided set: 1
1586 heads: 1
1586 heads: 1
1587 roots: 1
1587 roots: 1
1588 common: 0
1588 common: 0
1589 missing: 1
1589 missing: 1
1590 common heads: 66f7d451a68b
1590 common heads: 66f7d451a68b
1591
1591
1592 $ cd ..
1592 $ cd ..
1593
1593
1594
1594
1595 Test debuging discovery using different subset of the same repository
1595 Test debuging discovery using different subset of the same repository
1596 =====================================================================
1596 =====================================================================
1597
1597
1598 remote is a local subset
1598 remote is a local subset
1599 ------------------------
1599 ------------------------
1600
1600
1601 remote will be last 25 heads of the local graph
1601 remote will be last 25 heads of the local graph
1602
1602
1603 $ cd $TESTTMP/manyheads
1603 $ cd $TESTTMP/manyheads
1604 $ hg -R a debugdiscovery \
1604 $ hg -R a debugdiscovery \
1605 > --debug \
1605 > --debug \
1606 > --remote-as-revs 'last(heads(all()), 25)' \
1606 > --remote-as-revs 'last(heads(all()), 25)' \
1607 > --config devel.discovery.randomize=false
1607 > --config devel.discovery.randomize=false
1608 query 1; heads
1608 query 1; heads
1609 searching for changes
1609 searching for changes
1610 all remote heads known locally
1610 all remote heads known locally
1611 elapsed time: * seconds (glob)
1611 elapsed time: * seconds (glob)
1612 round-trips: 1
1612 round-trips: 1
1613 heads summary:
1613 heads summary:
1614 total common heads: 25
1614 total common heads: 25
1615 also local heads: 25
1615 also local heads: 25
1616 also remote heads: 25
1616 also remote heads: 25
1617 both: 25
1617 both: 25
1618 local heads: 260
1618 local heads: 260
1619 common: 25
1619 common: 25
1620 missing: 235
1620 missing: 235
1621 remote heads: 25
1621 remote heads: 25
1622 common: 25
1622 common: 25
1623 unknown: 0
1623 unknown: 0
1624 local changesets: 1340
1624 local changesets: 1340
1625 common: 400
1625 common: 400
1626 heads: 25
1626 heads: 25
1627 roots: 1
1627 roots: 1
1628 missing: 940
1628 missing: 940
1629 heads: 235
1629 heads: 235
1630 roots: 235
1630 roots: 235
1631 first undecided set: 940
1631 first undecided set: 940
1632 heads: 235
1632 heads: 235
1633 roots: 235
1633 roots: 235
1634 common: 0
1634 common: 0
1635 missing: 940
1635 missing: 940
1636 common heads: 0dfd965d91c6 0fe09b60448d 14a17233ce9d 175c0a3072cf 1c51e2c80832 1e51600e0698 24eb5f9bdbab 25ce09526613 36bd00abde57 426989fdefa0 596d87362679 5dd1039ea5c0 5ef24f022278 5f230dc19419 80b39998accb 88f40688ffb5 9e37ddf8c632 abf4d55b075e b2ce801fddfe b368b6ac3ce3 c959bf2e869c c9fba6ba4e2e d783207cf649 d9a51e256f21 e3717a4e3753
1636 common heads: 0dfd965d91c6 0fe09b60448d 14a17233ce9d 175c0a3072cf 1c51e2c80832 1e51600e0698 24eb5f9bdbab 25ce09526613 36bd00abde57 426989fdefa0 596d87362679 5dd1039ea5c0 5ef24f022278 5f230dc19419 80b39998accb 88f40688ffb5 9e37ddf8c632 abf4d55b075e b2ce801fddfe b368b6ac3ce3 c959bf2e869c c9fba6ba4e2e d783207cf649 d9a51e256f21 e3717a4e3753
1637
1637
1638 local is a local subset
1638 local is a local subset
1639 ------------------------
1639 ------------------------
1640
1640
1641 remote will be last 25 heads of the local graph
1641 remote will be last 25 heads of the local graph
1642
1642
1643 $ cd $TESTTMP/manyheads
1643 $ cd $TESTTMP/manyheads
1644 $ hg -R a debugdiscovery b \
1644 $ hg -R a debugdiscovery b \
1645 > --debug \
1645 > --debug \
1646 > --local-as-revs 'first(heads(all()), 25)' \
1646 > --local-as-revs 'first(heads(all()), 25)' \
1647 > --config devel.discovery.randomize=false
1647 > --config devel.discovery.randomize=false
1648 comparing with b
1648 comparing with b
1649 query 1; heads
1649 query 1; heads
1650 searching for changes
1650 searching for changes
1651 taking quick initial sample
1651 taking quick initial sample
1652 query 2; still undecided: 375, sample size is: 81
1652 query 2; still undecided: 375, sample size is: 81
1653 sampling from both directions
1653 sampling from both directions
1654 query 3; still undecided: 3, sample size is: 3
1654 query 3; still undecided: 3, sample size is: 3
1655 3 total queries *s (glob)
1655 3 total queries *s (glob)
1656 elapsed time: * seconds (glob)
1656 elapsed time: * seconds (glob)
1657 round-trips: 3
1657 round-trips: 3
1658 heads summary:
1658 heads summary:
1659 total common heads: 1
1659 total common heads: 1
1660 also local heads: 0
1660 also local heads: 0
1661 also remote heads: 0
1661 also remote heads: 0
1662 both: 0
1662 both: 0
1663 local heads: 25
1663 local heads: 25
1664 common: 0
1664 common: 0
1665 missing: 25
1665 missing: 25
1666 remote heads: 1
1666 remote heads: 1
1667 common: 0
1667 common: 0
1668 unknown: 1
1668 unknown: 1
1669 local changesets: 400
1669 local changesets: 400
1670 common: 300
1670 common: 300
1671 heads: 1
1671 heads: 1
1672 roots: 1
1672 roots: 1
1673 missing: 100
1673 missing: 100
1674 heads: 25
1674 heads: 25
1675 roots: 25
1675 roots: 25
1676 first undecided set: 400
1676 first undecided set: 400
1677 heads: 25
1677 heads: 25
1678 roots: 1
1678 roots: 1
1679 common: 300
1679 common: 300
1680 missing: 100
1680 missing: 100
1681 common heads: 3ee37d65064a
1681 common heads: 3ee37d65064a
1682
1682
1683 both local and remove are subset
1683 both local and remove are subset
1684 ------------------------
1684 ------------------------
1685
1685
1686 remote will be last 25 heads of the local graph
1686 remote will be last 25 heads of the local graph
1687
1687
1688 $ cd $TESTTMP/manyheads
1688 $ cd $TESTTMP/manyheads
1689 $ hg -R a debugdiscovery \
1689 $ hg -R a debugdiscovery \
1690 > --debug \
1690 > --debug \
1691 > --local-as-revs 'first(heads(all()), 25)' \
1691 > --local-as-revs 'first(heads(all()), 25)' \
1692 > --remote-as-revs 'last(heads(all()), 25)' \
1692 > --remote-as-revs 'last(heads(all()), 25)' \
1693 > --config devel.discovery.randomize=false
1693 > --config devel.discovery.randomize=false
1694 query 1; heads
1694 query 1; heads
1695 searching for changes
1695 searching for changes
1696 taking quick initial sample
1696 taking quick initial sample
1697 query 2; still undecided: 375, sample size is: 81
1697 query 2; still undecided: 375, sample size is: 81
1698 sampling from both directions
1698 sampling from both directions
1699 query 3; still undecided: 3, sample size is: 3
1699 query 3; still undecided: 3, sample size is: 3
1700 3 total queries in *s (glob)
1700 3 total queries in *s (glob)
1701 elapsed time: * seconds (glob)
1701 elapsed time: * seconds (glob)
1702 round-trips: 3
1702 round-trips: 3
1703 heads summary:
1703 heads summary:
1704 total common heads: 1
1704 total common heads: 1
1705 also local heads: 0
1705 also local heads: 0
1706 also remote heads: 0
1706 also remote heads: 0
1707 both: 0
1707 both: 0
1708 local heads: 25
1708 local heads: 25
1709 common: 0
1709 common: 0
1710 missing: 25
1710 missing: 25
1711 remote heads: 25
1711 remote heads: 25
1712 common: 0
1712 common: 0
1713 unknown: 25
1713 unknown: 25
1714 local changesets: 400
1714 local changesets: 400
1715 common: 300
1715 common: 300
1716 heads: 1
1716 heads: 1
1717 roots: 1
1717 roots: 1
1718 missing: 100
1718 missing: 100
1719 heads: 25
1719 heads: 25
1720 roots: 25
1720 roots: 25
1721 first undecided set: 400
1721 first undecided set: 400
1722 heads: 25
1722 heads: 25
1723 roots: 1
1723 roots: 1
1724 common: 300
1724 common: 300
1725 missing: 100
1725 missing: 100
1726 common heads: 3ee37d65064a
1726 common heads: 3ee37d65064a
1727
1727
1728 Test -T json output
1728 Test -T json output
1729 -------------------
1729 -------------------
1730
1730
1731 $ hg -R a debugdiscovery \
1731 $ hg -R a debugdiscovery \
1732 > -T json \
1732 > -T json \
1733 > --debug \
1733 > --debug \
1734 > --local-as-revs 'first(heads(all()), 25)' \
1734 > --local-as-revs 'first(heads(all()), 25)' \
1735 > --remote-as-revs 'last(heads(all()), 25)' \
1735 > --remote-as-revs 'last(heads(all()), 25)' \
1736 > --config devel.discovery.randomize=false
1736 > --config devel.discovery.randomize=false
1737 query 1; heads
1738 searching for changes
1739 taking quick initial sample
1740 query 2; still undecided: 375, sample size is: 81
1741 sampling from both directions
1742 query 3; still undecided: 3, sample size is: 3
1743 3 total queries in *s (glob)
1744 [
1737 [
1745 {
1738 {
1746 "elapsed": *, (glob)
1739 "elapsed": *, (glob)
1747 "nb-common-heads": 1,
1740 "nb-common-heads": 1,
1748 "nb-common-heads-both": 0,
1741 "nb-common-heads-both": 0,
1749 "nb-common-heads-local": 0,
1742 "nb-common-heads-local": 0,
1750 "nb-common-heads-remote": 0,
1743 "nb-common-heads-remote": 0,
1751 "nb-common-roots": 1,
1744 "nb-common-roots": 1,
1752 "nb-head-local": 25,
1745 "nb-head-local": 25,
1753 "nb-head-local-missing": 25,
1746 "nb-head-local-missing": 25,
1754 "nb-head-remote": 25,
1747 "nb-head-remote": 25,
1755 "nb-head-remote-unknown": 25,
1748 "nb-head-remote-unknown": 25,
1756 "nb-ini_und": 400,
1749 "nb-ini_und": 400,
1757 "nb-ini_und-common": 300,
1750 "nb-ini_und-common": 300,
1758 "nb-ini_und-heads": 25,
1751 "nb-ini_und-heads": 25,
1759 "nb-ini_und-missing": 100,
1752 "nb-ini_und-missing": 100,
1760 "nb-ini_und-roots": 1,
1753 "nb-ini_und-roots": 1,
1761 "nb-missing-heads": 25,
1754 "nb-missing-heads": 25,
1762 "nb-missing-roots": 25,
1755 "nb-missing-roots": 25,
1763 "nb-revs": 400,
1756 "nb-revs": 400,
1764 "nb-revs-common": 300,
1757 "nb-revs-common": 300,
1765 "nb-revs-missing": 100,
1758 "nb-revs-missing": 100,
1759 "output": "query 1; heads\nsearching for changes\ntaking quick initial sample\nquery 2; still undecided: 375, sample size is: 81\nsampling from both directions\nquery 3; still undecided: 3, sample size is: 3\n3 total queries in *s\n", (glob)
1766 "total-roundtrips": 3
1760 "total-roundtrips": 3
1767 }
1761 }
1768 ]
1762 ]
General Comments 0
You need to be logged in to leave comments. Login now