debugdiscovery: display some information about the initial "undecided" set...
Author: marmoute
Changeset: r46693:0e5065b6 (default branch)
@@ -1,4634 +1,4652 @@
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import difflib
12 import difflib
13 import errno
13 import errno
14 import glob
14 import glob
15 import operator
15 import operator
16 import os
16 import os
17 import platform
17 import platform
18 import random
18 import random
19 import re
19 import re
20 import socket
20 import socket
21 import ssl
21 import ssl
22 import stat
22 import stat
23 import string
23 import string
24 import subprocess
24 import subprocess
25 import sys
25 import sys
26 import time
26 import time
27
27
28 from .i18n import _
28 from .i18n import _
29 from .node import (
29 from .node import (
30 bin,
30 bin,
31 hex,
31 hex,
32 nullid,
32 nullid,
33 nullrev,
33 nullrev,
34 short,
34 short,
35 )
35 )
36 from .pycompat import (
36 from .pycompat import (
37 getattr,
37 getattr,
38 open,
38 open,
39 )
39 )
40 from . import (
40 from . import (
41 bundle2,
41 bundle2,
42 bundlerepo,
42 bundlerepo,
43 changegroup,
43 changegroup,
44 cmdutil,
44 cmdutil,
45 color,
45 color,
46 context,
46 context,
47 copies,
47 copies,
48 dagparser,
48 dagparser,
49 encoding,
49 encoding,
50 error,
50 error,
51 exchange,
51 exchange,
52 extensions,
52 extensions,
53 filemerge,
53 filemerge,
54 filesetlang,
54 filesetlang,
55 formatter,
55 formatter,
56 hg,
56 hg,
57 httppeer,
57 httppeer,
58 localrepo,
58 localrepo,
59 lock as lockmod,
59 lock as lockmod,
60 logcmdutil,
60 logcmdutil,
61 mergestate as mergestatemod,
61 mergestate as mergestatemod,
62 metadata,
62 metadata,
63 obsolete,
63 obsolete,
64 obsutil,
64 obsutil,
65 pathutil,
65 pathutil,
66 phases,
66 phases,
67 policy,
67 policy,
68 pvec,
68 pvec,
69 pycompat,
69 pycompat,
70 registrar,
70 registrar,
71 repair,
71 repair,
72 revlog,
72 revlog,
73 revset,
73 revset,
74 revsetlang,
74 revsetlang,
75 scmutil,
75 scmutil,
76 setdiscovery,
76 setdiscovery,
77 simplemerge,
77 simplemerge,
78 sshpeer,
78 sshpeer,
79 sslutil,
79 sslutil,
80 streamclone,
80 streamclone,
81 strip,
81 strip,
82 tags as tagsmod,
82 tags as tagsmod,
83 templater,
83 templater,
84 treediscovery,
84 treediscovery,
85 upgrade,
85 upgrade,
86 url as urlmod,
86 url as urlmod,
87 util,
87 util,
88 vfs as vfsmod,
88 vfs as vfsmod,
89 wireprotoframing,
89 wireprotoframing,
90 wireprotoserver,
90 wireprotoserver,
91 wireprotov2peer,
91 wireprotov2peer,
92 )
92 )
93 from .utils import (
93 from .utils import (
94 cborutil,
94 cborutil,
95 compression,
95 compression,
96 dateutil,
96 dateutil,
97 procutil,
97 procutil,
98 stringutil,
98 stringutil,
99 )
99 )
100
100
101 from .revlogutils import (
101 from .revlogutils import (
102 deltas as deltautil,
102 deltas as deltautil,
103 nodemap,
103 nodemap,
104 sidedata,
104 sidedata,
105 )
105 )
106
106
107 release = lockmod.release
107 release = lockmod.release
108
108
109 table = {}
109 table = {}
110 table.update(strip.command._table)
110 table.update(strip.command._table)
111 command = registrar.command(table)
111 command = registrar.command(table)
112
112
113
113
114 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
114 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
115 def debugancestor(ui, repo, *args):
115 def debugancestor(ui, repo, *args):
116 """find the ancestor revision of two revisions in a given index"""
116 """find the ancestor revision of two revisions in a given index"""
117 if len(args) == 3:
117 if len(args) == 3:
118 index, rev1, rev2 = args
118 index, rev1, rev2 = args
119 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
119 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
120 lookup = r.lookup
120 lookup = r.lookup
121 elif len(args) == 2:
121 elif len(args) == 2:
122 if not repo:
122 if not repo:
123 raise error.Abort(
123 raise error.Abort(
124 _(b'there is no Mercurial repository here (.hg not found)')
124 _(b'there is no Mercurial repository here (.hg not found)')
125 )
125 )
126 rev1, rev2 = args
126 rev1, rev2 = args
127 r = repo.changelog
127 r = repo.changelog
128 lookup = repo.lookup
128 lookup = repo.lookup
129 else:
129 else:
130 raise error.Abort(_(b'either two or three arguments required'))
130 raise error.Abort(_(b'either two or three arguments required'))
131 a = r.ancestor(lookup(rev1), lookup(rev2))
131 a = r.ancestor(lookup(rev1), lookup(rev2))
132 ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
132 ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
133
133
134
134
135 @command(b'debugantivirusrunning', [])
135 @command(b'debugantivirusrunning', [])
136 def debugantivirusrunning(ui, repo):
136 def debugantivirusrunning(ui, repo):
137 """attempt to trigger an antivirus scanner to see if one is active"""
137 """attempt to trigger an antivirus scanner to see if one is active"""
138 with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
138 with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
139 f.write(
139 f.write(
140 util.b85decode(
140 util.b85decode(
141 # This is a base85-armored version of the EICAR test file. See
141 # This is a base85-armored version of the EICAR test file. See
142 # https://en.wikipedia.org/wiki/EICAR_test_file for details.
142 # https://en.wikipedia.org/wiki/EICAR_test_file for details.
143 b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
143 b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
144 b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
144 b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
145 )
145 )
146 )
146 )
147 # Give an AV engine time to scan the file.
147 # Give an AV engine time to scan the file.
148 time.sleep(2)
148 time.sleep(2)
149 util.unlink(repo.cachevfs.join('eicar-test-file.com'))
149 util.unlink(repo.cachevfs.join('eicar-test-file.com'))
150
150
151
151
152 @command(b'debugapplystreamclonebundle', [], b'FILE')
152 @command(b'debugapplystreamclonebundle', [], b'FILE')
153 def debugapplystreamclonebundle(ui, repo, fname):
153 def debugapplystreamclonebundle(ui, repo, fname):
154 """apply a stream clone bundle file"""
154 """apply a stream clone bundle file"""
155 f = hg.openpath(ui, fname)
155 f = hg.openpath(ui, fname)
156 gen = exchange.readbundle(ui, f, fname)
156 gen = exchange.readbundle(ui, f, fname)
157 gen.apply(repo)
157 gen.apply(repo)
158
158
159
159
160 @command(
160 @command(
161 b'debugbuilddag',
161 b'debugbuilddag',
162 [
162 [
163 (
163 (
164 b'm',
164 b'm',
165 b'mergeable-file',
165 b'mergeable-file',
166 None,
166 None,
167 _(b'add single file mergeable changes'),
167 _(b'add single file mergeable changes'),
168 ),
168 ),
169 (
169 (
170 b'o',
170 b'o',
171 b'overwritten-file',
171 b'overwritten-file',
172 None,
172 None,
173 _(b'add single file all revs overwrite'),
173 _(b'add single file all revs overwrite'),
174 ),
174 ),
175 (b'n', b'new-file', None, _(b'add new file at each rev')),
175 (b'n', b'new-file', None, _(b'add new file at each rev')),
176 ],
176 ],
177 _(b'[OPTION]... [TEXT]'),
177 _(b'[OPTION]... [TEXT]'),
178 )
178 )
179 def debugbuilddag(
179 def debugbuilddag(
180 ui,
180 ui,
181 repo,
181 repo,
182 text=None,
182 text=None,
183 mergeable_file=False,
183 mergeable_file=False,
184 overwritten_file=False,
184 overwritten_file=False,
185 new_file=False,
185 new_file=False,
186 ):
186 ):
187 """builds a repo with a given DAG from scratch in the current empty repo
187 """builds a repo with a given DAG from scratch in the current empty repo
188
188
189 The description of the DAG is read from stdin if not given on the
189 The description of the DAG is read from stdin if not given on the
190 command line.
190 command line.
191
191
192 Elements:
192 Elements:
193
193
194 - "+n" is a linear run of n nodes based on the current default parent
194 - "+n" is a linear run of n nodes based on the current default parent
195 - "." is a single node based on the current default parent
195 - "." is a single node based on the current default parent
196 - "$" resets the default parent to null (implied at the start);
196 - "$" resets the default parent to null (implied at the start);
197 otherwise the default parent is always the last node created
197 otherwise the default parent is always the last node created
198 - "<p" sets the default parent to the backref p
198 - "<p" sets the default parent to the backref p
199 - "*p" is a fork at parent p, which is a backref
199 - "*p" is a fork at parent p, which is a backref
200 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
200 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
201 - "/p2" is a merge of the preceding node and p2
201 - "/p2" is a merge of the preceding node and p2
202 - ":tag" defines a local tag for the preceding node
202 - ":tag" defines a local tag for the preceding node
203 - "@branch" sets the named branch for subsequent nodes
203 - "@branch" sets the named branch for subsequent nodes
204 - "#...\\n" is a comment up to the end of the line
204 - "#...\\n" is a comment up to the end of the line
205
205
206 Whitespace between the above elements is ignored.
206 Whitespace between the above elements is ignored.
207
207
208 A backref is either
208 A backref is either
209
209
210 - a number n, which references the node curr-n, where curr is the current
210 - a number n, which references the node curr-n, where curr is the current
211 node, or
211 node, or
212 - the name of a local tag you placed earlier using ":tag", or
212 - the name of a local tag you placed earlier using ":tag", or
213 - empty to denote the default parent.
213 - empty to denote the default parent.
214
214
215 All string valued-elements are either strictly alphanumeric, or must
215 All string valued-elements are either strictly alphanumeric, or must
216 be enclosed in double quotes ("..."), with "\\" as escape character.
216 be enclosed in double quotes ("..."), with "\\" as escape character.
217 """
217 """
218
218
219 if text is None:
219 if text is None:
220 ui.status(_(b"reading DAG from stdin\n"))
220 ui.status(_(b"reading DAG from stdin\n"))
221 text = ui.fin.read()
221 text = ui.fin.read()
222
222
223 cl = repo.changelog
223 cl = repo.changelog
224 if len(cl) > 0:
224 if len(cl) > 0:
225 raise error.Abort(_(b'repository is not empty'))
225 raise error.Abort(_(b'repository is not empty'))
226
226
227 # determine number of revs in DAG
227 # determine number of revs in DAG
228 total = 0
228 total = 0
229 for type, data in dagparser.parsedag(text):
229 for type, data in dagparser.parsedag(text):
230 if type == b'n':
230 if type == b'n':
231 total += 1
231 total += 1
232
232
233 if mergeable_file:
233 if mergeable_file:
234 linesperrev = 2
234 linesperrev = 2
235 # make a file with k lines per rev
235 # make a file with k lines per rev
236 initialmergedlines = [
236 initialmergedlines = [
237 b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
237 b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
238 ]
238 ]
239 initialmergedlines.append(b"")
239 initialmergedlines.append(b"")
240
240
241 tags = []
241 tags = []
242 progress = ui.makeprogress(
242 progress = ui.makeprogress(
243 _(b'building'), unit=_(b'revisions'), total=total
243 _(b'building'), unit=_(b'revisions'), total=total
244 )
244 )
245 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
245 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
246 at = -1
246 at = -1
247 atbranch = b'default'
247 atbranch = b'default'
248 nodeids = []
248 nodeids = []
249 id = 0
249 id = 0
250 progress.update(id)
250 progress.update(id)
251 for type, data in dagparser.parsedag(text):
251 for type, data in dagparser.parsedag(text):
252 if type == b'n':
252 if type == b'n':
253 ui.note((b'node %s\n' % pycompat.bytestr(data)))
253 ui.note((b'node %s\n' % pycompat.bytestr(data)))
254 id, ps = data
254 id, ps = data
255
255
256 files = []
256 files = []
257 filecontent = {}
257 filecontent = {}
258
258
259 p2 = None
259 p2 = None
260 if mergeable_file:
260 if mergeable_file:
261 fn = b"mf"
261 fn = b"mf"
262 p1 = repo[ps[0]]
262 p1 = repo[ps[0]]
263 if len(ps) > 1:
263 if len(ps) > 1:
264 p2 = repo[ps[1]]
264 p2 = repo[ps[1]]
265 pa = p1.ancestor(p2)
265 pa = p1.ancestor(p2)
266 base, local, other = [
266 base, local, other = [
267 x[fn].data() for x in (pa, p1, p2)
267 x[fn].data() for x in (pa, p1, p2)
268 ]
268 ]
269 m3 = simplemerge.Merge3Text(base, local, other)
269 m3 = simplemerge.Merge3Text(base, local, other)
270 ml = [l.strip() for l in m3.merge_lines()]
270 ml = [l.strip() for l in m3.merge_lines()]
271 ml.append(b"")
271 ml.append(b"")
272 elif at > 0:
272 elif at > 0:
273 ml = p1[fn].data().split(b"\n")
273 ml = p1[fn].data().split(b"\n")
274 else:
274 else:
275 ml = initialmergedlines
275 ml = initialmergedlines
276 ml[id * linesperrev] += b" r%i" % id
276 ml[id * linesperrev] += b" r%i" % id
277 mergedtext = b"\n".join(ml)
277 mergedtext = b"\n".join(ml)
278 files.append(fn)
278 files.append(fn)
279 filecontent[fn] = mergedtext
279 filecontent[fn] = mergedtext
280
280
281 if overwritten_file:
281 if overwritten_file:
282 fn = b"of"
282 fn = b"of"
283 files.append(fn)
283 files.append(fn)
284 filecontent[fn] = b"r%i\n" % id
284 filecontent[fn] = b"r%i\n" % id
285
285
286 if new_file:
286 if new_file:
287 fn = b"nf%i" % id
287 fn = b"nf%i" % id
288 files.append(fn)
288 files.append(fn)
289 filecontent[fn] = b"r%i\n" % id
289 filecontent[fn] = b"r%i\n" % id
290 if len(ps) > 1:
290 if len(ps) > 1:
291 if not p2:
291 if not p2:
292 p2 = repo[ps[1]]
292 p2 = repo[ps[1]]
293 for fn in p2:
293 for fn in p2:
294 if fn.startswith(b"nf"):
294 if fn.startswith(b"nf"):
295 files.append(fn)
295 files.append(fn)
296 filecontent[fn] = p2[fn].data()
296 filecontent[fn] = p2[fn].data()
297
297
298 def fctxfn(repo, cx, path):
298 def fctxfn(repo, cx, path):
299 if path in filecontent:
299 if path in filecontent:
300 return context.memfilectx(
300 return context.memfilectx(
301 repo, cx, path, filecontent[path]
301 repo, cx, path, filecontent[path]
302 )
302 )
303 return None
303 return None
304
304
305 if len(ps) == 0 or ps[0] < 0:
305 if len(ps) == 0 or ps[0] < 0:
306 pars = [None, None]
306 pars = [None, None]
307 elif len(ps) == 1:
307 elif len(ps) == 1:
308 pars = [nodeids[ps[0]], None]
308 pars = [nodeids[ps[0]], None]
309 else:
309 else:
310 pars = [nodeids[p] for p in ps]
310 pars = [nodeids[p] for p in ps]
311 cx = context.memctx(
311 cx = context.memctx(
312 repo,
312 repo,
313 pars,
313 pars,
314 b"r%i" % id,
314 b"r%i" % id,
315 files,
315 files,
316 fctxfn,
316 fctxfn,
317 date=(id, 0),
317 date=(id, 0),
318 user=b"debugbuilddag",
318 user=b"debugbuilddag",
319 extra={b'branch': atbranch},
319 extra={b'branch': atbranch},
320 )
320 )
321 nodeid = repo.commitctx(cx)
321 nodeid = repo.commitctx(cx)
322 nodeids.append(nodeid)
322 nodeids.append(nodeid)
323 at = id
323 at = id
324 elif type == b'l':
324 elif type == b'l':
325 id, name = data
325 id, name = data
326 ui.note((b'tag %s\n' % name))
326 ui.note((b'tag %s\n' % name))
327 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
327 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
328 elif type == b'a':
328 elif type == b'a':
329 ui.note((b'branch %s\n' % data))
329 ui.note((b'branch %s\n' % data))
330 atbranch = data
330 atbranch = data
331 progress.update(id)
331 progress.update(id)
332
332
333 if tags:
333 if tags:
334 repo.vfs.write(b"localtags", b"".join(tags))
334 repo.vfs.write(b"localtags", b"".join(tags))
335
335
336
336
337 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
337 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
338 indent_string = b' ' * indent
338 indent_string = b' ' * indent
339 if all:
339 if all:
340 ui.writenoi18n(
340 ui.writenoi18n(
341 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
341 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
342 % indent_string
342 % indent_string
343 )
343 )
344
344
345 def showchunks(named):
345 def showchunks(named):
346 ui.write(b"\n%s%s\n" % (indent_string, named))
346 ui.write(b"\n%s%s\n" % (indent_string, named))
347 for deltadata in gen.deltaiter():
347 for deltadata in gen.deltaiter():
348 node, p1, p2, cs, deltabase, delta, flags = deltadata
348 node, p1, p2, cs, deltabase, delta, flags = deltadata
349 ui.write(
349 ui.write(
350 b"%s%s %s %s %s %s %d\n"
350 b"%s%s %s %s %s %s %d\n"
351 % (
351 % (
352 indent_string,
352 indent_string,
353 hex(node),
353 hex(node),
354 hex(p1),
354 hex(p1),
355 hex(p2),
355 hex(p2),
356 hex(cs),
356 hex(cs),
357 hex(deltabase),
357 hex(deltabase),
358 len(delta),
358 len(delta),
359 )
359 )
360 )
360 )
361
361
362 gen.changelogheader()
362 gen.changelogheader()
363 showchunks(b"changelog")
363 showchunks(b"changelog")
364 gen.manifestheader()
364 gen.manifestheader()
365 showchunks(b"manifest")
365 showchunks(b"manifest")
366 for chunkdata in iter(gen.filelogheader, {}):
366 for chunkdata in iter(gen.filelogheader, {}):
367 fname = chunkdata[b'filename']
367 fname = chunkdata[b'filename']
368 showchunks(fname)
368 showchunks(fname)
369 else:
369 else:
370 if isinstance(gen, bundle2.unbundle20):
370 if isinstance(gen, bundle2.unbundle20):
371 raise error.Abort(_(b'use debugbundle2 for this file'))
371 raise error.Abort(_(b'use debugbundle2 for this file'))
372 gen.changelogheader()
372 gen.changelogheader()
373 for deltadata in gen.deltaiter():
373 for deltadata in gen.deltaiter():
374 node, p1, p2, cs, deltabase, delta, flags = deltadata
374 node, p1, p2, cs, deltabase, delta, flags = deltadata
375 ui.write(b"%s%s\n" % (indent_string, hex(node)))
375 ui.write(b"%s%s\n" % (indent_string, hex(node)))
376
376
377
377
378 def _debugobsmarkers(ui, part, indent=0, **opts):
378 def _debugobsmarkers(ui, part, indent=0, **opts):
379 """display version and markers contained in 'data'"""
379 """display version and markers contained in 'data'"""
380 opts = pycompat.byteskwargs(opts)
380 opts = pycompat.byteskwargs(opts)
381 data = part.read()
381 data = part.read()
382 indent_string = b' ' * indent
382 indent_string = b' ' * indent
383 try:
383 try:
384 version, markers = obsolete._readmarkers(data)
384 version, markers = obsolete._readmarkers(data)
385 except error.UnknownVersion as exc:
385 except error.UnknownVersion as exc:
386 msg = b"%sunsupported version: %s (%d bytes)\n"
386 msg = b"%sunsupported version: %s (%d bytes)\n"
387 msg %= indent_string, exc.version, len(data)
387 msg %= indent_string, exc.version, len(data)
388 ui.write(msg)
388 ui.write(msg)
389 else:
389 else:
390 msg = b"%sversion: %d (%d bytes)\n"
390 msg = b"%sversion: %d (%d bytes)\n"
391 msg %= indent_string, version, len(data)
391 msg %= indent_string, version, len(data)
392 ui.write(msg)
392 ui.write(msg)
393 fm = ui.formatter(b'debugobsolete', opts)
393 fm = ui.formatter(b'debugobsolete', opts)
394 for rawmarker in sorted(markers):
394 for rawmarker in sorted(markers):
395 m = obsutil.marker(None, rawmarker)
395 m = obsutil.marker(None, rawmarker)
396 fm.startitem()
396 fm.startitem()
397 fm.plain(indent_string)
397 fm.plain(indent_string)
398 cmdutil.showmarker(fm, m)
398 cmdutil.showmarker(fm, m)
399 fm.end()
399 fm.end()
400
400
401
401
402 def _debugphaseheads(ui, data, indent=0):
402 def _debugphaseheads(ui, data, indent=0):
403 """display version and markers contained in 'data'"""
403 """display version and markers contained in 'data'"""
404 indent_string = b' ' * indent
404 indent_string = b' ' * indent
405 headsbyphase = phases.binarydecode(data)
405 headsbyphase = phases.binarydecode(data)
406 for phase in phases.allphases:
406 for phase in phases.allphases:
407 for head in headsbyphase[phase]:
407 for head in headsbyphase[phase]:
408 ui.write(indent_string)
408 ui.write(indent_string)
409 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
409 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
410
410
411
411
412 def _quasirepr(thing):
412 def _quasirepr(thing):
413 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
413 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
414 return b'{%s}' % (
414 return b'{%s}' % (
415 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
415 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
416 )
416 )
417 return pycompat.bytestr(repr(thing))
417 return pycompat.bytestr(repr(thing))
418
418
419
419
420 def _debugbundle2(ui, gen, all=None, **opts):
420 def _debugbundle2(ui, gen, all=None, **opts):
421 """lists the contents of a bundle2"""
421 """lists the contents of a bundle2"""
422 if not isinstance(gen, bundle2.unbundle20):
422 if not isinstance(gen, bundle2.unbundle20):
423 raise error.Abort(_(b'not a bundle2 file'))
423 raise error.Abort(_(b'not a bundle2 file'))
424 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
424 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
425 parttypes = opts.get('part_type', [])
425 parttypes = opts.get('part_type', [])
426 for part in gen.iterparts():
426 for part in gen.iterparts():
427 if parttypes and part.type not in parttypes:
427 if parttypes and part.type not in parttypes:
428 continue
428 continue
429 msg = b'%s -- %s (mandatory: %r)\n'
429 msg = b'%s -- %s (mandatory: %r)\n'
430 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
430 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
431 if part.type == b'changegroup':
431 if part.type == b'changegroup':
432 version = part.params.get(b'version', b'01')
432 version = part.params.get(b'version', b'01')
433 cg = changegroup.getunbundler(version, part, b'UN')
433 cg = changegroup.getunbundler(version, part, b'UN')
434 if not ui.quiet:
434 if not ui.quiet:
435 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
435 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
436 if part.type == b'obsmarkers':
436 if part.type == b'obsmarkers':
437 if not ui.quiet:
437 if not ui.quiet:
438 _debugobsmarkers(ui, part, indent=4, **opts)
438 _debugobsmarkers(ui, part, indent=4, **opts)
439 if part.type == b'phase-heads':
439 if part.type == b'phase-heads':
440 if not ui.quiet:
440 if not ui.quiet:
441 _debugphaseheads(ui, part, indent=4)
441 _debugphaseheads(ui, part, indent=4)
442
442
443
443
444 @command(
444 @command(
445 b'debugbundle',
445 b'debugbundle',
446 [
446 [
447 (b'a', b'all', None, _(b'show all details')),
447 (b'a', b'all', None, _(b'show all details')),
448 (b'', b'part-type', [], _(b'show only the named part type')),
448 (b'', b'part-type', [], _(b'show only the named part type')),
449 (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
449 (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
450 ],
450 ],
451 _(b'FILE'),
451 _(b'FILE'),
452 norepo=True,
452 norepo=True,
453 )
453 )
454 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
454 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
455 """lists the contents of a bundle"""
455 """lists the contents of a bundle"""
456 with hg.openpath(ui, bundlepath) as f:
456 with hg.openpath(ui, bundlepath) as f:
457 if spec:
457 if spec:
458 spec = exchange.getbundlespec(ui, f)
458 spec = exchange.getbundlespec(ui, f)
459 ui.write(b'%s\n' % spec)
459 ui.write(b'%s\n' % spec)
460 return
460 return
461
461
462 gen = exchange.readbundle(ui, f, bundlepath)
462 gen = exchange.readbundle(ui, f, bundlepath)
463 if isinstance(gen, bundle2.unbundle20):
463 if isinstance(gen, bundle2.unbundle20):
464 return _debugbundle2(ui, gen, all=all, **opts)
464 return _debugbundle2(ui, gen, all=all, **opts)
465 _debugchangegroup(ui, gen, all=all, **opts)
465 _debugchangegroup(ui, gen, all=all, **opts)
466
466
467
467
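# A hedged usage sketch for debugbundle above (the bundle path is only an
# example):
#
#     hg debugbundle --spec ./example.hg
#
# prints just the bundlespec, while
#
#     hg debugbundle --all ./example.hg
#
# walks the changegroup parts and prints one "id p1 p2 cset delta-base
# len(delta)" line per delta, matching the header emitted by
# _debugchangegroup() when --all is passed.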
468 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
468 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
469 def debugcapabilities(ui, path, **opts):
469 def debugcapabilities(ui, path, **opts):
470 """lists the capabilities of a remote peer"""
470 """lists the capabilities of a remote peer"""
471 opts = pycompat.byteskwargs(opts)
471 opts = pycompat.byteskwargs(opts)
472 peer = hg.peer(ui, opts, path)
472 peer = hg.peer(ui, opts, path)
473 caps = peer.capabilities()
473 caps = peer.capabilities()
474 ui.writenoi18n(b'Main capabilities:\n')
474 ui.writenoi18n(b'Main capabilities:\n')
475 for c in sorted(caps):
475 for c in sorted(caps):
476 ui.write(b' %s\n' % c)
476 ui.write(b' %s\n' % c)
477 b2caps = bundle2.bundle2caps(peer)
477 b2caps = bundle2.bundle2caps(peer)
478 if b2caps:
478 if b2caps:
479 ui.writenoi18n(b'Bundle2 capabilities:\n')
479 ui.writenoi18n(b'Bundle2 capabilities:\n')
480 for key, values in sorted(pycompat.iteritems(b2caps)):
480 for key, values in sorted(pycompat.iteritems(b2caps)):
481 ui.write(b' %s\n' % key)
481 ui.write(b' %s\n' % key)
482 for v in values:
482 for v in values:
483 ui.write(b' %s\n' % v)
483 ui.write(b' %s\n' % v)
484
484
485
485
486 @command(b'debugchangedfiles', [], b'REV')
486 @command(b'debugchangedfiles', [], b'REV')
487 def debugchangedfiles(ui, repo, rev):
487 def debugchangedfiles(ui, repo, rev):
488 """list the stored files changes for a revision"""
488 """list the stored files changes for a revision"""
489 ctx = scmutil.revsingle(repo, rev, None)
489 ctx = scmutil.revsingle(repo, rev, None)
490 sd = repo.changelog.sidedata(ctx.rev())
490 sd = repo.changelog.sidedata(ctx.rev())
491 files_block = sd.get(sidedata.SD_FILES)
491 files_block = sd.get(sidedata.SD_FILES)
492 if files_block is not None:
492 if files_block is not None:
493 files = metadata.decode_files_sidedata(sd)
493 files = metadata.decode_files_sidedata(sd)
494 for f in sorted(files.touched):
494 for f in sorted(files.touched):
495 if f in files.added:
495 if f in files.added:
496 action = b"added"
496 action = b"added"
497 elif f in files.removed:
497 elif f in files.removed:
498 action = b"removed"
498 action = b"removed"
499 elif f in files.merged:
499 elif f in files.merged:
500 action = b"merged"
500 action = b"merged"
501 elif f in files.salvaged:
501 elif f in files.salvaged:
502 action = b"salvaged"
502 action = b"salvaged"
503 else:
503 else:
504 action = b"touched"
504 action = b"touched"
505
505
506 copy_parent = b""
506 copy_parent = b""
507 copy_source = b""
507 copy_source = b""
508 if f in files.copied_from_p1:
508 if f in files.copied_from_p1:
509 copy_parent = b"p1"
509 copy_parent = b"p1"
510 copy_source = files.copied_from_p1[f]
510 copy_source = files.copied_from_p1[f]
511 elif f in files.copied_from_p2:
511 elif f in files.copied_from_p2:
512 copy_parent = b"p2"
512 copy_parent = b"p2"
513 copy_source = files.copied_from_p2[f]
513 copy_source = files.copied_from_p2[f]
514
514
515 data = (action, copy_parent, f, copy_source)
515 data = (action, copy_parent, f, copy_source)
516 template = b"%-8s %2s: %s, %s;\n"
516 template = b"%-8s %2s: %s, %s;\n"
517 ui.write(template % data)
517 ui.write(template % data)
518
518
519
519
520 @command(b'debugcheckstate', [], b'')
520 @command(b'debugcheckstate', [], b'')
521 def debugcheckstate(ui, repo):
521 def debugcheckstate(ui, repo):
522 """validate the correctness of the current dirstate"""
522 """validate the correctness of the current dirstate"""
523 parent1, parent2 = repo.dirstate.parents()
523 parent1, parent2 = repo.dirstate.parents()
524 m1 = repo[parent1].manifest()
524 m1 = repo[parent1].manifest()
525 m2 = repo[parent2].manifest()
525 m2 = repo[parent2].manifest()
526 errors = 0
526 errors = 0
527 for f in repo.dirstate:
527 for f in repo.dirstate:
528 state = repo.dirstate[f]
528 state = repo.dirstate[f]
529 if state in b"nr" and f not in m1:
529 if state in b"nr" and f not in m1:
530 ui.warn(_(b"%s in state %s, but not in manifest1\n") % (f, state))
530 ui.warn(_(b"%s in state %s, but not in manifest1\n") % (f, state))
531 errors += 1
531 errors += 1
532 if state in b"a" and f in m1:
532 if state in b"a" and f in m1:
533 ui.warn(_(b"%s in state %s, but also in manifest1\n") % (f, state))
533 ui.warn(_(b"%s in state %s, but also in manifest1\n") % (f, state))
534 errors += 1
534 errors += 1
535 if state in b"m" and f not in m1 and f not in m2:
535 if state in b"m" and f not in m1 and f not in m2:
536 ui.warn(
536 ui.warn(
537 _(b"%s in state %s, but not in either manifest\n") % (f, state)
537 _(b"%s in state %s, but not in either manifest\n") % (f, state)
538 )
538 )
539 errors += 1
539 errors += 1
540 for f in m1:
540 for f in m1:
541 state = repo.dirstate[f]
541 state = repo.dirstate[f]
542 if state not in b"nrm":
542 if state not in b"nrm":
543 ui.warn(_(b"%s in manifest1, but listed as state %s") % (f, state))
543 ui.warn(_(b"%s in manifest1, but listed as state %s") % (f, state))
544 errors += 1
544 errors += 1
545 if errors:
545 if errors:
546 errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
546 errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
547 raise error.Abort(errstr)
547 raise error.Abort(errstr)
548
548
549
549
550 @command(
550 @command(
551 b'debugcolor',
551 b'debugcolor',
552 [(b'', b'style', None, _(b'show all configured styles'))],
552 [(b'', b'style', None, _(b'show all configured styles'))],
553 b'hg debugcolor',
553 b'hg debugcolor',
554 )
554 )
555 def debugcolor(ui, repo, **opts):
555 def debugcolor(ui, repo, **opts):
556 """show available color, effects or style"""
556 """show available color, effects or style"""
557 ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
557 ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
558 if opts.get('style'):
558 if opts.get('style'):
559 return _debugdisplaystyle(ui)
559 return _debugdisplaystyle(ui)
560 else:
560 else:
561 return _debugdisplaycolor(ui)
561 return _debugdisplaycolor(ui)
562
562
563
563
564 def _debugdisplaycolor(ui):
564 def _debugdisplaycolor(ui):
565 ui = ui.copy()
565 ui = ui.copy()
566 ui._styles.clear()
566 ui._styles.clear()
567 for effect in color._activeeffects(ui).keys():
567 for effect in color._activeeffects(ui).keys():
568 ui._styles[effect] = effect
568 ui._styles[effect] = effect
569 if ui._terminfoparams:
569 if ui._terminfoparams:
570 for k, v in ui.configitems(b'color'):
570 for k, v in ui.configitems(b'color'):
571 if k.startswith(b'color.'):
571 if k.startswith(b'color.'):
572 ui._styles[k] = k[6:]
572 ui._styles[k] = k[6:]
573 elif k.startswith(b'terminfo.'):
573 elif k.startswith(b'terminfo.'):
574 ui._styles[k] = k[9:]
574 ui._styles[k] = k[9:]
575 ui.write(_(b'available colors:\n'))
575 ui.write(_(b'available colors:\n'))
576 # sort label with a '_' after the other to group '_background' entry.
576 # sort label with a '_' after the other to group '_background' entry.
577 items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
577 items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
578 for colorname, label in items:
578 for colorname, label in items:
579 ui.write(b'%s\n' % colorname, label=label)
579 ui.write(b'%s\n' % colorname, label=label)
580
580
581
581
582 def _debugdisplaystyle(ui):
582 def _debugdisplaystyle(ui):
583 ui.write(_(b'available style:\n'))
583 ui.write(_(b'available style:\n'))
584 if not ui._styles:
584 if not ui._styles:
585 return
585 return
586 width = max(len(s) for s in ui._styles)
586 width = max(len(s) for s in ui._styles)
587 for label, effects in sorted(ui._styles.items()):
587 for label, effects in sorted(ui._styles.items()):
588 ui.write(b'%s' % label, label=label)
588 ui.write(b'%s' % label, label=label)
589 if effects:
589 if effects:
590 # 50
590 # 50
591 ui.write(b': ')
591 ui.write(b': ')
592 ui.write(b' ' * (max(0, width - len(label))))
592 ui.write(b' ' * (max(0, width - len(label))))
593 ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
593 ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
594 ui.write(b'\n')
594 ui.write(b'\n')
595
595
596
596
597 @command(b'debugcreatestreamclonebundle', [], b'FILE')
597 @command(b'debugcreatestreamclonebundle', [], b'FILE')
598 def debugcreatestreamclonebundle(ui, repo, fname):
598 def debugcreatestreamclonebundle(ui, repo, fname):
599 """create a stream clone bundle file
599 """create a stream clone bundle file
600
600
601 Stream bundles are special bundles that are essentially archives of
601 Stream bundles are special bundles that are essentially archives of
602 revlog files. They are commonly used for cloning very quickly.
602 revlog files. They are commonly used for cloning very quickly.
603 """
603 """
604 # TODO we may want to turn this into an abort when this functionality
604 # TODO we may want to turn this into an abort when this functionality
605 # is moved into `hg bundle`.
605 # is moved into `hg bundle`.
606 if phases.hassecret(repo):
606 if phases.hassecret(repo):
607 ui.warn(
607 ui.warn(
608 _(
608 _(
609 b'(warning: stream clone bundle will contain secret '
609 b'(warning: stream clone bundle will contain secret '
610 b'revisions)\n'
610 b'revisions)\n'
611 )
611 )
612 )
612 )
613
613
614 requirements, gen = streamclone.generatebundlev1(repo)
614 requirements, gen = streamclone.generatebundlev1(repo)
615 changegroup.writechunks(ui, gen, fname)
615 changegroup.writechunks(ui, gen, fname)
616
616
617 ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
617 ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
618
618
619
619
620 @command(
620 @command(
621 b'debugdag',
621 b'debugdag',
622 [
622 [
623 (b't', b'tags', None, _(b'use tags as labels')),
623 (b't', b'tags', None, _(b'use tags as labels')),
624 (b'b', b'branches', None, _(b'annotate with branch names')),
624 (b'b', b'branches', None, _(b'annotate with branch names')),
625 (b'', b'dots', None, _(b'use dots for runs')),
625 (b'', b'dots', None, _(b'use dots for runs')),
626 (b's', b'spaces', None, _(b'separate elements by spaces')),
626 (b's', b'spaces', None, _(b'separate elements by spaces')),
627 ],
627 ],
628 _(b'[OPTION]... [FILE [REV]...]'),
628 _(b'[OPTION]... [FILE [REV]...]'),
629 optionalrepo=True,
629 optionalrepo=True,
630 )
630 )
631 def debugdag(ui, repo, file_=None, *revs, **opts):
631 def debugdag(ui, repo, file_=None, *revs, **opts):
632 """format the changelog or an index DAG as a concise textual description
632 """format the changelog or an index DAG as a concise textual description
633
633
634 If you pass a revlog index, the revlog's DAG is emitted. If you list
634 If you pass a revlog index, the revlog's DAG is emitted. If you list
635 revision numbers, they get labeled in the output as rN.
635 revision numbers, they get labeled in the output as rN.
636
636
637 Otherwise, the changelog DAG of the current repo is emitted.
637 Otherwise, the changelog DAG of the current repo is emitted.
638 """
638 """
639 spaces = opts.get('spaces')
639 spaces = opts.get('spaces')
640 dots = opts.get('dots')
640 dots = opts.get('dots')
641 if file_:
641 if file_:
642 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
642 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
643 revs = {int(r) for r in revs}
643 revs = {int(r) for r in revs}
644
644
645 def events():
645 def events():
646 for r in rlog:
646 for r in rlog:
647 yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
647 yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
648 if r in revs:
648 if r in revs:
649 yield b'l', (r, b"r%i" % r)
649 yield b'l', (r, b"r%i" % r)
650
650
651 elif repo:
651 elif repo:
652 cl = repo.changelog
652 cl = repo.changelog
653 tags = opts.get('tags')
653 tags = opts.get('tags')
654 branches = opts.get('branches')
654 branches = opts.get('branches')
655 if tags:
655 if tags:
656 labels = {}
656 labels = {}
657 for l, n in repo.tags().items():
657 for l, n in repo.tags().items():
658 labels.setdefault(cl.rev(n), []).append(l)
658 labels.setdefault(cl.rev(n), []).append(l)
659
659
660 def events():
660 def events():
661 b = b"default"
661 b = b"default"
662 for r in cl:
662 for r in cl:
663 if branches:
663 if branches:
664 newb = cl.read(cl.node(r))[5][b'branch']
664 newb = cl.read(cl.node(r))[5][b'branch']
665 if newb != b:
665 if newb != b:
666 yield b'a', newb
666 yield b'a', newb
667 b = newb
667 b = newb
668 yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
668 yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
669 if tags:
669 if tags:
670 ls = labels.get(r)
670 ls = labels.get(r)
671 if ls:
671 if ls:
672 for l in ls:
672 for l in ls:
673 yield b'l', (r, l)
673 yield b'l', (r, l)
674
674
675 else:
675 else:
676 raise error.Abort(_(b'need repo for changelog dag'))
676 raise error.Abort(_(b'need repo for changelog dag'))
677
677
678 for line in dagparser.dagtextlines(
678 for line in dagparser.dagtextlines(
679 events(),
679 events(),
680 addspaces=spaces,
680 addspaces=spaces,
681 wraplabels=True,
681 wraplabels=True,
682 wrapannotations=True,
682 wrapannotations=True,
683 wrapnonlinear=dots,
683 wrapnonlinear=dots,
684 usedots=dots,
684 usedots=dots,
685 maxlinewidth=70,
685 maxlinewidth=70,
686 ):
686 ):
687 ui.write(line)
687 ui.write(line)
688 ui.write(b"\n")
688 ui.write(b"\n")
689
689
690
690
691 @command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
691 @command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
692 def debugdata(ui, repo, file_, rev=None, **opts):
692 def debugdata(ui, repo, file_, rev=None, **opts):
693 """dump the contents of a data file revision"""
693 """dump the contents of a data file revision"""
694 opts = pycompat.byteskwargs(opts)
694 opts = pycompat.byteskwargs(opts)
695 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
695 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
696 if rev is not None:
696 if rev is not None:
697 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
697 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
698 file_, rev = None, file_
698 file_, rev = None, file_
699 elif rev is None:
699 elif rev is None:
700 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
700 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
701 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
701 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
702 try:
702 try:
703 ui.write(r.rawdata(r.lookup(rev)))
703 ui.write(r.rawdata(r.lookup(rev)))
704 except KeyError:
704 except KeyError:
705 raise error.Abort(_(b'invalid revision identifier %s') % rev)
705 raise error.Abort(_(b'invalid revision identifier %s') % rev)
706
706
707
707
708 @command(
708 @command(
709 b'debugdate',
709 b'debugdate',
710 [(b'e', b'extended', None, _(b'try extended date formats'))],
710 [(b'e', b'extended', None, _(b'try extended date formats'))],
711 _(b'[-e] DATE [RANGE]'),
711 _(b'[-e] DATE [RANGE]'),
712 norepo=True,
712 norepo=True,
713 optionalrepo=True,
713 optionalrepo=True,
714 )
714 )
715 def debugdate(ui, date, range=None, **opts):
715 def debugdate(ui, date, range=None, **opts):
716 """parse and display a date"""
716 """parse and display a date"""
717 if opts["extended"]:
717 if opts["extended"]:
718 d = dateutil.parsedate(date, dateutil.extendeddateformats)
718 d = dateutil.parsedate(date, dateutil.extendeddateformats)
719 else:
719 else:
720 d = dateutil.parsedate(date)
720 d = dateutil.parsedate(date)
721 ui.writenoi18n(b"internal: %d %d\n" % d)
721 ui.writenoi18n(b"internal: %d %d\n" % d)
722 ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
722 ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
723 if range:
723 if range:
724 m = dateutil.matchdate(range)
724 m = dateutil.matchdate(range)
725 ui.writenoi18n(b"match: %s\n" % m(d[0]))
725 ui.writenoi18n(b"match: %s\n" % m(d[0]))
726
726
727
727
728 @command(
728 @command(
729 b'debugdeltachain',
729 b'debugdeltachain',
730 cmdutil.debugrevlogopts + cmdutil.formatteropts,
730 cmdutil.debugrevlogopts + cmdutil.formatteropts,
731 _(b'-c|-m|FILE'),
731 _(b'-c|-m|FILE'),
732 optionalrepo=True,
732 optionalrepo=True,
733 )
733 )
734 def debugdeltachain(ui, repo, file_=None, **opts):
734 def debugdeltachain(ui, repo, file_=None, **opts):
735 """dump information about delta chains in a revlog
735 """dump information about delta chains in a revlog
736
736
737 Output can be templatized. Available template keywords are:
737 Output can be templatized. Available template keywords are:
738
738
739 :``rev``: revision number
739 :``rev``: revision number
740 :``chainid``: delta chain identifier (numbered by unique base)
740 :``chainid``: delta chain identifier (numbered by unique base)
741 :``chainlen``: delta chain length to this revision
741 :``chainlen``: delta chain length to this revision
742 :``prevrev``: previous revision in delta chain
742 :``prevrev``: previous revision in delta chain
743 :``deltatype``: role of delta / how it was computed
743 :``deltatype``: role of delta / how it was computed
744 :``compsize``: compressed size of revision
744 :``compsize``: compressed size of revision
745 :``uncompsize``: uncompressed size of revision
745 :``uncompsize``: uncompressed size of revision
746 :``chainsize``: total size of compressed revisions in chain
746 :``chainsize``: total size of compressed revisions in chain
747 :``chainratio``: total chain size divided by uncompressed revision size
747 :``chainratio``: total chain size divided by uncompressed revision size
748 (new delta chains typically start at ratio 2.00)
748 (new delta chains typically start at ratio 2.00)
749 :``lindist``: linear distance from base revision in delta chain to end
749 :``lindist``: linear distance from base revision in delta chain to end
750 of this revision
750 of this revision
751 :``extradist``: total size of revisions not part of this delta chain from
751 :``extradist``: total size of revisions not part of this delta chain from
752 base of delta chain to end of this revision; a measurement
752 base of delta chain to end of this revision; a measurement
753 of how much extra data we need to read/seek across to read
753 of how much extra data we need to read/seek across to read
754 the delta chain for this revision
754 the delta chain for this revision
755 :``extraratio``: extradist divided by chainsize; another representation of
755 :``extraratio``: extradist divided by chainsize; another representation of
756 how much unrelated data is needed to load this delta chain
756 how much unrelated data is needed to load this delta chain
757
757
758 If the repository is configured to use the sparse read, additional keywords
758 If the repository is configured to use the sparse read, additional keywords
759 are available:
759 are available:
760
760
761 :``readsize``: total size of data read from the disk for a revision
761 :``readsize``: total size of data read from the disk for a revision
762 (sum of the sizes of all the blocks)
762 (sum of the sizes of all the blocks)
763 :``largestblock``: size of the largest block of data read from the disk
763 :``largestblock``: size of the largest block of data read from the disk
764 :``readdensity``: density of useful bytes in the data read from the disk
764 :``readdensity``: density of useful bytes in the data read from the disk
765 :``srchunks``: in how many data hunks the whole revision would be read
765 :``srchunks``: in how many data hunks the whole revision would be read
766
766
767 The sparse read can be enabled with experimental.sparse-read = True
767 The sparse read can be enabled with experimental.sparse-read = True
768 """
768 """
769 opts = pycompat.byteskwargs(opts)
769 opts = pycompat.byteskwargs(opts)
770 r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
770 r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
771 index = r.index
771 index = r.index
772 start = r.start
772 start = r.start
773 length = r.length
773 length = r.length
774 generaldelta = r.version & revlog.FLAG_GENERALDELTA
774 generaldelta = r.version & revlog.FLAG_GENERALDELTA
775 withsparseread = getattr(r, '_withsparseread', False)
775 withsparseread = getattr(r, '_withsparseread', False)
776
776
777 def revinfo(rev):
777 def revinfo(rev):
778 e = index[rev]
778 e = index[rev]
779 compsize = e[1]
779 compsize = e[1]
780 uncompsize = e[2]
780 uncompsize = e[2]
781 chainsize = 0
781 chainsize = 0
782
782
783 if generaldelta:
783 if generaldelta:
784 if e[3] == e[5]:
784 if e[3] == e[5]:
785 deltatype = b'p1'
785 deltatype = b'p1'
786 elif e[3] == e[6]:
786 elif e[3] == e[6]:
787 deltatype = b'p2'
787 deltatype = b'p2'
788 elif e[3] == rev - 1:
788 elif e[3] == rev - 1:
789 deltatype = b'prev'
789 deltatype = b'prev'
790 elif e[3] == rev:
790 elif e[3] == rev:
791 deltatype = b'base'
791 deltatype = b'base'
792 else:
792 else:
793 deltatype = b'other'
793 deltatype = b'other'
794 else:
794 else:
795 if e[3] == rev:
795 if e[3] == rev:
796 deltatype = b'base'
796 deltatype = b'base'
797 else:
797 else:
798 deltatype = b'prev'
798 deltatype = b'prev'
799
799
800 chain = r._deltachain(rev)[0]
800 chain = r._deltachain(rev)[0]
801 for iterrev in chain:
801 for iterrev in chain:
802 e = index[iterrev]
802 e = index[iterrev]
803 chainsize += e[1]
803 chainsize += e[1]
804
804
805 return compsize, uncompsize, deltatype, chain, chainsize
805 return compsize, uncompsize, deltatype, chain, chainsize
806
806
807 fm = ui.formatter(b'debugdeltachain', opts)
807 fm = ui.formatter(b'debugdeltachain', opts)
808
808
809 fm.plain(
809 fm.plain(
810 b' rev chain# chainlen prev delta '
810 b' rev chain# chainlen prev delta '
811 b'size rawsize chainsize ratio lindist extradist '
811 b'size rawsize chainsize ratio lindist extradist '
812 b'extraratio'
812 b'extraratio'
813 )
813 )
814 if withsparseread:
814 if withsparseread:
815 fm.plain(b' readsize largestblk rddensity srchunks')
815 fm.plain(b' readsize largestblk rddensity srchunks')
816 fm.plain(b'\n')
816 fm.plain(b'\n')
817
817
818 chainbases = {}
818 chainbases = {}
819 for rev in r:
819 for rev in r:
820 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
820 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
821 chainbase = chain[0]
821 chainbase = chain[0]
822 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
822 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
823 basestart = start(chainbase)
823 basestart = start(chainbase)
824 revstart = start(rev)
824 revstart = start(rev)
825 lineardist = revstart + comp - basestart
825 lineardist = revstart + comp - basestart
826 extradist = lineardist - chainsize
826 extradist = lineardist - chainsize
827 try:
827 try:
828 prevrev = chain[-2]
828 prevrev = chain[-2]
829 except IndexError:
829 except IndexError:
830 prevrev = -1
830 prevrev = -1
831
831
832 if uncomp != 0:
832 if uncomp != 0:
833 chainratio = float(chainsize) / float(uncomp)
833 chainratio = float(chainsize) / float(uncomp)
834 else:
834 else:
835 chainratio = chainsize
835 chainratio = chainsize
836
836
837 if chainsize != 0:
837 if chainsize != 0:
838 extraratio = float(extradist) / float(chainsize)
838 extraratio = float(extradist) / float(chainsize)
839 else:
839 else:
840 extraratio = extradist
840 extraratio = extradist
841
841
842 fm.startitem()
842 fm.startitem()
843 fm.write(
843 fm.write(
844 b'rev chainid chainlen prevrev deltatype compsize '
844 b'rev chainid chainlen prevrev deltatype compsize '
845 b'uncompsize chainsize chainratio lindist extradist '
845 b'uncompsize chainsize chainratio lindist extradist '
846 b'extraratio',
846 b'extraratio',
847 b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
847 b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
848 rev,
848 rev,
849 chainid,
849 chainid,
850 len(chain),
850 len(chain),
851 prevrev,
851 prevrev,
852 deltatype,
852 deltatype,
853 comp,
853 comp,
854 uncomp,
854 uncomp,
855 chainsize,
855 chainsize,
856 chainratio,
856 chainratio,
857 lineardist,
857 lineardist,
858 extradist,
858 extradist,
859 extraratio,
859 extraratio,
860 rev=rev,
860 rev=rev,
861 chainid=chainid,
861 chainid=chainid,
862 chainlen=len(chain),
862 chainlen=len(chain),
863 prevrev=prevrev,
863 prevrev=prevrev,
864 deltatype=deltatype,
864 deltatype=deltatype,
865 compsize=comp,
865 compsize=comp,
866 uncompsize=uncomp,
866 uncompsize=uncomp,
867 chainsize=chainsize,
867 chainsize=chainsize,
868 chainratio=chainratio,
868 chainratio=chainratio,
869 lindist=lineardist,
869 lindist=lineardist,
870 extradist=extradist,
870 extradist=extradist,
871 extraratio=extraratio,
871 extraratio=extraratio,
872 )
872 )
873 if withsparseread:
873 if withsparseread:
874 readsize = 0
874 readsize = 0
875 largestblock = 0
875 largestblock = 0
876 srchunks = 0
876 srchunks = 0
877
877
878 for revschunk in deltautil.slicechunk(r, chain):
878 for revschunk in deltautil.slicechunk(r, chain):
879 srchunks += 1
879 srchunks += 1
880 blkend = start(revschunk[-1]) + length(revschunk[-1])
880 blkend = start(revschunk[-1]) + length(revschunk[-1])
881 blksize = blkend - start(revschunk[0])
881 blksize = blkend - start(revschunk[0])
882
882
883 readsize += blksize
883 readsize += blksize
884 if largestblock < blksize:
884 if largestblock < blksize:
885 largestblock = blksize
885 largestblock = blksize
886
886
887 if readsize:
887 if readsize:
888 readdensity = float(chainsize) / float(readsize)
888 readdensity = float(chainsize) / float(readsize)
889 else:
889 else:
890 readdensity = 1
890 readdensity = 1
891
891
892 fm.write(
892 fm.write(
893 b'readsize largestblock readdensity srchunks',
893 b'readsize largestblock readdensity srchunks',
894 b' %10d %10d %9.5f %8d',
894 b' %10d %10d %9.5f %8d',
895 readsize,
895 readsize,
896 largestblock,
896 largestblock,
897 readdensity,
897 readdensity,
898 srchunks,
898 srchunks,
899 readsize=readsize,
899 readsize=readsize,
900 largestblock=largestblock,
900 largestblock=largestblock,
901 readdensity=readdensity,
901 readdensity=readdensity,
902 srchunks=srchunks,
902 srchunks=srchunks,
903 )
903 )
904
904
905 fm.plain(b'\n')
905 fm.plain(b'\n')
906
906
907 fm.end()
907 fm.end()
908
908
909
909
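# A hedged example of templating the output documented above, assuming the
# standard -T/--template support pulled in through cmdutil.formatteropts:
#
#     hg debugdeltachain -m -T '{rev} {chainid} {chainlen} {deltatype} {chainratio}\n'
#
# restricts the dump to the manifest revlog and emits one compact line per
# revision using the keywords listed in the docstring.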
910 @command(
910 @command(
911 b'debugdirstate|debugstate',
911 b'debugdirstate|debugstate',
912 [
912 [
913 (
913 (
914 b'',
914 b'',
915 b'nodates',
915 b'nodates',
916 None,
916 None,
917 _(b'do not display the saved mtime (DEPRECATED)'),
917 _(b'do not display the saved mtime (DEPRECATED)'),
918 ),
918 ),
919 (b'', b'dates', True, _(b'display the saved mtime')),
919 (b'', b'dates', True, _(b'display the saved mtime')),
920 (b'', b'datesort', None, _(b'sort by saved mtime')),
920 (b'', b'datesort', None, _(b'sort by saved mtime')),
921 ],
921 ],
922 _(b'[OPTION]...'),
922 _(b'[OPTION]...'),
923 )
923 )
924 def debugstate(ui, repo, **opts):
924 def debugstate(ui, repo, **opts):
925 """show the contents of the current dirstate"""
925 """show the contents of the current dirstate"""
926
926
927 nodates = not opts['dates']
927 nodates = not opts['dates']
928 if opts.get('nodates') is not None:
928 if opts.get('nodates') is not None:
929 nodates = True
929 nodates = True
930 datesort = opts.get('datesort')
930 datesort = opts.get('datesort')
931
931
932 if datesort:
932 if datesort:
933 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
933 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
934 else:
934 else:
935 keyfunc = None # sort by filename
935 keyfunc = None # sort by filename
936 for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
936 for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
937 if ent[3] == -1:
937 if ent[3] == -1:
938 timestr = b'unset '
938 timestr = b'unset '
939 elif nodates:
939 elif nodates:
940 timestr = b'set '
940 timestr = b'set '
941 else:
941 else:
942 timestr = time.strftime(
942 timestr = time.strftime(
943 "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
943 "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
944 )
944 )
945 timestr = encoding.strtolocal(timestr)
945 timestr = encoding.strtolocal(timestr)
946 if ent[1] & 0o20000:
946 if ent[1] & 0o20000:
947 mode = b'lnk'
947 mode = b'lnk'
948 else:
948 else:
949 mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
949 mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
950 ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
950 ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
951 for f in repo.dirstate.copies():
951 for f in repo.dirstate.copies():
952 ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
952 ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
953
953
954
954
955 @command(
955 @command(
956 b'debugdiscovery',
956 b'debugdiscovery',
957 [
957 [
958 (b'', b'old', None, _(b'use old-style discovery')),
958 (b'', b'old', None, _(b'use old-style discovery')),
959 (
959 (
960 b'',
960 b'',
961 b'nonheads',
961 b'nonheads',
962 None,
962 None,
963 _(b'use old-style discovery with non-heads included'),
963 _(b'use old-style discovery with non-heads included'),
964 ),
964 ),
965 (b'', b'rev', [], b'restrict discovery to this set of revs'),
965 (b'', b'rev', [], b'restrict discovery to this set of revs'),
966 (b'', b'seed', b'12323', b'specify the random seed used for discovery'),
966 (b'', b'seed', b'12323', b'specify the random seed used for discovery'),
967 ]
967 ]
968 + cmdutil.remoteopts,
968 + cmdutil.remoteopts,
969 _(b'[--rev REV] [OTHER]'),
969 _(b'[--rev REV] [OTHER]'),
970 )
970 )
971 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
971 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
972 """runs the changeset discovery protocol in isolation"""
972 """runs the changeset discovery protocol in isolation"""
973 opts = pycompat.byteskwargs(opts)
973 opts = pycompat.byteskwargs(opts)
974 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
974 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
975 remote = hg.peer(repo, opts, remoteurl)
975 remote = hg.peer(repo, opts, remoteurl)
976 ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))
976 ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))
977
977
978 # make sure tests are repeatable
978 # make sure tests are repeatable
979 random.seed(int(opts[b'seed']))
979 random.seed(int(opts[b'seed']))
980
980
981 if opts.get(b'old'):
981 if opts.get(b'old'):
982
982
983 def doit(pushedrevs, remoteheads, remote=remote):
983 def doit(pushedrevs, remoteheads, remote=remote):
984 if not util.safehasattr(remote, b'branches'):
984 if not util.safehasattr(remote, b'branches'):
985 # enable in-client legacy support
985 # enable in-client legacy support
986 remote = localrepo.locallegacypeer(remote.local())
986 remote = localrepo.locallegacypeer(remote.local())
987 common, _in, hds = treediscovery.findcommonincoming(
987 common, _in, hds = treediscovery.findcommonincoming(
988 repo, remote, force=True
988 repo, remote, force=True
989 )
989 )
990 common = set(common)
990 common = set(common)
991 if not opts.get(b'nonheads'):
991 if not opts.get(b'nonheads'):
992 ui.writenoi18n(
992 ui.writenoi18n(
993 b"unpruned common: %s\n"
993 b"unpruned common: %s\n"
994 % b" ".join(sorted(short(n) for n in common))
994 % b" ".join(sorted(short(n) for n in common))
995 )
995 )
996
996
997 clnode = repo.changelog.node
997 clnode = repo.changelog.node
998 common = repo.revs(b'heads(::%ln)', common)
998 common = repo.revs(b'heads(::%ln)', common)
999 common = {clnode(r) for r in common}
999 common = {clnode(r) for r in common}
1000 return common, hds
1000 return common, hds
1001
1001
1002 else:
1002 else:
1003
1003
1004 def doit(pushedrevs, remoteheads, remote=remote):
1004 def doit(pushedrevs, remoteheads, remote=remote):
1005 nodes = None
1005 nodes = None
1006 if pushedrevs:
1006 if pushedrevs:
1007 revs = scmutil.revrange(repo, pushedrevs)
1007 revs = scmutil.revrange(repo, pushedrevs)
1008 nodes = [repo[r].node() for r in revs]
1008 nodes = [repo[r].node() for r in revs]
1009 common, any, hds = setdiscovery.findcommonheads(
1009 common, any, hds = setdiscovery.findcommonheads(
1010 ui, repo, remote, ancestorsof=nodes
1010 ui, repo, remote, ancestorsof=nodes
1011 )
1011 )
1012 return common, hds
1012 return common, hds
1013
1013
1014 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1014 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1015 localrevs = opts[b'rev']
1015 localrevs = opts[b'rev']
1016 with util.timedcm('debug-discovery') as t:
1016 with util.timedcm('debug-discovery') as t:
1017 common, hds = doit(localrevs, remoterevs)
1017 common, hds = doit(localrevs, remoterevs)
1018
1018
1019 # compute all statistics
1019 # compute all statistics
1020 heads_common = set(common)
1020 heads_common = set(common)
1021 heads_remote = set(hds)
1021 heads_remote = set(hds)
1022 heads_local = set(repo.heads())
1022 heads_local = set(repo.heads())
1023 # note: there cannot be a local or remote head that is in common and not
1023 # note: there cannot be a local or remote head that is in common and not
1024 # itself a head of common.
1024 # itself a head of common.
1025 heads_common_local = heads_common & heads_local
1025 heads_common_local = heads_common & heads_local
1026 heads_common_remote = heads_common & heads_remote
1026 heads_common_remote = heads_common & heads_remote
1027 heads_common_both = heads_common & heads_remote & heads_local
1027 heads_common_both = heads_common & heads_remote & heads_local
1028
1028
1029 all = repo.revs(b'all()')
1029 all = repo.revs(b'all()')
1030 common = repo.revs(b'::%ln', common)
1030 common = repo.revs(b'::%ln', common)
1031 roots_common = repo.revs(b'roots(::%ld)', common)
1031 roots_common = repo.revs(b'roots(::%ld)', common)
1032 missing = repo.revs(b'not ::%ld', common)
1032 missing = repo.revs(b'not ::%ld', common)
1033 heads_missing = repo.revs(b'heads(%ld)', missing)
1033 heads_missing = repo.revs(b'heads(%ld)', missing)
1034 roots_missing = repo.revs(b'roots(%ld)', missing)
1034 roots_missing = repo.revs(b'roots(%ld)', missing)
1035 assert len(common) + len(missing) == len(all)
1035 assert len(common) + len(missing) == len(all)
1036
1036
1037 initial_undecided = repo.revs(
1038 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1039 )
1040 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1041 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1042 common_initial_undecided = initial_undecided & common
1043 missing_initial_undecided = initial_undecided & missing
1044
1037 data = {}
1045 data = {}
1038 data[b'elapsed'] = t.elapsed
1046 data[b'elapsed'] = t.elapsed
1039 data[b'nb-common-heads'] = len(heads_common)
1047 data[b'nb-common-heads'] = len(heads_common)
1040 data[b'nb-common-heads-local'] = len(heads_common_local)
1048 data[b'nb-common-heads-local'] = len(heads_common_local)
1041 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1049 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1042 data[b'nb-common-heads-both'] = len(heads_common_both)
1050 data[b'nb-common-heads-both'] = len(heads_common_both)
1043 data[b'nb-common-roots'] = len(roots_common)
1051 data[b'nb-common-roots'] = len(roots_common)
1044 data[b'nb-head-local'] = len(heads_local)
1052 data[b'nb-head-local'] = len(heads_local)
1045 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1053 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1046 data[b'nb-head-remote'] = len(heads_remote)
1054 data[b'nb-head-remote'] = len(heads_remote)
1047 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1055 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1048 heads_common_remote
1056 heads_common_remote
1049 )
1057 )
1050 data[b'nb-revs'] = len(all)
1058 data[b'nb-revs'] = len(all)
1051 data[b'nb-revs-common'] = len(common)
1059 data[b'nb-revs-common'] = len(common)
1052 data[b'nb-revs-missing'] = len(missing)
1060 data[b'nb-revs-missing'] = len(missing)
1053 data[b'nb-missing-heads'] = len(heads_missing)
1061 data[b'nb-missing-heads'] = len(heads_missing)
1054 data[b'nb-missing-roots'] = len(roots_missing)
1062 data[b'nb-missing-roots'] = len(roots_missing)
1063 data[b'nb-ini_und'] = len(initial_undecided)
1064 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1065 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1066 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1067 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1055
1068
1056 # display discovery summary
1069 # display discovery summary
1057 ui.writenoi18n(b"elapsed time: %(elapsed)f seconds\n" % data)
1070 ui.writenoi18n(b"elapsed time: %(elapsed)f seconds\n" % data)
1058 ui.writenoi18n(b"heads summary:\n")
1071 ui.writenoi18n(b"heads summary:\n")
1059 ui.writenoi18n(b" total common heads: %(nb-common-heads)9d\n" % data)
1072 ui.writenoi18n(b" total common heads: %(nb-common-heads)9d\n" % data)
1060 ui.writenoi18n(
1073 ui.writenoi18n(
1061 b" also local heads: %(nb-common-heads-local)9d\n" % data
1074 b" also local heads: %(nb-common-heads-local)9d\n" % data
1062 )
1075 )
1063 ui.writenoi18n(
1076 ui.writenoi18n(
1064 b" also remote heads: %(nb-common-heads-remote)9d\n" % data
1077 b" also remote heads: %(nb-common-heads-remote)9d\n" % data
1065 )
1078 )
1066 ui.writenoi18n(b" both: %(nb-common-heads-both)9d\n" % data)
1079 ui.writenoi18n(b" both: %(nb-common-heads-both)9d\n" % data)
1067 ui.writenoi18n(b" local heads: %(nb-head-local)9d\n" % data)
1080 ui.writenoi18n(b" local heads: %(nb-head-local)9d\n" % data)
1068 ui.writenoi18n(
1081 ui.writenoi18n(
1069 b" common: %(nb-common-heads-local)9d\n" % data
1082 b" common: %(nb-common-heads-local)9d\n" % data
1070 )
1083 )
1071 ui.writenoi18n(
1084 ui.writenoi18n(
1072 b" missing: %(nb-head-local-missing)9d\n" % data
1085 b" missing: %(nb-head-local-missing)9d\n" % data
1073 )
1086 )
1074 ui.writenoi18n(b" remote heads: %(nb-head-remote)9d\n" % data)
1087 ui.writenoi18n(b" remote heads: %(nb-head-remote)9d\n" % data)
1075 ui.writenoi18n(
1088 ui.writenoi18n(
1076 b" common: %(nb-common-heads-remote)9d\n" % data
1089 b" common: %(nb-common-heads-remote)9d\n" % data
1077 )
1090 )
1078 ui.writenoi18n(
1091 ui.writenoi18n(
1079 b" unknown: %(nb-head-remote-unknown)9d\n" % data
1092 b" unknown: %(nb-head-remote-unknown)9d\n" % data
1080 )
1093 )
1081 ui.writenoi18n(b"local changesets: %(nb-revs)9d\n" % data)
1094 ui.writenoi18n(b"local changesets: %(nb-revs)9d\n" % data)
1082 ui.writenoi18n(b" common: %(nb-revs-common)9d\n" % data)
1095 ui.writenoi18n(b" common: %(nb-revs-common)9d\n" % data)
1083 ui.writenoi18n(b" heads: %(nb-common-heads)9d\n" % data)
1096 ui.writenoi18n(b" heads: %(nb-common-heads)9d\n" % data)
1084 ui.writenoi18n(b" roots: %(nb-common-roots)9d\n" % data)
1097 ui.writenoi18n(b" roots: %(nb-common-roots)9d\n" % data)
1085 ui.writenoi18n(b" missing: %(nb-revs-missing)9d\n" % data)
1098 ui.writenoi18n(b" missing: %(nb-revs-missing)9d\n" % data)
1086 ui.writenoi18n(b" heads: %(nb-missing-heads)9d\n" % data)
1099 ui.writenoi18n(b" heads: %(nb-missing-heads)9d\n" % data)
1087 ui.writenoi18n(b" roots: %(nb-missing-roots)9d\n" % data)
1100 ui.writenoi18n(b" roots: %(nb-missing-roots)9d\n" % data)
1101 ui.writenoi18n(b" first undecided set: %(nb-ini_und)9d\n" % data)
1102 ui.writenoi18n(b" heads: %(nb-ini_und-heads)9d\n" % data)
1103 ui.writenoi18n(b" roots: %(nb-ini_und-roots)9d\n" % data)
1104 ui.writenoi18n(b" common: %(nb-ini_und-common)9d\n" % data)
1105 ui.writenoi18n(b" missing: %(nb-ini_und-missing)9d\n" % data)
1088
1106
1089 if ui.verbose:
1107 if ui.verbose:
1090 ui.writenoi18n(
1108 ui.writenoi18n(
1091 b"common heads: %s\n"
1109 b"common heads: %s\n"
1092 % b" ".join(sorted(short(n) for n in heads_common))
1110 % b" ".join(sorted(short(n) for n in heads_common))
1093 )
1111 )
1094
1112
1095
1113
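# Illustrative sketch (not part of the command): the "first undecided set"
# summarized above is every local revision that is neither an ancestor of a
# remote common head nor a descendant of a local common head, mirroring the
# revset computed in the statistics block:
#
#     undecided = repo.revs(
#         b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
#     )
#
# A typical invocation against a peer (the URL is a placeholder) would be:
#
#     $ hg debugdiscovery --seed 12323 --rev tip ssh://example.com/repo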
1096 _chunksize = 4 << 10
1114 _chunksize = 4 << 10
1097
1115
1098
1116
1099 @command(
1117 @command(
1100 b'debugdownload',
1118 b'debugdownload',
1101 [
1119 [
1102 (b'o', b'output', b'', _(b'path')),
1120 (b'o', b'output', b'', _(b'path')),
1103 ],
1121 ],
1104 optionalrepo=True,
1122 optionalrepo=True,
1105 )
1123 )
1106 def debugdownload(ui, repo, url, output=None, **opts):
1124 def debugdownload(ui, repo, url, output=None, **opts):
1107 """download a resource using Mercurial logic and config"""
1125 """download a resource using Mercurial logic and config"""
1108 fh = urlmod.open(ui, url, output)
1126 fh = urlmod.open(ui, url, output)
1109
1127
1110 dest = ui
1128 dest = ui
1111 if output:
1129 if output:
1112 dest = open(output, b"wb", _chunksize)
1130 dest = open(output, b"wb", _chunksize)
1113 try:
1131 try:
1114 data = fh.read(_chunksize)
1132 data = fh.read(_chunksize)
1115 while data:
1133 while data:
1116 dest.write(data)
1134 dest.write(data)
1117 data = fh.read(_chunksize)
1135 data = fh.read(_chunksize)
1118 finally:
1136 finally:
1119 if output:
1137 if output:
1120 dest.close()
1138 dest.close()
1121
1139
1122
1140
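# Example usage (sketch): fetch a URL through Mercurial's own URL handling
# and configuration, streaming it in _chunksize pieces; without -o/--output
# the downloaded bytes go to the ui instead of a file:
#
#     $ hg debugdownload https://www.mercurial-scm.org/ -o /tmp/page.html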
1123 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1141 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1124 def debugextensions(ui, repo, **opts):
1142 def debugextensions(ui, repo, **opts):
1125 '''show information about active extensions'''
1143 '''show information about active extensions'''
1126 opts = pycompat.byteskwargs(opts)
1144 opts = pycompat.byteskwargs(opts)
1127 exts = extensions.extensions(ui)
1145 exts = extensions.extensions(ui)
1128 hgver = util.version()
1146 hgver = util.version()
1129 fm = ui.formatter(b'debugextensions', opts)
1147 fm = ui.formatter(b'debugextensions', opts)
1130 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1148 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1131 isinternal = extensions.ismoduleinternal(extmod)
1149 isinternal = extensions.ismoduleinternal(extmod)
1132 extsource = None
1150 extsource = None
1133
1151
1134 if util.safehasattr(extmod, '__file__'):
1152 if util.safehasattr(extmod, '__file__'):
1135 extsource = pycompat.fsencode(extmod.__file__)
1153 extsource = pycompat.fsencode(extmod.__file__)
1136 elif getattr(sys, 'oxidized', False):
1154 elif getattr(sys, 'oxidized', False):
1137 extsource = pycompat.sysexecutable
1155 extsource = pycompat.sysexecutable
1138 if isinternal:
1156 if isinternal:
1139 exttestedwith = [] # never expose magic string to users
1157 exttestedwith = [] # never expose magic string to users
1140 else:
1158 else:
1141 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1159 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1142 extbuglink = getattr(extmod, 'buglink', None)
1160 extbuglink = getattr(extmod, 'buglink', None)
1143
1161
1144 fm.startitem()
1162 fm.startitem()
1145
1163
1146 if ui.quiet or ui.verbose:
1164 if ui.quiet or ui.verbose:
1147 fm.write(b'name', b'%s\n', extname)
1165 fm.write(b'name', b'%s\n', extname)
1148 else:
1166 else:
1149 fm.write(b'name', b'%s', extname)
1167 fm.write(b'name', b'%s', extname)
1150 if isinternal or hgver in exttestedwith:
1168 if isinternal or hgver in exttestedwith:
1151 fm.plain(b'\n')
1169 fm.plain(b'\n')
1152 elif not exttestedwith:
1170 elif not exttestedwith:
1153 fm.plain(_(b' (untested!)\n'))
1171 fm.plain(_(b' (untested!)\n'))
1154 else:
1172 else:
1155 lasttestedversion = exttestedwith[-1]
1173 lasttestedversion = exttestedwith[-1]
1156 fm.plain(b' (%s!)\n' % lasttestedversion)
1174 fm.plain(b' (%s!)\n' % lasttestedversion)
1157
1175
1158 fm.condwrite(
1176 fm.condwrite(
1159 ui.verbose and extsource,
1177 ui.verbose and extsource,
1160 b'source',
1178 b'source',
1161 _(b' location: %s\n'),
1179 _(b' location: %s\n'),
1162 extsource or b"",
1180 extsource or b"",
1163 )
1181 )
1164
1182
1165 if ui.verbose:
1183 if ui.verbose:
1166 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1184 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1167 fm.data(bundled=isinternal)
1185 fm.data(bundled=isinternal)
1168
1186
1169 fm.condwrite(
1187 fm.condwrite(
1170 ui.verbose and exttestedwith,
1188 ui.verbose and exttestedwith,
1171 b'testedwith',
1189 b'testedwith',
1172 _(b' tested with: %s\n'),
1190 _(b' tested with: %s\n'),
1173 fm.formatlist(exttestedwith, name=b'ver'),
1191 fm.formatlist(exttestedwith, name=b'ver'),
1174 )
1192 )
1175
1193
1176 fm.condwrite(
1194 fm.condwrite(
1177 ui.verbose and extbuglink,
1195 ui.verbose and extbuglink,
1178 b'buglink',
1196 b'buglink',
1179 _(b' bug reporting: %s\n'),
1197 _(b' bug reporting: %s\n'),
1180 extbuglink or b"",
1198 extbuglink or b"",
1181 )
1199 )
1182
1200
1183 fm.end()
1201 fm.end()
1184
1202
1185
1203
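# Example usage (sketch): the cmdutil.formatteropts wired above make the
# listing available in structured form as well:
#
#     $ hg debugextensions -v          # adds location, bundled, tested with
#     $ hg debugextensions -T json     # machine-readable output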
1186 @command(
1204 @command(
1187 b'debugfileset',
1205 b'debugfileset',
1188 [
1206 [
1189 (
1207 (
1190 b'r',
1208 b'r',
1191 b'rev',
1209 b'rev',
1192 b'',
1210 b'',
1193 _(b'apply the filespec on this revision'),
1211 _(b'apply the filespec on this revision'),
1194 _(b'REV'),
1212 _(b'REV'),
1195 ),
1213 ),
1196 (
1214 (
1197 b'',
1215 b'',
1198 b'all-files',
1216 b'all-files',
1199 False,
1217 False,
1200 _(b'test files from all revisions and working directory'),
1218 _(b'test files from all revisions and working directory'),
1201 ),
1219 ),
1202 (
1220 (
1203 b's',
1221 b's',
1204 b'show-matcher',
1222 b'show-matcher',
1205 None,
1223 None,
1206 _(b'print internal representation of matcher'),
1224 _(b'print internal representation of matcher'),
1207 ),
1225 ),
1208 (
1226 (
1209 b'p',
1227 b'p',
1210 b'show-stage',
1228 b'show-stage',
1211 [],
1229 [],
1212 _(b'print parsed tree at the given stage'),
1230 _(b'print parsed tree at the given stage'),
1213 _(b'NAME'),
1231 _(b'NAME'),
1214 ),
1232 ),
1215 ],
1233 ],
1216 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1234 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1217 )
1235 )
1218 def debugfileset(ui, repo, expr, **opts):
1236 def debugfileset(ui, repo, expr, **opts):
1219 '''parse and apply a fileset specification'''
1237 '''parse and apply a fileset specification'''
1220 from . import fileset
1238 from . import fileset
1221
1239
1222 fileset.symbols # force import of fileset so we have predicates to optimize
1240 fileset.symbols # force import of fileset so we have predicates to optimize
1223 opts = pycompat.byteskwargs(opts)
1241 opts = pycompat.byteskwargs(opts)
1224 ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)
1242 ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)
1225
1243
1226 stages = [
1244 stages = [
1227 (b'parsed', pycompat.identity),
1245 (b'parsed', pycompat.identity),
1228 (b'analyzed', filesetlang.analyze),
1246 (b'analyzed', filesetlang.analyze),
1229 (b'optimized', filesetlang.optimize),
1247 (b'optimized', filesetlang.optimize),
1230 ]
1248 ]
1231 stagenames = {n for n, f in stages}
1249 stagenames = {n for n, f in stages}
1232
1250
1233 showalways = set()
1251 showalways = set()
1234 if ui.verbose and not opts[b'show_stage']:
1252 if ui.verbose and not opts[b'show_stage']:
1235 # show parsed tree by --verbose (deprecated)
1253 # show parsed tree by --verbose (deprecated)
1236 showalways.add(b'parsed')
1254 showalways.add(b'parsed')
1237 if opts[b'show_stage'] == [b'all']:
1255 if opts[b'show_stage'] == [b'all']:
1238 showalways.update(stagenames)
1256 showalways.update(stagenames)
1239 else:
1257 else:
1240 for n in opts[b'show_stage']:
1258 for n in opts[b'show_stage']:
1241 if n not in stagenames:
1259 if n not in stagenames:
1242 raise error.Abort(_(b'invalid stage name: %s') % n)
1260 raise error.Abort(_(b'invalid stage name: %s') % n)
1243 showalways.update(opts[b'show_stage'])
1261 showalways.update(opts[b'show_stage'])
1244
1262
1245 tree = filesetlang.parse(expr)
1263 tree = filesetlang.parse(expr)
1246 for n, f in stages:
1264 for n, f in stages:
1247 tree = f(tree)
1265 tree = f(tree)
1248 if n in showalways:
1266 if n in showalways:
1249 if opts[b'show_stage'] or n != b'parsed':
1267 if opts[b'show_stage'] or n != b'parsed':
1250 ui.write(b"* %s:\n" % n)
1268 ui.write(b"* %s:\n" % n)
1251 ui.write(filesetlang.prettyformat(tree), b"\n")
1269 ui.write(filesetlang.prettyformat(tree), b"\n")
1252
1270
1253 files = set()
1271 files = set()
1254 if opts[b'all_files']:
1272 if opts[b'all_files']:
1255 for r in repo:
1273 for r in repo:
1256 c = repo[r]
1274 c = repo[r]
1257 files.update(c.files())
1275 files.update(c.files())
1258 files.update(c.substate)
1276 files.update(c.substate)
1259 if opts[b'all_files'] or ctx.rev() is None:
1277 if opts[b'all_files'] or ctx.rev() is None:
1260 wctx = repo[None]
1278 wctx = repo[None]
1261 files.update(
1279 files.update(
1262 repo.dirstate.walk(
1280 repo.dirstate.walk(
1263 scmutil.matchall(repo),
1281 scmutil.matchall(repo),
1264 subrepos=list(wctx.substate),
1282 subrepos=list(wctx.substate),
1265 unknown=True,
1283 unknown=True,
1266 ignored=True,
1284 ignored=True,
1267 )
1285 )
1268 )
1286 )
1269 files.update(wctx.substate)
1287 files.update(wctx.substate)
1270 else:
1288 else:
1271 files.update(ctx.files())
1289 files.update(ctx.files())
1272 files.update(ctx.substate)
1290 files.update(ctx.substate)
1273
1291
1274 m = ctx.matchfileset(repo.getcwd(), expr)
1292 m = ctx.matchfileset(repo.getcwd(), expr)
1275 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1293 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1276 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1294 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1277 for f in sorted(files):
1295 for f in sorted(files):
1278 if not m(f):
1296 if not m(f):
1279 continue
1297 continue
1280 ui.write(b"%s\n" % f)
1298 ui.write(b"%s\n" % f)
1281
1299
1282
1300
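# Example usage (sketch): -p/--show-stage prints the tree after any of the
# stages registered above (parsed, analyzed, optimized; 'all' shows each),
# and --all-files matches against files from every revision:
#
#     $ hg debugfileset -p all '*.py and size(">1k")'
#     $ hg debugfileset -r . --all-files 'binary()'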
1283 @command(b'debugformat', [] + cmdutil.formatteropts)
1301 @command(b'debugformat', [] + cmdutil.formatteropts)
1284 def debugformat(ui, repo, **opts):
1302 def debugformat(ui, repo, **opts):
1285 """display format information about the current repository
1303 """display format information about the current repository
1286
1304
1287 Use --verbose to get extra information about the current config value and
1305 Use --verbose to get extra information about the current config value and
1288 the Mercurial default."""
1306 the Mercurial default."""
1289 opts = pycompat.byteskwargs(opts)
1307 opts = pycompat.byteskwargs(opts)
1290 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1308 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1291 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1309 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1292
1310
1293 def makeformatname(name):
1311 def makeformatname(name):
1294 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1312 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1295
1313
1296 fm = ui.formatter(b'debugformat', opts)
1314 fm = ui.formatter(b'debugformat', opts)
1297 if fm.isplain():
1315 if fm.isplain():
1298
1316
1299 def formatvalue(value):
1317 def formatvalue(value):
1300 if util.safehasattr(value, b'startswith'):
1318 if util.safehasattr(value, b'startswith'):
1301 return value
1319 return value
1302 if value:
1320 if value:
1303 return b'yes'
1321 return b'yes'
1304 else:
1322 else:
1305 return b'no'
1323 return b'no'
1306
1324
1307 else:
1325 else:
1308 formatvalue = pycompat.identity
1326 formatvalue = pycompat.identity
1309
1327
1310 fm.plain(b'format-variant')
1328 fm.plain(b'format-variant')
1311 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1329 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1312 fm.plain(b' repo')
1330 fm.plain(b' repo')
1313 if ui.verbose:
1331 if ui.verbose:
1314 fm.plain(b' config default')
1332 fm.plain(b' config default')
1315 fm.plain(b'\n')
1333 fm.plain(b'\n')
1316 for fv in upgrade.allformatvariant:
1334 for fv in upgrade.allformatvariant:
1317 fm.startitem()
1335 fm.startitem()
1318 repovalue = fv.fromrepo(repo)
1336 repovalue = fv.fromrepo(repo)
1319 configvalue = fv.fromconfig(repo)
1337 configvalue = fv.fromconfig(repo)
1320
1338
1321 if repovalue != configvalue:
1339 if repovalue != configvalue:
1322 namelabel = b'formatvariant.name.mismatchconfig'
1340 namelabel = b'formatvariant.name.mismatchconfig'
1323 repolabel = b'formatvariant.repo.mismatchconfig'
1341 repolabel = b'formatvariant.repo.mismatchconfig'
1324 elif repovalue != fv.default:
1342 elif repovalue != fv.default:
1325 namelabel = b'formatvariant.name.mismatchdefault'
1343 namelabel = b'formatvariant.name.mismatchdefault'
1326 repolabel = b'formatvariant.repo.mismatchdefault'
1344 repolabel = b'formatvariant.repo.mismatchdefault'
1327 else:
1345 else:
1328 namelabel = b'formatvariant.name.uptodate'
1346 namelabel = b'formatvariant.name.uptodate'
1329 repolabel = b'formatvariant.repo.uptodate'
1347 repolabel = b'formatvariant.repo.uptodate'
1330
1348
1331 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1349 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1332 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1350 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1333 if fv.default != configvalue:
1351 if fv.default != configvalue:
1334 configlabel = b'formatvariant.config.special'
1352 configlabel = b'formatvariant.config.special'
1335 else:
1353 else:
1336 configlabel = b'formatvariant.config.default'
1354 configlabel = b'formatvariant.config.default'
1337 fm.condwrite(
1355 fm.condwrite(
1338 ui.verbose,
1356 ui.verbose,
1339 b'config',
1357 b'config',
1340 b' %6s',
1358 b' %6s',
1341 formatvalue(configvalue),
1359 formatvalue(configvalue),
1342 label=configlabel,
1360 label=configlabel,
1343 )
1361 )
1344 fm.condwrite(
1362 fm.condwrite(
1345 ui.verbose,
1363 ui.verbose,
1346 b'default',
1364 b'default',
1347 b' %7s',
1365 b' %7s',
1348 formatvalue(fv.default),
1366 formatvalue(fv.default),
1349 label=b'formatvariant.default',
1367 label=b'formatvariant.default',
1350 )
1368 )
1351 fm.plain(b'\n')
1369 fm.plain(b'\n')
1352 fm.end()
1370 fm.end()
1353
1371
1354
1372
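# Example output (sketch; variant names and values depend on the repository
# and the Mercurial version):
#
#     $ hg debugformat -v
#     format-variant     repo config default
#     fncache:            yes    yes     yes
#     generaldelta:       yes    yes     yes
#     sparserevlog:       yes    yes     yes
#
# Mismatches between "repo" and "config" are highlighted through the
# formatvariant.* labels set above.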
1355 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1373 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1356 def debugfsinfo(ui, path=b"."):
1374 def debugfsinfo(ui, path=b"."):
1357 """show information detected about current filesystem"""
1375 """show information detected about current filesystem"""
1358 ui.writenoi18n(b'path: %s\n' % path)
1376 ui.writenoi18n(b'path: %s\n' % path)
1359 ui.writenoi18n(
1377 ui.writenoi18n(
1360 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1378 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1361 )
1379 )
1362 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1380 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1363 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1381 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1364 ui.writenoi18n(
1382 ui.writenoi18n(
1365 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1383 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1366 )
1384 )
1367 ui.writenoi18n(
1385 ui.writenoi18n(
1368 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1386 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1369 )
1387 )
1370 casesensitive = b'(unknown)'
1388 casesensitive = b'(unknown)'
1371 try:
1389 try:
1372 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1390 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1373 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1391 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1374 except OSError:
1392 except OSError:
1375 pass
1393 pass
1376 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1394 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1377
1395
1378
1396
1379 @command(
1397 @command(
1380 b'debuggetbundle',
1398 b'debuggetbundle',
1381 [
1399 [
1382 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1400 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1383 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1401 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1384 (
1402 (
1385 b't',
1403 b't',
1386 b'type',
1404 b'type',
1387 b'bzip2',
1405 b'bzip2',
1388 _(b'bundle compression type to use'),
1406 _(b'bundle compression type to use'),
1389 _(b'TYPE'),
1407 _(b'TYPE'),
1390 ),
1408 ),
1391 ],
1409 ],
1392 _(b'REPO FILE [-H|-C ID]...'),
1410 _(b'REPO FILE [-H|-C ID]...'),
1393 norepo=True,
1411 norepo=True,
1394 )
1412 )
1395 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1413 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1396 """retrieves a bundle from a repo
1414 """retrieves a bundle from a repo
1397
1415
1398 Every ID must be a full-length hex node id string. Saves the bundle to the
1416 Every ID must be a full-length hex node id string. Saves the bundle to the
1399 given file.
1417 given file.
1400 """
1418 """
1401 opts = pycompat.byteskwargs(opts)
1419 opts = pycompat.byteskwargs(opts)
1402 repo = hg.peer(ui, opts, repopath)
1420 repo = hg.peer(ui, opts, repopath)
1403 if not repo.capable(b'getbundle'):
1421 if not repo.capable(b'getbundle'):
1404 raise error.Abort(b"getbundle() not supported by target repository")
1422 raise error.Abort(b"getbundle() not supported by target repository")
1405 args = {}
1423 args = {}
1406 if common:
1424 if common:
1407 args['common'] = [bin(s) for s in common]
1425 args['common'] = [bin(s) for s in common]
1408 if head:
1426 if head:
1409 args['heads'] = [bin(s) for s in head]
1427 args['heads'] = [bin(s) for s in head]
1410 # TODO: get desired bundlecaps from command line.
1428 # TODO: get desired bundlecaps from command line.
1411 args['bundlecaps'] = None
1429 args['bundlecaps'] = None
1412 bundle = repo.getbundle(b'debug', **args)
1430 bundle = repo.getbundle(b'debug', **args)
1413
1431
1414 bundletype = opts.get(b'type', b'bzip2').lower()
1432 bundletype = opts.get(b'type', b'bzip2').lower()
1415 btypes = {
1433 btypes = {
1416 b'none': b'HG10UN',
1434 b'none': b'HG10UN',
1417 b'bzip2': b'HG10BZ',
1435 b'bzip2': b'HG10BZ',
1418 b'gzip': b'HG10GZ',
1436 b'gzip': b'HG10GZ',
1419 b'bundle2': b'HG20',
1437 b'bundle2': b'HG20',
1420 }
1438 }
1421 bundletype = btypes.get(bundletype)
1439 bundletype = btypes.get(bundletype)
1422 if bundletype not in bundle2.bundletypes:
1440 if bundletype not in bundle2.bundletypes:
1423 raise error.Abort(_(b'unknown bundle type specified with --type'))
1441 raise error.Abort(_(b'unknown bundle type specified with --type'))
1424 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1442 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1425
1443
1426
1444
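# Example usage (sketch; the node id is a placeholder and must be a full
# 40-character hex id):
#
#     $ hg debuggetbundle http://example.com/repo out.hg \
#           -H 0123456789012345678901234567890123456789 -t bundle2
#
# The --type values map onto the on-disk formats listed above:
# none/bzip2/gzip/bundle2 -> HG10UN/HG10BZ/HG10GZ/HG20.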
1427 @command(b'debugignore', [], b'[FILE]')
1445 @command(b'debugignore', [], b'[FILE]')
1428 def debugignore(ui, repo, *files, **opts):
1446 def debugignore(ui, repo, *files, **opts):
1429 """display the combined ignore pattern and information about ignored files
1447 """display the combined ignore pattern and information about ignored files
1430
1448
1431 With no argument display the combined ignore pattern.
1449 With no argument display the combined ignore pattern.
1432
1450
1433 Given space-separated file names, show if the given file is ignored and,
1451 Given space-separated file names, show if the given file is ignored and,
1434 if so, show the ignore rule (file and line number) that matched it.
1452 if so, show the ignore rule (file and line number) that matched it.
1435 """
1453 """
1436 ignore = repo.dirstate._ignore
1454 ignore = repo.dirstate._ignore
1437 if not files:
1455 if not files:
1438 # Show all the patterns
1456 # Show all the patterns
1439 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1457 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1440 else:
1458 else:
1441 m = scmutil.match(repo[None], pats=files)
1459 m = scmutil.match(repo[None], pats=files)
1442 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1460 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1443 for f in m.files():
1461 for f in m.files():
1444 nf = util.normpath(f)
1462 nf = util.normpath(f)
1445 ignored = None
1463 ignored = None
1446 ignoredata = None
1464 ignoredata = None
1447 if nf != b'.':
1465 if nf != b'.':
1448 if ignore(nf):
1466 if ignore(nf):
1449 ignored = nf
1467 ignored = nf
1450 ignoredata = repo.dirstate._ignorefileandline(nf)
1468 ignoredata = repo.dirstate._ignorefileandline(nf)
1451 else:
1469 else:
1452 for p in pathutil.finddirs(nf):
1470 for p in pathutil.finddirs(nf):
1453 if ignore(p):
1471 if ignore(p):
1454 ignored = p
1472 ignored = p
1455 ignoredata = repo.dirstate._ignorefileandline(p)
1473 ignoredata = repo.dirstate._ignorefileandline(p)
1456 break
1474 break
1457 if ignored:
1475 if ignored:
1458 if ignored == nf:
1476 if ignored == nf:
1459 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1477 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1460 else:
1478 else:
1461 ui.write(
1479 ui.write(
1462 _(
1480 _(
1463 b"%s is ignored because of "
1481 b"%s is ignored because of "
1464 b"containing directory %s\n"
1482 b"containing directory %s\n"
1465 )
1483 )
1466 % (uipathfn(f), ignored)
1484 % (uipathfn(f), ignored)
1467 )
1485 )
1468 ignorefile, lineno, line = ignoredata
1486 ignorefile, lineno, line = ignoredata
1469 ui.write(
1487 ui.write(
1470 _(b"(ignore rule in %s, line %d: '%s')\n")
1488 _(b"(ignore rule in %s, line %d: '%s')\n")
1471 % (ignorefile, lineno, line)
1489 % (ignorefile, lineno, line)
1472 )
1490 )
1473 else:
1491 else:
1474 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1492 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1475
1493
1476
1494
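# Example session (sketch; file names, line numbers and patterns are
# illustrative only):
#
#     $ hg debugignore build/output.o
#     build/output.o is ignored
#     (ignore rule in .hgignore, line 3: 'build/**')
#     $ hg debugignore README
#     README is not ignored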
1477 @command(
1495 @command(
1478 b'debugindex',
1496 b'debugindex',
1479 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1497 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1480 _(b'-c|-m|FILE'),
1498 _(b'-c|-m|FILE'),
1481 )
1499 )
1482 def debugindex(ui, repo, file_=None, **opts):
1500 def debugindex(ui, repo, file_=None, **opts):
1483 """dump index data for a storage primitive"""
1501 """dump index data for a storage primitive"""
1484 opts = pycompat.byteskwargs(opts)
1502 opts = pycompat.byteskwargs(opts)
1485 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1503 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1486
1504
1487 if ui.debugflag:
1505 if ui.debugflag:
1488 shortfn = hex
1506 shortfn = hex
1489 else:
1507 else:
1490 shortfn = short
1508 shortfn = short
1491
1509
1492 idlen = 12
1510 idlen = 12
1493 for i in store:
1511 for i in store:
1494 idlen = len(shortfn(store.node(i)))
1512 idlen = len(shortfn(store.node(i)))
1495 break
1513 break
1496
1514
1497 fm = ui.formatter(b'debugindex', opts)
1515 fm = ui.formatter(b'debugindex', opts)
1498 fm.plain(
1516 fm.plain(
1499 b' rev linkrev %s %s p2\n'
1517 b' rev linkrev %s %s p2\n'
1500 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
1518 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
1501 )
1519 )
1502
1520
1503 for rev in store:
1521 for rev in store:
1504 node = store.node(rev)
1522 node = store.node(rev)
1505 parents = store.parents(node)
1523 parents = store.parents(node)
1506
1524
1507 fm.startitem()
1525 fm.startitem()
1508 fm.write(b'rev', b'%6d ', rev)
1526 fm.write(b'rev', b'%6d ', rev)
1509 fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
1527 fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
1510 fm.write(b'node', b'%s ', shortfn(node))
1528 fm.write(b'node', b'%s ', shortfn(node))
1511 fm.write(b'p1', b'%s ', shortfn(parents[0]))
1529 fm.write(b'p1', b'%s ', shortfn(parents[0]))
1512 fm.write(b'p2', b'%s', shortfn(parents[1]))
1530 fm.write(b'p2', b'%s', shortfn(parents[1]))
1513 fm.plain(b'\n')
1531 fm.plain(b'\n')
1514
1532
1515 fm.end()
1533 fm.end()
1516
1534
1517
1535
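# Example usage (sketch): -c selects the changelog, -m the manifest, and a
# FILE argument selects that file's filelog; each row shows rev, linkrev,
# nodeid, p1 and p2 (full hashes with --debug):
#
#     $ hg debugindex -c
#     $ hg debugindex -m
#     $ hg debugindex path/to/file.txt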
1518 @command(
1536 @command(
1519 b'debugindexdot',
1537 b'debugindexdot',
1520 cmdutil.debugrevlogopts,
1538 cmdutil.debugrevlogopts,
1521 _(b'-c|-m|FILE'),
1539 _(b'-c|-m|FILE'),
1522 optionalrepo=True,
1540 optionalrepo=True,
1523 )
1541 )
1524 def debugindexdot(ui, repo, file_=None, **opts):
1542 def debugindexdot(ui, repo, file_=None, **opts):
1525 """dump an index DAG as a graphviz dot file"""
1543 """dump an index DAG as a graphviz dot file"""
1526 opts = pycompat.byteskwargs(opts)
1544 opts = pycompat.byteskwargs(opts)
1527 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1545 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1528 ui.writenoi18n(b"digraph G {\n")
1546 ui.writenoi18n(b"digraph G {\n")
1529 for i in r:
1547 for i in r:
1530 node = r.node(i)
1548 node = r.node(i)
1531 pp = r.parents(node)
1549 pp = r.parents(node)
1532 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1550 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1533 if pp[1] != nullid:
1551 if pp[1] != nullid:
1534 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1552 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1535 ui.write(b"}\n")
1553 ui.write(b"}\n")
1536
1554
1537
1555
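# Example usage (sketch): the emitted "digraph G { ... }" text can be piped
# straight into graphviz:
#
#     $ hg debugindexdot -c | dot -Tpng > dag.png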
1538 @command(b'debugindexstats', [])
1556 @command(b'debugindexstats', [])
1539 def debugindexstats(ui, repo):
1557 def debugindexstats(ui, repo):
1540 """show stats related to the changelog index"""
1558 """show stats related to the changelog index"""
1541 repo.changelog.shortest(nullid, 1)
1559 repo.changelog.shortest(nullid, 1)
1542 index = repo.changelog.index
1560 index = repo.changelog.index
1543 if not util.safehasattr(index, b'stats'):
1561 if not util.safehasattr(index, b'stats'):
1544 raise error.Abort(_(b'debugindexstats only works with native code'))
1562 raise error.Abort(_(b'debugindexstats only works with native code'))
1545 for k, v in sorted(index.stats().items()):
1563 for k, v in sorted(index.stats().items()):
1546 ui.write(b'%s: %d\n' % (k, v))
1564 ui.write(b'%s: %d\n' % (k, v))
1547
1565
1548
1566
1549 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1567 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1550 def debuginstall(ui, **opts):
1568 def debuginstall(ui, **opts):
1551 """test Mercurial installation
1569 """test Mercurial installation
1552
1570
1553 Returns 0 on success.
1571 Returns 0 on success.
1554 """
1572 """
1555 opts = pycompat.byteskwargs(opts)
1573 opts = pycompat.byteskwargs(opts)
1556
1574
1557 problems = 0
1575 problems = 0
1558
1576
1559 fm = ui.formatter(b'debuginstall', opts)
1577 fm = ui.formatter(b'debuginstall', opts)
1560 fm.startitem()
1578 fm.startitem()
1561
1579
1562 # encoding might be unknown or wrong. don't translate these messages.
1580 # encoding might be unknown or wrong. don't translate these messages.
1563 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1581 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1564 err = None
1582 err = None
1565 try:
1583 try:
1566 codecs.lookup(pycompat.sysstr(encoding.encoding))
1584 codecs.lookup(pycompat.sysstr(encoding.encoding))
1567 except LookupError as inst:
1585 except LookupError as inst:
1568 err = stringutil.forcebytestr(inst)
1586 err = stringutil.forcebytestr(inst)
1569 problems += 1
1587 problems += 1
1570 fm.condwrite(
1588 fm.condwrite(
1571 err,
1589 err,
1572 b'encodingerror',
1590 b'encodingerror',
1573 b" %s\n (check that your locale is properly set)\n",
1591 b" %s\n (check that your locale is properly set)\n",
1574 err,
1592 err,
1575 )
1593 )
1576
1594
1577 # Python
1595 # Python
1578 pythonlib = None
1596 pythonlib = None
1579 if util.safehasattr(os, '__file__'):
1597 if util.safehasattr(os, '__file__'):
1580 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1598 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1581 elif getattr(sys, 'oxidized', False):
1599 elif getattr(sys, 'oxidized', False):
1582 pythonlib = pycompat.sysexecutable
1600 pythonlib = pycompat.sysexecutable
1583
1601
1584 fm.write(
1602 fm.write(
1585 b'pythonexe',
1603 b'pythonexe',
1586 _(b"checking Python executable (%s)\n"),
1604 _(b"checking Python executable (%s)\n"),
1587 pycompat.sysexecutable or _(b"unknown"),
1605 pycompat.sysexecutable or _(b"unknown"),
1588 )
1606 )
1589 fm.write(
1607 fm.write(
1590 b'pythonimplementation',
1608 b'pythonimplementation',
1591 _(b"checking Python implementation (%s)\n"),
1609 _(b"checking Python implementation (%s)\n"),
1592 pycompat.sysbytes(platform.python_implementation()),
1610 pycompat.sysbytes(platform.python_implementation()),
1593 )
1611 )
1594 fm.write(
1612 fm.write(
1595 b'pythonver',
1613 b'pythonver',
1596 _(b"checking Python version (%s)\n"),
1614 _(b"checking Python version (%s)\n"),
1597 (b"%d.%d.%d" % sys.version_info[:3]),
1615 (b"%d.%d.%d" % sys.version_info[:3]),
1598 )
1616 )
1599 fm.write(
1617 fm.write(
1600 b'pythonlib',
1618 b'pythonlib',
1601 _(b"checking Python lib (%s)...\n"),
1619 _(b"checking Python lib (%s)...\n"),
1602 pythonlib or _(b"unknown"),
1620 pythonlib or _(b"unknown"),
1603 )
1621 )
1604
1622
1605 try:
1623 try:
1606 from . import rustext
1624 from . import rustext
1607
1625
1608 rustext.__doc__ # trigger lazy import
1626 rustext.__doc__ # trigger lazy import
1609 except ImportError:
1627 except ImportError:
1610 rustext = None
1628 rustext = None
1611
1629
1612 security = set(sslutil.supportedprotocols)
1630 security = set(sslutil.supportedprotocols)
1613 if sslutil.hassni:
1631 if sslutil.hassni:
1614 security.add(b'sni')
1632 security.add(b'sni')
1615
1633
1616 fm.write(
1634 fm.write(
1617 b'pythonsecurity',
1635 b'pythonsecurity',
1618 _(b"checking Python security support (%s)\n"),
1636 _(b"checking Python security support (%s)\n"),
1619 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1637 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1620 )
1638 )
1621
1639
1622 # These are warnings, not errors. So don't increment problem count. This
1640 # These are warnings, not errors. So don't increment problem count. This
1623 # may change in the future.
1641 # may change in the future.
1624 if b'tls1.2' not in security:
1642 if b'tls1.2' not in security:
1625 fm.plain(
1643 fm.plain(
1626 _(
1644 _(
1627 b' TLS 1.2 not supported by Python install; '
1645 b' TLS 1.2 not supported by Python install; '
1628 b'network connections lack modern security\n'
1646 b'network connections lack modern security\n'
1629 )
1647 )
1630 )
1648 )
1631 if b'sni' not in security:
1649 if b'sni' not in security:
1632 fm.plain(
1650 fm.plain(
1633 _(
1651 _(
1634 b' SNI not supported by Python install; may have '
1652 b' SNI not supported by Python install; may have '
1635 b'connectivity issues with some servers\n'
1653 b'connectivity issues with some servers\n'
1636 )
1654 )
1637 )
1655 )
1638
1656
1639 fm.plain(
1657 fm.plain(
1640 _(
1658 _(
1641 b"checking Rust extensions (%s)\n"
1659 b"checking Rust extensions (%s)\n"
1642 % (b'missing' if rustext is None else b'installed')
1660 % (b'missing' if rustext is None else b'installed')
1643 ),
1661 ),
1644 )
1662 )
1645
1663
1646 # TODO print CA cert info
1664 # TODO print CA cert info
1647
1665
1648 # hg version
1666 # hg version
1649 hgver = util.version()
1667 hgver = util.version()
1650 fm.write(
1668 fm.write(
1651 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1669 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1652 )
1670 )
1653 fm.write(
1671 fm.write(
1654 b'hgverextra',
1672 b'hgverextra',
1655 _(b"checking Mercurial custom build (%s)\n"),
1673 _(b"checking Mercurial custom build (%s)\n"),
1656 b'+'.join(hgver.split(b'+')[1:]),
1674 b'+'.join(hgver.split(b'+')[1:]),
1657 )
1675 )
1658
1676
1659 # compiled modules
1677 # compiled modules
1660 hgmodules = None
1678 hgmodules = None
1661 if util.safehasattr(sys.modules[__name__], '__file__'):
1679 if util.safehasattr(sys.modules[__name__], '__file__'):
1662 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1680 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1663 elif getattr(sys, 'oxidized', False):
1681 elif getattr(sys, 'oxidized', False):
1664 hgmodules = pycompat.sysexecutable
1682 hgmodules = pycompat.sysexecutable
1665
1683
1666 fm.write(
1684 fm.write(
1667 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1685 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1668 )
1686 )
1669 fm.write(
1687 fm.write(
1670 b'hgmodules',
1688 b'hgmodules',
1671 _(b"checking installed modules (%s)...\n"),
1689 _(b"checking installed modules (%s)...\n"),
1672 hgmodules or _(b"unknown"),
1690 hgmodules or _(b"unknown"),
1673 )
1691 )
1674
1692
1675 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1693 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1676 rustext = rustandc # for now, that's the only case
1694 rustext = rustandc # for now, that's the only case
1677 cext = policy.policy in (b'c', b'allow') or rustandc
1695 cext = policy.policy in (b'c', b'allow') or rustandc
1678 nopure = cext or rustext
1696 nopure = cext or rustext
1679 if nopure:
1697 if nopure:
1680 err = None
1698 err = None
1681 try:
1699 try:
1682 if cext:
1700 if cext:
1683 from .cext import ( # pytype: disable=import-error
1701 from .cext import ( # pytype: disable=import-error
1684 base85,
1702 base85,
1685 bdiff,
1703 bdiff,
1686 mpatch,
1704 mpatch,
1687 osutil,
1705 osutil,
1688 )
1706 )
1689
1707
1690 # quiet pyflakes
1708 # quiet pyflakes
1691 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1709 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1692 if rustext:
1710 if rustext:
1693 from .rustext import ( # pytype: disable=import-error
1711 from .rustext import ( # pytype: disable=import-error
1694 ancestor,
1712 ancestor,
1695 dirstate,
1713 dirstate,
1696 )
1714 )
1697
1715
1698 dir(ancestor), dir(dirstate) # quiet pyflakes
1716 dir(ancestor), dir(dirstate) # quiet pyflakes
1699 except Exception as inst:
1717 except Exception as inst:
1700 err = stringutil.forcebytestr(inst)
1718 err = stringutil.forcebytestr(inst)
1701 problems += 1
1719 problems += 1
1702 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1720 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1703
1721
1704 compengines = util.compengines._engines.values()
1722 compengines = util.compengines._engines.values()
1705 fm.write(
1723 fm.write(
1706 b'compengines',
1724 b'compengines',
1707 _(b'checking registered compression engines (%s)\n'),
1725 _(b'checking registered compression engines (%s)\n'),
1708 fm.formatlist(
1726 fm.formatlist(
1709 sorted(e.name() for e in compengines),
1727 sorted(e.name() for e in compengines),
1710 name=b'compengine',
1728 name=b'compengine',
1711 fmt=b'%s',
1729 fmt=b'%s',
1712 sep=b', ',
1730 sep=b', ',
1713 ),
1731 ),
1714 )
1732 )
1715 fm.write(
1733 fm.write(
1716 b'compenginesavail',
1734 b'compenginesavail',
1717 _(b'checking available compression engines (%s)\n'),
1735 _(b'checking available compression engines (%s)\n'),
1718 fm.formatlist(
1736 fm.formatlist(
1719 sorted(e.name() for e in compengines if e.available()),
1737 sorted(e.name() for e in compengines if e.available()),
1720 name=b'compengine',
1738 name=b'compengine',
1721 fmt=b'%s',
1739 fmt=b'%s',
1722 sep=b', ',
1740 sep=b', ',
1723 ),
1741 ),
1724 )
1742 )
1725 wirecompengines = compression.compengines.supportedwireengines(
1743 wirecompengines = compression.compengines.supportedwireengines(
1726 compression.SERVERROLE
1744 compression.SERVERROLE
1727 )
1745 )
1728 fm.write(
1746 fm.write(
1729 b'compenginesserver',
1747 b'compenginesserver',
1730 _(
1748 _(
1731 b'checking available compression engines '
1749 b'checking available compression engines '
1732 b'for wire protocol (%s)\n'
1750 b'for wire protocol (%s)\n'
1733 ),
1751 ),
1734 fm.formatlist(
1752 fm.formatlist(
1735 [e.name() for e in wirecompengines if e.wireprotosupport()],
1753 [e.name() for e in wirecompengines if e.wireprotosupport()],
1736 name=b'compengine',
1754 name=b'compengine',
1737 fmt=b'%s',
1755 fmt=b'%s',
1738 sep=b', ',
1756 sep=b', ',
1739 ),
1757 ),
1740 )
1758 )
1741 re2 = b'missing'
1759 re2 = b'missing'
1742 if util._re2:
1760 if util._re2:
1743 re2 = b'available'
1761 re2 = b'available'
1744 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1762 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1745 fm.data(re2=bool(util._re2))
1763 fm.data(re2=bool(util._re2))
1746
1764
1747 # templates
1765 # templates
1748 p = templater.templatedir()
1766 p = templater.templatedir()
1749 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
1767 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
1750 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1768 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1751 if p:
1769 if p:
1752 (m, fp) = templater.try_open_template(b"map-cmdline.default")
1770 (m, fp) = templater.try_open_template(b"map-cmdline.default")
1753 if m:
1771 if m:
1754 # template found, check if it is working
1772 # template found, check if it is working
1755 err = None
1773 err = None
1756 try:
1774 try:
1757 templater.templater.frommapfile(m)
1775 templater.templater.frommapfile(m)
1758 except Exception as inst:
1776 except Exception as inst:
1759 err = stringutil.forcebytestr(inst)
1777 err = stringutil.forcebytestr(inst)
1760 p = None
1778 p = None
1761 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1779 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1762 else:
1780 else:
1763 p = None
1781 p = None
1764 fm.condwrite(
1782 fm.condwrite(
1765 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1783 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1766 )
1784 )
1767 fm.condwrite(
1785 fm.condwrite(
1768 not m,
1786 not m,
1769 b'defaulttemplatenotfound',
1787 b'defaulttemplatenotfound',
1770 _(b" template '%s' not found\n"),
1788 _(b" template '%s' not found\n"),
1771 b"default",
1789 b"default",
1772 )
1790 )
1773 if not p:
1791 if not p:
1774 problems += 1
1792 problems += 1
1775 fm.condwrite(
1793 fm.condwrite(
1776 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1794 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1777 )
1795 )
1778
1796
1779 # editor
1797 # editor
1780 editor = ui.geteditor()
1798 editor = ui.geteditor()
1781 editor = util.expandpath(editor)
1799 editor = util.expandpath(editor)
1782 editorbin = procutil.shellsplit(editor)[0]
1800 editorbin = procutil.shellsplit(editor)[0]
1783 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1801 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1784 cmdpath = procutil.findexe(editorbin)
1802 cmdpath = procutil.findexe(editorbin)
1785 fm.condwrite(
1803 fm.condwrite(
1786 not cmdpath and editor == b'vi',
1804 not cmdpath and editor == b'vi',
1787 b'vinotfound',
1805 b'vinotfound',
1788 _(
1806 _(
1789 b" No commit editor set and can't find %s in PATH\n"
1807 b" No commit editor set and can't find %s in PATH\n"
1790 b" (specify a commit editor in your configuration"
1808 b" (specify a commit editor in your configuration"
1791 b" file)\n"
1809 b" file)\n"
1792 ),
1810 ),
1793 not cmdpath and editor == b'vi' and editorbin,
1811 not cmdpath and editor == b'vi' and editorbin,
1794 )
1812 )
1795 fm.condwrite(
1813 fm.condwrite(
1796 not cmdpath and editor != b'vi',
1814 not cmdpath and editor != b'vi',
1797 b'editornotfound',
1815 b'editornotfound',
1798 _(
1816 _(
1799 b" Can't find editor '%s' in PATH\n"
1817 b" Can't find editor '%s' in PATH\n"
1800 b" (specify a commit editor in your configuration"
1818 b" (specify a commit editor in your configuration"
1801 b" file)\n"
1819 b" file)\n"
1802 ),
1820 ),
1803 not cmdpath and editorbin,
1821 not cmdpath and editorbin,
1804 )
1822 )
1805 if not cmdpath and editor != b'vi':
1823 if not cmdpath and editor != b'vi':
1806 problems += 1
1824 problems += 1
1807
1825
1808 # check username
1826 # check username
1809 username = None
1827 username = None
1810 err = None
1828 err = None
1811 try:
1829 try:
1812 username = ui.username()
1830 username = ui.username()
1813 except error.Abort as e:
1831 except error.Abort as e:
1814 err = e.message
1832 err = e.message
1815 problems += 1
1833 problems += 1
1816
1834
1817 fm.condwrite(
1835 fm.condwrite(
1818 username, b'username', _(b"checking username (%s)\n"), username
1836 username, b'username', _(b"checking username (%s)\n"), username
1819 )
1837 )
1820 fm.condwrite(
1838 fm.condwrite(
1821 err,
1839 err,
1822 b'usernameerror',
1840 b'usernameerror',
1823 _(
1841 _(
1824 b"checking username...\n %s\n"
1842 b"checking username...\n %s\n"
1825 b" (specify a username in your configuration file)\n"
1843 b" (specify a username in your configuration file)\n"
1826 ),
1844 ),
1827 err,
1845 err,
1828 )
1846 )
1829
1847
1830 for name, mod in extensions.extensions():
1848 for name, mod in extensions.extensions():
1831 handler = getattr(mod, 'debuginstall', None)
1849 handler = getattr(mod, 'debuginstall', None)
1832 if handler is not None:
1850 if handler is not None:
1833 problems += handler(ui, fm)
1851 problems += handler(ui, fm)
1834
1852
1835 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1853 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1836 if not problems:
1854 if not problems:
1837 fm.data(problems=problems)
1855 fm.data(problems=problems)
1838 fm.condwrite(
1856 fm.condwrite(
1839 problems,
1857 problems,
1840 b'problems',
1858 b'problems',
1841 _(b"%d problems detected, please check your install!\n"),
1859 _(b"%d problems detected, please check your install!\n"),
1842 problems,
1860 problems,
1843 )
1861 )
1844 fm.end()
1862 fm.end()
1845
1863
1846 return problems
1864 return problems
1847
1865
1848
1866
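# Example usage (sketch): the command returns the number of problems found
# (0 on a clean install), and the formatter options allow structured output:
#
#     $ hg debuginstall
#     $ hg debuginstall -T json | python -m json.tool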
1849 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
1867 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
1850 def debugknown(ui, repopath, *ids, **opts):
1868 def debugknown(ui, repopath, *ids, **opts):
1851 """test whether node ids are known to a repo
1869 """test whether node ids are known to a repo
1852
1870
1853 Every ID must be a full-length hex node id string. Returns a list of 0s
1871 Every ID must be a full-length hex node id string. Returns a list of 0s
1854 and 1s indicating unknown/known.
1872 and 1s indicating unknown/known.
1855 """
1873 """
1856 opts = pycompat.byteskwargs(opts)
1874 opts = pycompat.byteskwargs(opts)
1857 repo = hg.peer(ui, opts, repopath)
1875 repo = hg.peer(ui, opts, repopath)
1858 if not repo.capable(b'known'):
1876 if not repo.capable(b'known'):
1859 raise error.Abort(b"known() not supported by target repository")
1877 raise error.Abort(b"known() not supported by target repository")
1860 flags = repo.known([bin(s) for s in ids])
1878 flags = repo.known([bin(s) for s in ids])
1861 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
1879 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
1862
1880
1863
1881
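# Example usage (sketch; the node id is a placeholder full 40-hex id):
#
#     $ hg debugknown . 0123456789012345678901234567890123456789
#     0
#
# Each position in the output is "1" if the peer knows that node and "0" if
# it does not.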
1864 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
1882 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
1865 def debuglabelcomplete(ui, repo, *args):
1883 def debuglabelcomplete(ui, repo, *args):
1866 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1884 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1867 debugnamecomplete(ui, repo, *args)
1885 debugnamecomplete(ui, repo, *args)
1868
1886
1869
1887
1870 @command(
1888 @command(
1871 b'debuglocks',
1889 b'debuglocks',
1872 [
1890 [
1873 (b'L', b'force-lock', None, _(b'free the store lock (DANGEROUS)')),
1891 (b'L', b'force-lock', None, _(b'free the store lock (DANGEROUS)')),
1874 (
1892 (
1875 b'W',
1893 b'W',
1876 b'force-wlock',
1894 b'force-wlock',
1877 None,
1895 None,
1878 _(b'free the working state lock (DANGEROUS)'),
1896 _(b'free the working state lock (DANGEROUS)'),
1879 ),
1897 ),
1880 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
1898 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
1881 (
1899 (
1882 b'S',
1900 b'S',
1883 b'set-wlock',
1901 b'set-wlock',
1884 None,
1902 None,
1885 _(b'set the working state lock until stopped'),
1903 _(b'set the working state lock until stopped'),
1886 ),
1904 ),
1887 ],
1905 ],
1888 _(b'[OPTION]...'),
1906 _(b'[OPTION]...'),
1889 )
1907 )
1890 def debuglocks(ui, repo, **opts):
1908 def debuglocks(ui, repo, **opts):
1891 """show or modify state of locks
1909 """show or modify state of locks
1892
1910
1893 By default, this command will show which locks are held. This
1911 By default, this command will show which locks are held. This
1894 includes the user and process holding the lock, the amount of time
1912 includes the user and process holding the lock, the amount of time
1895 the lock has been held, and the machine name where the process is
1913 the lock has been held, and the machine name where the process is
1896 running if it's not local.
1914 running if it's not local.
1897
1915
1898 Locks protect the integrity of Mercurial's data, so should be
1916 Locks protect the integrity of Mercurial's data, so should be
1899 treated with care. System crashes or other interruptions may cause
1917 treated with care. System crashes or other interruptions may cause
1900 locks to not be properly released, though Mercurial will usually
1918 locks to not be properly released, though Mercurial will usually
1901 detect and remove such stale locks automatically.
1919 detect and remove such stale locks automatically.
1902
1920
1903 However, detecting stale locks may not always be possible (for
1921 However, detecting stale locks may not always be possible (for
1904 instance, on a shared filesystem). Removing locks may also be
1922 instance, on a shared filesystem). Removing locks may also be
1905 blocked by filesystem permissions.
1923 blocked by filesystem permissions.
1906
1924
1907 Setting a lock will prevent other commands from changing the data.
1925 Setting a lock will prevent other commands from changing the data.
1908 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1926 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1909 The set locks are removed when the command exits.
1927 The set locks are removed when the command exits.
1910
1928
1911 Returns 0 if no locks are held.
1929 Returns 0 if no locks are held.
1912
1930
1913 """
1931 """
1914
1932
1915 if opts.get('force_lock'):
1933 if opts.get('force_lock'):
1916 repo.svfs.unlink(b'lock')
1934 repo.svfs.unlink(b'lock')
1917 if opts.get('force_wlock'):
1935 if opts.get('force_wlock'):
1918 repo.vfs.unlink(b'wlock')
1936 repo.vfs.unlink(b'wlock')
1919 if opts.get('force_lock') or opts.get('force_wlock'):
1937 if opts.get('force_lock') or opts.get('force_wlock'):
1920 return 0
1938 return 0
1921
1939
1922 locks = []
1940 locks = []
1923 try:
1941 try:
1924 if opts.get('set_wlock'):
1942 if opts.get('set_wlock'):
1925 try:
1943 try:
1926 locks.append(repo.wlock(False))
1944 locks.append(repo.wlock(False))
1927 except error.LockHeld:
1945 except error.LockHeld:
1928 raise error.Abort(_(b'wlock is already held'))
1946 raise error.Abort(_(b'wlock is already held'))
1929 if opts.get('set_lock'):
1947 if opts.get('set_lock'):
1930 try:
1948 try:
1931 locks.append(repo.lock(False))
1949 locks.append(repo.lock(False))
1932 except error.LockHeld:
1950 except error.LockHeld:
1933 raise error.Abort(_(b'lock is already held'))
1951 raise error.Abort(_(b'lock is already held'))
1934 if len(locks):
1952 if len(locks):
1935 ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
1953 ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
1936 return 0
1954 return 0
1937 finally:
1955 finally:
1938 release(*locks)
1956 release(*locks)
1939
1957
1940 now = time.time()
1958 now = time.time()
1941 held = 0
1959 held = 0
1942
1960
1943 def report(vfs, name, method):
1961 def report(vfs, name, method):
1944 # this causes stale locks to get reaped for more accurate reporting
1962 # this causes stale locks to get reaped for more accurate reporting
1945 try:
1963 try:
1946 l = method(False)
1964 l = method(False)
1947 except error.LockHeld:
1965 except error.LockHeld:
1948 l = None
1966 l = None
1949
1967
1950 if l:
1968 if l:
1951 l.release()
1969 l.release()
1952 else:
1970 else:
1953 try:
1971 try:
1954 st = vfs.lstat(name)
1972 st = vfs.lstat(name)
1955 age = now - st[stat.ST_MTIME]
1973 age = now - st[stat.ST_MTIME]
1956 user = util.username(st.st_uid)
1974 user = util.username(st.st_uid)
1957 locker = vfs.readlock(name)
1975 locker = vfs.readlock(name)
1958 if b":" in locker:
1976 if b":" in locker:
1959 host, pid = locker.split(b':')
1977 host, pid = locker.split(b':')
1960 if host == socket.gethostname():
1978 if host == socket.gethostname():
1961 locker = b'user %s, process %s' % (user or b'None', pid)
1979 locker = b'user %s, process %s' % (user or b'None', pid)
1962 else:
1980 else:
1963 locker = b'user %s, process %s, host %s' % (
1981 locker = b'user %s, process %s, host %s' % (
1964 user or b'None',
1982 user or b'None',
1965 pid,
1983 pid,
1966 host,
1984 host,
1967 )
1985 )
1968 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
1986 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
1969 return 1
1987 return 1
1970 except OSError as e:
1988 except OSError as e:
1971 if e.errno != errno.ENOENT:
1989 if e.errno != errno.ENOENT:
1972 raise
1990 raise
1973
1991
1974 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
1992 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
1975 return 0
1993 return 0
1976
1994
1977 held += report(repo.svfs, b"lock", repo.lock)
1995 held += report(repo.svfs, b"lock", repo.lock)
1978 held += report(repo.vfs, b"wlock", repo.wlock)
1996 held += report(repo.vfs, b"wlock", repo.wlock)
1979
1997
1980 return held
1998 return held
1981
1999
1982
2000
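# Illustrative sketch (not part of Mercurial): how a raw lock string of the
# form b'host:pid', as read by report() above via vfs.readlock(), can be
# turned into the same human-readable description.  The helper name and the
# hostname encoding are assumptions made for this example only.
import socket


def describe_locker(locker, user=None):
    """Return a readable description for a b'host:pid' lock string."""
    if b':' not in locker:
        return locker
    host, pid = locker.split(b':', 1)
    localhost = socket.gethostname().encode('ascii', 'replace')
    if host == localhost:
        return b'user %s, process %s' % (user or b'None', pid)
    return b'user %s, process %s, host %s' % (user or b'None', pid, host)


# e.g. describe_locker(b'myhost:12345', user=b'alice')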
1983 @command(
2001 @command(
1984 b'debugmanifestfulltextcache',
2002 b'debugmanifestfulltextcache',
1985 [
2003 [
1986 (b'', b'clear', False, _(b'clear the cache')),
2004 (b'', b'clear', False, _(b'clear the cache')),
1987 (
2005 (
1988 b'a',
2006 b'a',
1989 b'add',
2007 b'add',
1990 [],
2008 [],
1991 _(b'add the given manifest nodes to the cache'),
2009 _(b'add the given manifest nodes to the cache'),
1992 _(b'NODE'),
2010 _(b'NODE'),
1993 ),
2011 ),
1994 ],
2012 ],
1995 b'',
2013 b'',
1996 )
2014 )
1997 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
2015 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
1998 """show, clear or amend the contents of the manifest fulltext cache"""
2016 """show, clear or amend the contents of the manifest fulltext cache"""
1999
2017
2000 def getcache():
2018 def getcache():
2001 r = repo.manifestlog.getstorage(b'')
2019 r = repo.manifestlog.getstorage(b'')
2002 try:
2020 try:
2003 return r._fulltextcache
2021 return r._fulltextcache
2004 except AttributeError:
2022 except AttributeError:
2005 msg = _(
2023 msg = _(
2006 b"Current revlog implementation doesn't appear to have a "
2024 b"Current revlog implementation doesn't appear to have a "
2007 b"manifest fulltext cache\n"
2025 b"manifest fulltext cache\n"
2008 )
2026 )
2009 raise error.Abort(msg)
2027 raise error.Abort(msg)
2010
2028
2011 if opts.get('clear'):
2029 if opts.get('clear'):
2012 with repo.wlock():
2030 with repo.wlock():
2013 cache = getcache()
2031 cache = getcache()
2014 cache.clear(clear_persisted_data=True)
2032 cache.clear(clear_persisted_data=True)
2015 return
2033 return
2016
2034
2017 if add:
2035 if add:
2018 with repo.wlock():
2036 with repo.wlock():
2019 m = repo.manifestlog
2037 m = repo.manifestlog
2020 store = m.getstorage(b'')
2038 store = m.getstorage(b'')
2021 for n in add:
2039 for n in add:
2022 try:
2040 try:
2023 manifest = m[store.lookup(n)]
2041 manifest = m[store.lookup(n)]
2024 except error.LookupError as e:
2042 except error.LookupError as e:
2025 raise error.Abort(e, hint=b"Check your manifest node id")
2043 raise error.Abort(e, hint=b"Check your manifest node id")
2026 manifest.read() # stores revision in cache too
2044 manifest.read() # stores revision in cache too
2027 return
2045 return
2028
2046
2029 cache = getcache()
2047 cache = getcache()
2030 if not len(cache):
2048 if not len(cache):
2031 ui.write(_(b'cache empty\n'))
2049 ui.write(_(b'cache empty\n'))
2032 else:
2050 else:
2033 ui.write(
2051 ui.write(
2034 _(
2052 _(
2035 b'cache contains %d manifest entries, in order of most to '
2053 b'cache contains %d manifest entries, in order of most to '
2036 b'least recent:\n'
2054 b'least recent:\n'
2037 )
2055 )
2038 % (len(cache),)
2056 % (len(cache),)
2039 )
2057 )
2040 totalsize = 0
2058 totalsize = 0
2041 for nodeid in cache:
2059 for nodeid in cache:
2042 # Use cache.peek to not update the LRU order
2060 # Use cache.peek to not update the LRU order
2043 data = cache.peek(nodeid)
2061 data = cache.peek(nodeid)
2044 size = len(data)
2062 size = len(data)
2045 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
2063 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
2046 ui.write(
2064 ui.write(
2047 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
2065 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
2048 )
2066 )
2049 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
2067 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
2050 ui.write(
2068 ui.write(
2051 _(b'total cache data size %s, on-disk %s\n')
2069 _(b'total cache data size %s, on-disk %s\n')
2052 % (util.bytecount(totalsize), util.bytecount(ondisk))
2070 % (util.bytecount(totalsize), util.bytecount(ondisk))
2053 )
2071 )
2054
2072
2055
2073
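# Hypothetical helper mirroring the accounting performed above: each cache
# entry is charged len(data) plus 24 bytes of overhead (20-byte node id and
# a 4-byte size field).  `entries` is assumed to map node ids to manifest
# fulltexts; this is only an illustration, not code used by the command.
def estimated_cache_size(entries):
    return sum(len(data) + 24 for data in entries.values())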
2056 @command(b'debugmergestate', [] + cmdutil.templateopts, b'')
2074 @command(b'debugmergestate', [] + cmdutil.templateopts, b'')
2057 def debugmergestate(ui, repo, *args, **opts):
2075 def debugmergestate(ui, repo, *args, **opts):
2058 """print merge state
2076 """print merge state
2059
2077
2060 Use --verbose to print out information about whether v1 or v2 merge state
2078 Use --verbose to print out information about whether v1 or v2 merge state
2061 was chosen."""
2079 was chosen."""
2062
2080
2063 if ui.verbose:
2081 if ui.verbose:
2064 ms = mergestatemod.mergestate(repo)
2082 ms = mergestatemod.mergestate(repo)
2065
2083
2066 # sort so that reasonable information is on top
2084 # sort so that reasonable information is on top
2067 v1records = ms._readrecordsv1()
2085 v1records = ms._readrecordsv1()
2068 v2records = ms._readrecordsv2()
2086 v2records = ms._readrecordsv2()
2069
2087
2070 if not v1records and not v2records:
2088 if not v1records and not v2records:
2071 pass
2089 pass
2072 elif not v2records:
2090 elif not v2records:
2073 ui.writenoi18n(b'no version 2 merge state\n')
2091 ui.writenoi18n(b'no version 2 merge state\n')
2074 elif ms._v1v2match(v1records, v2records):
2092 elif ms._v1v2match(v1records, v2records):
2075 ui.writenoi18n(b'v1 and v2 states match: using v2\n')
2093 ui.writenoi18n(b'v1 and v2 states match: using v2\n')
2076 else:
2094 else:
2077 ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')
2095 ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')
2078
2096
2079 opts = pycompat.byteskwargs(opts)
2097 opts = pycompat.byteskwargs(opts)
2080 if not opts[b'template']:
2098 if not opts[b'template']:
2081 opts[b'template'] = (
2099 opts[b'template'] = (
2082 b'{if(commits, "", "no merge state found\n")}'
2100 b'{if(commits, "", "no merge state found\n")}'
2083 b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
2101 b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
2084 b'{files % "file: {path} (state \\"{state}\\")\n'
2102 b'{files % "file: {path} (state \\"{state}\\")\n'
2085 b'{if(local_path, "'
2103 b'{if(local_path, "'
2086 b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
2104 b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
2087 b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
2105 b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
2088 b' other path: {other_path} (node {other_node})\n'
2106 b' other path: {other_path} (node {other_node})\n'
2089 b'")}'
2107 b'")}'
2090 b'{if(rename_side, "'
2108 b'{if(rename_side, "'
2091 b' rename side: {rename_side}\n'
2109 b' rename side: {rename_side}\n'
2092 b' renamed path: {renamed_path}\n'
2110 b' renamed path: {renamed_path}\n'
2093 b'")}'
2111 b'")}'
2094 b'{extras % " extra: {key} = {value}\n"}'
2112 b'{extras % " extra: {key} = {value}\n"}'
2095 b'"}'
2113 b'"}'
2096 b'{extras % "extra: {file} ({key} = {value})\n"}'
2114 b'{extras % "extra: {file} ({key} = {value})\n"}'
2097 )
2115 )
2098
2116
2099 ms = mergestatemod.mergestate.read(repo)
2117 ms = mergestatemod.mergestate.read(repo)
2100
2118
2101 fm = ui.formatter(b'debugmergestate', opts)
2119 fm = ui.formatter(b'debugmergestate', opts)
2102 fm.startitem()
2120 fm.startitem()
2103
2121
2104 fm_commits = fm.nested(b'commits')
2122 fm_commits = fm.nested(b'commits')
2105 if ms.active():
2123 if ms.active():
2106 for name, node, label_index in (
2124 for name, node, label_index in (
2107 (b'local', ms.local, 0),
2125 (b'local', ms.local, 0),
2108 (b'other', ms.other, 1),
2126 (b'other', ms.other, 1),
2109 ):
2127 ):
2110 fm_commits.startitem()
2128 fm_commits.startitem()
2111 fm_commits.data(name=name)
2129 fm_commits.data(name=name)
2112 fm_commits.data(node=hex(node))
2130 fm_commits.data(node=hex(node))
2113 if ms._labels and len(ms._labels) > label_index:
2131 if ms._labels and len(ms._labels) > label_index:
2114 fm_commits.data(label=ms._labels[label_index])
2132 fm_commits.data(label=ms._labels[label_index])
2115 fm_commits.end()
2133 fm_commits.end()
2116
2134
2117 fm_files = fm.nested(b'files')
2135 fm_files = fm.nested(b'files')
2118 if ms.active():
2136 if ms.active():
2119 for f in ms:
2137 for f in ms:
2120 fm_files.startitem()
2138 fm_files.startitem()
2121 fm_files.data(path=f)
2139 fm_files.data(path=f)
2122 state = ms._state[f]
2140 state = ms._state[f]
2123 fm_files.data(state=state[0])
2141 fm_files.data(state=state[0])
2124 if state[0] in (
2142 if state[0] in (
2125 mergestatemod.MERGE_RECORD_UNRESOLVED,
2143 mergestatemod.MERGE_RECORD_UNRESOLVED,
2126 mergestatemod.MERGE_RECORD_RESOLVED,
2144 mergestatemod.MERGE_RECORD_RESOLVED,
2127 ):
2145 ):
2128 fm_files.data(local_key=state[1])
2146 fm_files.data(local_key=state[1])
2129 fm_files.data(local_path=state[2])
2147 fm_files.data(local_path=state[2])
2130 fm_files.data(ancestor_path=state[3])
2148 fm_files.data(ancestor_path=state[3])
2131 fm_files.data(ancestor_node=state[4])
2149 fm_files.data(ancestor_node=state[4])
2132 fm_files.data(other_path=state[5])
2150 fm_files.data(other_path=state[5])
2133 fm_files.data(other_node=state[6])
2151 fm_files.data(other_node=state[6])
2134 fm_files.data(local_flags=state[7])
2152 fm_files.data(local_flags=state[7])
2135 elif state[0] in (
2153 elif state[0] in (
2136 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
2154 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
2137 mergestatemod.MERGE_RECORD_RESOLVED_PATH,
2155 mergestatemod.MERGE_RECORD_RESOLVED_PATH,
2138 ):
2156 ):
2139 fm_files.data(renamed_path=state[1])
2157 fm_files.data(renamed_path=state[1])
2140 fm_files.data(rename_side=state[2])
2158 fm_files.data(rename_side=state[2])
2141 fm_extras = fm_files.nested(b'extras')
2159 fm_extras = fm_files.nested(b'extras')
2142 for k, v in sorted(ms.extras(f).items()):
2160 for k, v in sorted(ms.extras(f).items()):
2143 fm_extras.startitem()
2161 fm_extras.startitem()
2144 fm_extras.data(key=k)
2162 fm_extras.data(key=k)
2145 fm_extras.data(value=v)
2163 fm_extras.data(value=v)
2146 fm_extras.end()
2164 fm_extras.end()
2147
2165
2148 fm_files.end()
2166 fm_files.end()
2149
2167
2150 fm_extras = fm.nested(b'extras')
2168 fm_extras = fm.nested(b'extras')
2151 for f, d in sorted(pycompat.iteritems(ms.allextras())):
2169 for f, d in sorted(pycompat.iteritems(ms.allextras())):
2152 if f in ms:
2170 if f in ms:
2153 # If file is in mergestate, we have already processed its extras
2171 # If file is in mergestate, we have already processed its extras
2154 continue
2172 continue
2155 for k, v in pycompat.iteritems(d):
2173 for k, v in pycompat.iteritems(d):
2156 fm_extras.startitem()
2174 fm_extras.startitem()
2157 fm_extras.data(file=f)
2175 fm_extras.data(file=f)
2158 fm_extras.data(key=k)
2176 fm_extras.data(key=k)
2159 fm_extras.data(value=v)
2177 fm_extras.data(value=v)
2160 fm_extras.end()
2178 fm_extras.end()
2161
2179
2162 fm.end()
2180 fm.end()
2163
2181
2164
2182
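# Sketch of querying the same merge state programmatically.  It reuses only
# names visible above (mergestatemod, MERGE_RECORD_UNRESOLVED, ms._state);
# the helper itself is hypothetical and assumes an in-process `repo`.
def unresolved_files(repo):
    ms = mergestatemod.mergestate.read(repo)
    if not ms.active():
        return []
    return [
        f
        for f in ms
        if ms._state[f][0] == mergestatemod.MERGE_RECORD_UNRESOLVED
    ]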
2165 @command(b'debugnamecomplete', [], _(b'NAME...'))
2183 @command(b'debugnamecomplete', [], _(b'NAME...'))
2166 def debugnamecomplete(ui, repo, *args):
2184 def debugnamecomplete(ui, repo, *args):
2167 '''complete "names" - tags, open branch names, bookmark names'''
2185 '''complete "names" - tags, open branch names, bookmark names'''
2168
2186
2169 names = set()
2187 names = set()
2170 # since we previously only listed open branches, we will handle that
2188 # since we previously only listed open branches, we will handle that
2171 # specially (after this for loop)
2189 # specially (after this for loop)
2172 for name, ns in pycompat.iteritems(repo.names):
2190 for name, ns in pycompat.iteritems(repo.names):
2173 if name != b'branches':
2191 if name != b'branches':
2174 names.update(ns.listnames(repo))
2192 names.update(ns.listnames(repo))
2175 names.update(
2193 names.update(
2176 tag
2194 tag
2177 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2195 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2178 if not closed
2196 if not closed
2179 )
2197 )
2180 completions = set()
2198 completions = set()
2181 if not args:
2199 if not args:
2182 args = [b'']
2200 args = [b'']
2183 for a in args:
2201 for a in args:
2184 completions.update(n for n in names if n.startswith(a))
2202 completions.update(n for n in names if n.startswith(a))
2185 ui.write(b'\n'.join(sorted(completions)))
2203 ui.write(b'\n'.join(sorted(completions)))
2186 ui.write(b'\n')
2204 ui.write(b'\n')
2187
2205
2188
2206
2189 @command(
2207 @command(
2190 b'debugnodemap',
2208 b'debugnodemap',
2191 [
2209 [
2192 (
2210 (
2193 b'',
2211 b'',
2194 b'dump-new',
2212 b'dump-new',
2195 False,
2213 False,
2196 _(b'write a (new) persistent binary nodemap on stdout'),
2214 _(b'write a (new) persistent binary nodemap on stdout'),
2197 ),
2215 ),
2198 (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
2216 (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
2199 (
2217 (
2200 b'',
2218 b'',
2201 b'check',
2219 b'check',
2202 False,
2220 False,
2203 _(b'check that the data on disk are correct.'),
2221 _(b'check that the data on disk are correct.'),
2204 ),
2222 ),
2205 (
2223 (
2206 b'',
2224 b'',
2207 b'metadata',
2225 b'metadata',
2208 False,
2226 False,
2209 _(b'display the on-disk metadata for the nodemap'),
2227 _(b'display the on-disk metadata for the nodemap'),
2210 ),
2228 ),
2211 ],
2229 ],
2212 )
2230 )
2213 def debugnodemap(ui, repo, **opts):
2231 def debugnodemap(ui, repo, **opts):
2214 """write and inspect on disk nodemap"""
2232 """write and inspect on disk nodemap"""
2215 if opts['dump_new']:
2233 if opts['dump_new']:
2216 unfi = repo.unfiltered()
2234 unfi = repo.unfiltered()
2217 cl = unfi.changelog
2235 cl = unfi.changelog
2218 if util.safehasattr(cl.index, "nodemap_data_all"):
2236 if util.safehasattr(cl.index, "nodemap_data_all"):
2219 data = cl.index.nodemap_data_all()
2237 data = cl.index.nodemap_data_all()
2220 else:
2238 else:
2221 data = nodemap.persistent_data(cl.index)
2239 data = nodemap.persistent_data(cl.index)
2222 ui.write(data)
2240 ui.write(data)
2223 elif opts['dump_disk']:
2241 elif opts['dump_disk']:
2224 unfi = repo.unfiltered()
2242 unfi = repo.unfiltered()
2225 cl = unfi.changelog
2243 cl = unfi.changelog
2226 nm_data = nodemap.persisted_data(cl)
2244 nm_data = nodemap.persisted_data(cl)
2227 if nm_data is not None:
2245 if nm_data is not None:
2228 docket, data = nm_data
2246 docket, data = nm_data
2229 ui.write(data[:])
2247 ui.write(data[:])
2230 elif opts['check']:
2248 elif opts['check']:
2231 unfi = repo.unfiltered()
2249 unfi = repo.unfiltered()
2232 cl = unfi.changelog
2250 cl = unfi.changelog
2233 nm_data = nodemap.persisted_data(cl)
2251 nm_data = nodemap.persisted_data(cl)
2234 if nm_data is not None:
2252 if nm_data is not None:
2235 docket, data = nm_data
2253 docket, data = nm_data
2236 return nodemap.check_data(ui, cl.index, data)
2254 return nodemap.check_data(ui, cl.index, data)
2237 elif opts['metadata']:
2255 elif opts['metadata']:
2238 unfi = repo.unfiltered()
2256 unfi = repo.unfiltered()
2239 cl = unfi.changelog
2257 cl = unfi.changelog
2240 nm_data = nodemap.persisted_data(cl)
2258 nm_data = nodemap.persisted_data(cl)
2241 if nm_data is not None:
2259 if nm_data is not None:
2242 docket, data = nm_data
2260 docket, data = nm_data
2243 ui.write((b"uid: %s\n") % docket.uid)
2261 ui.write((b"uid: %s\n") % docket.uid)
2244 ui.write((b"tip-rev: %d\n") % docket.tip_rev)
2262 ui.write((b"tip-rev: %d\n") % docket.tip_rev)
2245 ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
2263 ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
2246 ui.write((b"data-length: %d\n") % docket.data_length)
2264 ui.write((b"data-length: %d\n") % docket.data_length)
2247 ui.write((b"data-unused: %d\n") % docket.data_unused)
2265 ui.write((b"data-unused: %d\n") % docket.data_unused)
2248 unused_perc = docket.data_unused * 100.0 / docket.data_length
2266 unused_perc = docket.data_unused * 100.0 / docket.data_length
2249 ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2267 ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2250
2268
2251
2269
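# Small sketch of the docket arithmetic shown above: the metadata branch
# reports data_unused as a percentage of data_length.  A standalone helper
# (hypothetical, with an added guard for an empty docket) would look like:
def unused_percentage(data_unused, data_length):
    if not data_length:
        return 0.0
    return data_unused * 100.0 / data_length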
2252 @command(
2270 @command(
2253 b'debugobsolete',
2271 b'debugobsolete',
2254 [
2272 [
2255 (b'', b'flags', 0, _(b'markers flag')),
2273 (b'', b'flags', 0, _(b'markers flag')),
2256 (
2274 (
2257 b'',
2275 b'',
2258 b'record-parents',
2276 b'record-parents',
2259 False,
2277 False,
2260 _(b'record parent information for the precursor'),
2278 _(b'record parent information for the precursor'),
2261 ),
2279 ),
2262 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2280 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2263 (
2281 (
2264 b'',
2282 b'',
2265 b'exclusive',
2283 b'exclusive',
2266 False,
2284 False,
2267 _(b'restrict display to markers only relevant to REV'),
2285 _(b'restrict display to markers only relevant to REV'),
2268 ),
2286 ),
2269 (b'', b'index', False, _(b'display index of the marker')),
2287 (b'', b'index', False, _(b'display index of the marker')),
2270 (b'', b'delete', [], _(b'delete markers specified by indices')),
2288 (b'', b'delete', [], _(b'delete markers specified by indices')),
2271 ]
2289 ]
2272 + cmdutil.commitopts2
2290 + cmdutil.commitopts2
2273 + cmdutil.formatteropts,
2291 + cmdutil.formatteropts,
2274 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2292 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2275 )
2293 )
2276 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2294 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2277 """create arbitrary obsolete marker
2295 """create arbitrary obsolete marker
2278
2296
2279 With no arguments, displays the list of obsolescence markers."""
2297 With no arguments, displays the list of obsolescence markers."""
2280
2298
2281 opts = pycompat.byteskwargs(opts)
2299 opts = pycompat.byteskwargs(opts)
2282
2300
2283 def parsenodeid(s):
2301 def parsenodeid(s):
2284 try:
2302 try:
2285 # We do not use revsingle/revrange functions here to accept
2303 # We do not use revsingle/revrange functions here to accept
2286 # arbitrary node identifiers, possibly not present in the
2304 # arbitrary node identifiers, possibly not present in the
2287 # local repository.
2305 # local repository.
2288 n = bin(s)
2306 n = bin(s)
2289 if len(n) != len(nullid):
2307 if len(n) != len(nullid):
2290 raise TypeError()
2308 raise TypeError()
2291 return n
2309 return n
2292 except TypeError:
2310 except TypeError:
2293 raise error.InputError(
2311 raise error.InputError(
2294 b'changeset references must be full hexadecimal '
2312 b'changeset references must be full hexadecimal '
2295 b'node identifiers'
2313 b'node identifiers'
2296 )
2314 )
2297
2315
2298 if opts.get(b'delete'):
2316 if opts.get(b'delete'):
2299 indices = []
2317 indices = []
2300 for v in opts.get(b'delete'):
2318 for v in opts.get(b'delete'):
2301 try:
2319 try:
2302 indices.append(int(v))
2320 indices.append(int(v))
2303 except ValueError:
2321 except ValueError:
2304 raise error.InputError(
2322 raise error.InputError(
2305 _(b'invalid index value: %r') % v,
2323 _(b'invalid index value: %r') % v,
2306 hint=_(b'use integers for indices'),
2324 hint=_(b'use integers for indices'),
2307 )
2325 )
2308
2326
2309 if repo.currenttransaction():
2327 if repo.currenttransaction():
2310 raise error.Abort(
2328 raise error.Abort(
2311 _(b'cannot delete obsmarkers in the middle of a transaction.')
2329 _(b'cannot delete obsmarkers in the middle of a transaction.')
2312 )
2330 )
2313
2331
2314 with repo.lock():
2332 with repo.lock():
2315 n = repair.deleteobsmarkers(repo.obsstore, indices)
2333 n = repair.deleteobsmarkers(repo.obsstore, indices)
2316 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2334 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2317
2335
2318 return
2336 return
2319
2337
2320 if precursor is not None:
2338 if precursor is not None:
2321 if opts[b'rev']:
2339 if opts[b'rev']:
2322 raise error.InputError(
2340 raise error.InputError(
2323 b'cannot select revision when creating marker'
2341 b'cannot select revision when creating marker'
2324 )
2342 )
2325 metadata = {}
2343 metadata = {}
2326 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2344 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2327 succs = tuple(parsenodeid(succ) for succ in successors)
2345 succs = tuple(parsenodeid(succ) for succ in successors)
2328 l = repo.lock()
2346 l = repo.lock()
2329 try:
2347 try:
2330 tr = repo.transaction(b'debugobsolete')
2348 tr = repo.transaction(b'debugobsolete')
2331 try:
2349 try:
2332 date = opts.get(b'date')
2350 date = opts.get(b'date')
2333 if date:
2351 if date:
2334 date = dateutil.parsedate(date)
2352 date = dateutil.parsedate(date)
2335 else:
2353 else:
2336 date = None
2354 date = None
2337 prec = parsenodeid(precursor)
2355 prec = parsenodeid(precursor)
2338 parents = None
2356 parents = None
2339 if opts[b'record_parents']:
2357 if opts[b'record_parents']:
2340 if prec not in repo.unfiltered():
2358 if prec not in repo.unfiltered():
2341 raise error.Abort(
2359 raise error.Abort(
2342 b'cannot use --record-parents on '
2360 b'cannot use --record-parents on '
2343 b'unknown changesets'
2361 b'unknown changesets'
2344 )
2362 )
2345 parents = repo.unfiltered()[prec].parents()
2363 parents = repo.unfiltered()[prec].parents()
2346 parents = tuple(p.node() for p in parents)
2364 parents = tuple(p.node() for p in parents)
2347 repo.obsstore.create(
2365 repo.obsstore.create(
2348 tr,
2366 tr,
2349 prec,
2367 prec,
2350 succs,
2368 succs,
2351 opts[b'flags'],
2369 opts[b'flags'],
2352 parents=parents,
2370 parents=parents,
2353 date=date,
2371 date=date,
2354 metadata=metadata,
2372 metadata=metadata,
2355 ui=ui,
2373 ui=ui,
2356 )
2374 )
2357 tr.close()
2375 tr.close()
2358 except ValueError as exc:
2376 except ValueError as exc:
2359 raise error.Abort(
2377 raise error.Abort(
2360 _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
2378 _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
2361 )
2379 )
2362 finally:
2380 finally:
2363 tr.release()
2381 tr.release()
2364 finally:
2382 finally:
2365 l.release()
2383 l.release()
2366 else:
2384 else:
2367 if opts[b'rev']:
2385 if opts[b'rev']:
2368 revs = scmutil.revrange(repo, opts[b'rev'])
2386 revs = scmutil.revrange(repo, opts[b'rev'])
2369 nodes = [repo[r].node() for r in revs]
2387 nodes = [repo[r].node() for r in revs]
2370 markers = list(
2388 markers = list(
2371 obsutil.getmarkers(
2389 obsutil.getmarkers(
2372 repo, nodes=nodes, exclusive=opts[b'exclusive']
2390 repo, nodes=nodes, exclusive=opts[b'exclusive']
2373 )
2391 )
2374 )
2392 )
2375 markers.sort(key=lambda x: x._data)
2393 markers.sort(key=lambda x: x._data)
2376 else:
2394 else:
2377 markers = obsutil.getmarkers(repo)
2395 markers = obsutil.getmarkers(repo)
2378
2396
2379 markerstoiter = markers
2397 markerstoiter = markers
2380 isrelevant = lambda m: True
2398 isrelevant = lambda m: True
2381 if opts.get(b'rev') and opts.get(b'index'):
2399 if opts.get(b'rev') and opts.get(b'index'):
2382 markerstoiter = obsutil.getmarkers(repo)
2400 markerstoiter = obsutil.getmarkers(repo)
2383 markerset = set(markers)
2401 markerset = set(markers)
2384 isrelevant = lambda m: m in markerset
2402 isrelevant = lambda m: m in markerset
2385
2403
2386 fm = ui.formatter(b'debugobsolete', opts)
2404 fm = ui.formatter(b'debugobsolete', opts)
2387 for i, m in enumerate(markerstoiter):
2405 for i, m in enumerate(markerstoiter):
2388 if not isrelevant(m):
2406 if not isrelevant(m):
2389 # marker can be irrelevant when we're iterating over a set
2407 # marker can be irrelevant when we're iterating over a set
2390 # of markers (markerstoiter) which is bigger than the set
2408 # of markers (markerstoiter) which is bigger than the set
2391 # of markers we want to display (markers)
2409 # of markers we want to display (markers)
2392 # this can happen if both --index and --rev options are
2410 # this can happen if both --index and --rev options are
2393 # provided and thus we need to iterate over all of the markers
2411 # provided and thus we need to iterate over all of the markers
2394 # to get the correct indices, but only display the ones that
2412 # to get the correct indices, but only display the ones that
2395 # are relevant to --rev value
2413 # are relevant to --rev value
2396 continue
2414 continue
2397 fm.startitem()
2415 fm.startitem()
2398 ind = i if opts.get(b'index') else None
2416 ind = i if opts.get(b'index') else None
2399 cmdutil.showmarker(fm, m, index=ind)
2417 cmdutil.showmarker(fm, m, index=ind)
2400 fm.end()
2418 fm.end()
2401
2419
2402
2420
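# Minimal sketch of the validation done by parsenodeid() above: a changeset
# reference must be the full hexadecimal form, so bin() has to yield a node
# of the expected length.  The helper name is hypothetical; bin and nullid
# come from the imports at the top of this module.
def is_full_node_id(s):
    try:
        return len(bin(s)) == len(nullid)
    except (TypeError, ValueError):
        return False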
2403 @command(
2421 @command(
2404 b'debugp1copies',
2422 b'debugp1copies',
2405 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2423 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2406 _(b'[-r REV]'),
2424 _(b'[-r REV]'),
2407 )
2425 )
2408 def debugp1copies(ui, repo, **opts):
2426 def debugp1copies(ui, repo, **opts):
2409 """dump copy information compared to p1"""
2427 """dump copy information compared to p1"""
2410
2428
2411 opts = pycompat.byteskwargs(opts)
2429 opts = pycompat.byteskwargs(opts)
2412 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2430 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2413 for dst, src in ctx.p1copies().items():
2431 for dst, src in ctx.p1copies().items():
2414 ui.write(b'%s -> %s\n' % (src, dst))
2432 ui.write(b'%s -> %s\n' % (src, dst))
2415
2433
2416
2434
2417 @command(
2435 @command(
2418 b'debugp2copies',
2436 b'debugp2copies',
2419 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2437 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2420 _(b'[-r REV]'),
2438 _(b'[-r REV]'),
2421 )
2439 )
2422 def debugp2copies(ui, repo, **opts):
2440 def debugp2copies(ui, repo, **opts):
2423 """dump copy information compared to p2"""
2441 """dump copy information compared to p2"""
2424
2442
2425 opts = pycompat.byteskwargs(opts)
2443 opts = pycompat.byteskwargs(opts)
2426 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2444 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2427 for dst, src in ctx.p2copies().items():
2445 for dst, src in ctx.p2copies().items():
2428 ui.write(b'%s -> %s\n' % (src, dst))
2446 ui.write(b'%s -> %s\n' % (src, dst))
2429
2447
2430
2448
2431 @command(
2449 @command(
2432 b'debugpathcomplete',
2450 b'debugpathcomplete',
2433 [
2451 [
2434 (b'f', b'full', None, _(b'complete an entire path')),
2452 (b'f', b'full', None, _(b'complete an entire path')),
2435 (b'n', b'normal', None, _(b'show only normal files')),
2453 (b'n', b'normal', None, _(b'show only normal files')),
2436 (b'a', b'added', None, _(b'show only added files')),
2454 (b'a', b'added', None, _(b'show only added files')),
2437 (b'r', b'removed', None, _(b'show only removed files')),
2455 (b'r', b'removed', None, _(b'show only removed files')),
2438 ],
2456 ],
2439 _(b'FILESPEC...'),
2457 _(b'FILESPEC...'),
2440 )
2458 )
2441 def debugpathcomplete(ui, repo, *specs, **opts):
2459 def debugpathcomplete(ui, repo, *specs, **opts):
2442 """complete part or all of a tracked path
2460 """complete part or all of a tracked path
2443
2461
2444 This command supports shells that offer path name completion. It
2462 This command supports shells that offer path name completion. It
2445 currently completes only files already known to the dirstate.
2463 currently completes only files already known to the dirstate.
2446
2464
2447 Completion extends only to the next path segment unless
2465 Completion extends only to the next path segment unless
2448 --full is specified, in which case entire paths are used."""
2466 --full is specified, in which case entire paths are used."""
2449
2467
2450 def complete(path, acceptable):
2468 def complete(path, acceptable):
2451 dirstate = repo.dirstate
2469 dirstate = repo.dirstate
2452 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2470 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2453 rootdir = repo.root + pycompat.ossep
2471 rootdir = repo.root + pycompat.ossep
2454 if spec != repo.root and not spec.startswith(rootdir):
2472 if spec != repo.root and not spec.startswith(rootdir):
2455 return [], []
2473 return [], []
2456 if os.path.isdir(spec):
2474 if os.path.isdir(spec):
2457 spec += b'/'
2475 spec += b'/'
2458 spec = spec[len(rootdir) :]
2476 spec = spec[len(rootdir) :]
2459 fixpaths = pycompat.ossep != b'/'
2477 fixpaths = pycompat.ossep != b'/'
2460 if fixpaths:
2478 if fixpaths:
2461 spec = spec.replace(pycompat.ossep, b'/')
2479 spec = spec.replace(pycompat.ossep, b'/')
2462 speclen = len(spec)
2480 speclen = len(spec)
2463 fullpaths = opts['full']
2481 fullpaths = opts['full']
2464 files, dirs = set(), set()
2482 files, dirs = set(), set()
2465 adddir, addfile = dirs.add, files.add
2483 adddir, addfile = dirs.add, files.add
2466 for f, st in pycompat.iteritems(dirstate):
2484 for f, st in pycompat.iteritems(dirstate):
2467 if f.startswith(spec) and st[0] in acceptable:
2485 if f.startswith(spec) and st[0] in acceptable:
2468 if fixpaths:
2486 if fixpaths:
2469 f = f.replace(b'/', pycompat.ossep)
2487 f = f.replace(b'/', pycompat.ossep)
2470 if fullpaths:
2488 if fullpaths:
2471 addfile(f)
2489 addfile(f)
2472 continue
2490 continue
2473 s = f.find(pycompat.ossep, speclen)
2491 s = f.find(pycompat.ossep, speclen)
2474 if s >= 0:
2492 if s >= 0:
2475 adddir(f[:s])
2493 adddir(f[:s])
2476 else:
2494 else:
2477 addfile(f)
2495 addfile(f)
2478 return files, dirs
2496 return files, dirs
2479
2497
2480 acceptable = b''
2498 acceptable = b''
2481 if opts['normal']:
2499 if opts['normal']:
2482 acceptable += b'nm'
2500 acceptable += b'nm'
2483 if opts['added']:
2501 if opts['added']:
2484 acceptable += b'a'
2502 acceptable += b'a'
2485 if opts['removed']:
2503 if opts['removed']:
2486 acceptable += b'r'
2504 acceptable += b'r'
2487 cwd = repo.getcwd()
2505 cwd = repo.getcwd()
2488 if not specs:
2506 if not specs:
2489 specs = [b'.']
2507 specs = [b'.']
2490
2508
2491 files, dirs = set(), set()
2509 files, dirs = set(), set()
2492 for spec in specs:
2510 for spec in specs:
2493 f, d = complete(spec, acceptable or b'nmar')
2511 f, d = complete(spec, acceptable or b'nmar')
2494 files.update(f)
2512 files.update(f)
2495 dirs.update(d)
2513 dirs.update(d)
2496 files.update(dirs)
2514 files.update(dirs)
2497 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2515 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2498 ui.write(b'\n')
2516 ui.write(b'\n')
2499
2517
2500
2518
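# Toy illustration of the "complete to the next path segment" rule used by
# complete() above: unless --full is given, a match is truncated at the
# first path separator past the typed prefix.  The helper name is made up.
def next_segment(path, prefix, full=False):
    if full:
        return path
    s = path.find(b'/', len(prefix))
    return path[:s] if s >= 0 else path


# next_segment(b'dir/sub/file.py', b'di')   -> b'dir'
# next_segment(b'dir/sub/file.py', b'dir/') -> b'dir/sub'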
2501 @command(
2519 @command(
2502 b'debugpathcopies',
2520 b'debugpathcopies',
2503 cmdutil.walkopts,
2521 cmdutil.walkopts,
2504 b'hg debugpathcopies REV1 REV2 [FILE]',
2522 b'hg debugpathcopies REV1 REV2 [FILE]',
2505 inferrepo=True,
2523 inferrepo=True,
2506 )
2524 )
2507 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2525 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2508 """show copies between two revisions"""
2526 """show copies between two revisions"""
2509 ctx1 = scmutil.revsingle(repo, rev1)
2527 ctx1 = scmutil.revsingle(repo, rev1)
2510 ctx2 = scmutil.revsingle(repo, rev2)
2528 ctx2 = scmutil.revsingle(repo, rev2)
2511 m = scmutil.match(ctx1, pats, opts)
2529 m = scmutil.match(ctx1, pats, opts)
2512 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2530 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2513 ui.write(b'%s -> %s\n' % (src, dst))
2531 ui.write(b'%s -> %s\n' % (src, dst))
2514
2532
2515
2533
2516 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2534 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2517 def debugpeer(ui, path):
2535 def debugpeer(ui, path):
2518 """establish a connection to a peer repository"""
2536 """establish a connection to a peer repository"""
2519 # Always enable peer request logging. Requires --debug to display
2537 # Always enable peer request logging. Requires --debug to display
2520 # though.
2538 # though.
2521 overrides = {
2539 overrides = {
2522 (b'devel', b'debug.peer-request'): True,
2540 (b'devel', b'debug.peer-request'): True,
2523 }
2541 }
2524
2542
2525 with ui.configoverride(overrides):
2543 with ui.configoverride(overrides):
2526 peer = hg.peer(ui, {}, path)
2544 peer = hg.peer(ui, {}, path)
2527
2545
2528 local = peer.local() is not None
2546 local = peer.local() is not None
2529 canpush = peer.canpush()
2547 canpush = peer.canpush()
2530
2548
2531 ui.write(_(b'url: %s\n') % peer.url())
2549 ui.write(_(b'url: %s\n') % peer.url())
2532 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2550 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2533 ui.write(_(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no')))
2551 ui.write(_(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no')))
2534
2552
2535
2553
2536 @command(
2554 @command(
2537 b'debugpickmergetool',
2555 b'debugpickmergetool',
2538 [
2556 [
2539 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2557 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2540 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2558 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2541 ]
2559 ]
2542 + cmdutil.walkopts
2560 + cmdutil.walkopts
2543 + cmdutil.mergetoolopts,
2561 + cmdutil.mergetoolopts,
2544 _(b'[PATTERN]...'),
2562 _(b'[PATTERN]...'),
2545 inferrepo=True,
2563 inferrepo=True,
2546 )
2564 )
2547 def debugpickmergetool(ui, repo, *pats, **opts):
2565 def debugpickmergetool(ui, repo, *pats, **opts):
2548 """examine which merge tool is chosen for specified file
2566 """examine which merge tool is chosen for specified file
2549
2567
2550 As described in :hg:`help merge-tools`, Mercurial examines
2568 As described in :hg:`help merge-tools`, Mercurial examines
2551 the configurations below in this order to decide which merge tool is
2569 the configurations below in this order to decide which merge tool is
2552 chosen for the specified file.
2570 chosen for the specified file.
2553
2571
2554 1. ``--tool`` option
2572 1. ``--tool`` option
2555 2. ``HGMERGE`` environment variable
2573 2. ``HGMERGE`` environment variable
2556 3. configurations in ``merge-patterns`` section
2574 3. configurations in ``merge-patterns`` section
2557 4. configuration of ``ui.merge``
2575 4. configuration of ``ui.merge``
2558 5. configurations in ``merge-tools`` section
2576 5. configurations in ``merge-tools`` section
2559 6. ``hgmerge`` tool (for historical reasons only)
2577 6. ``hgmerge`` tool (for historical reasons only)
2560 7. default tool for fallback (``:merge`` or ``:prompt``)
2578 7. default tool for fallback (``:merge`` or ``:prompt``)
2561
2579
2562 This command writes out the examination result in the style below::
2580 This command writes out the examination result in the style below::
2563
2581
2564 FILE = MERGETOOL
2582 FILE = MERGETOOL
2565
2583
2566 By default, all files known in the first parent context of the
2584 By default, all files known in the first parent context of the
2567 working directory are examined. Use file patterns and/or -I/-X
2585 working directory are examined. Use file patterns and/or -I/-X
2568 options to limit target files. -r/--rev is also useful to examine
2586 options to limit target files. -r/--rev is also useful to examine
2569 files in another context without actually updating to it.
2587 files in another context without actually updating to it.
2570
2588
2571 With --debug, this command also shows warning messages while matching
2589 With --debug, this command also shows warning messages while matching
2572 against ``merge-patterns`` and so on. It is recommended to
2590 against ``merge-patterns`` and so on. It is recommended to
2573 use this option with explicit file patterns and/or -I/-X options,
2591 use this option with explicit file patterns and/or -I/-X options,
2574 because this option increases the amount of output per file according
2592 because this option increases the amount of output per file according
2575 to the configurations in hgrc.
2593 to the configurations in hgrc.
2576
2594
2577 With -v/--verbose, this command shows the configurations below
2595 With -v/--verbose, this command shows the configurations below
2578 first (only if they are specified).
2596 first (only if they are specified).
2579
2597
2580 - ``--tool`` option
2598 - ``--tool`` option
2581 - ``HGMERGE`` environment variable
2599 - ``HGMERGE`` environment variable
2582 - configuration of ``ui.merge``
2600 - configuration of ``ui.merge``
2583
2601
2584 If the merge tool is chosen before matching against
2602 If the merge tool is chosen before matching against
2585 ``merge-patterns``, this command can't show any helpful
2603 ``merge-patterns``, this command can't show any helpful
2586 information, even with --debug. In such a case, the information above is
2604 information, even with --debug. In such a case, the information above is
2587 useful for understanding why a merge tool was chosen.
2605 useful for understanding why a merge tool was chosen.
2588 """
2606 """
2589 opts = pycompat.byteskwargs(opts)
2607 opts = pycompat.byteskwargs(opts)
2590 overrides = {}
2608 overrides = {}
2591 if opts[b'tool']:
2609 if opts[b'tool']:
2592 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2610 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2593 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2611 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2594
2612
2595 with ui.configoverride(overrides, b'debugmergepatterns'):
2613 with ui.configoverride(overrides, b'debugmergepatterns'):
2596 hgmerge = encoding.environ.get(b"HGMERGE")
2614 hgmerge = encoding.environ.get(b"HGMERGE")
2597 if hgmerge is not None:
2615 if hgmerge is not None:
2598 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2616 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2599 uimerge = ui.config(b"ui", b"merge")
2617 uimerge = ui.config(b"ui", b"merge")
2600 if uimerge:
2618 if uimerge:
2601 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2619 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2602
2620
2603 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2621 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2604 m = scmutil.match(ctx, pats, opts)
2622 m = scmutil.match(ctx, pats, opts)
2605 changedelete = opts[b'changedelete']
2623 changedelete = opts[b'changedelete']
2606 for path in ctx.walk(m):
2624 for path in ctx.walk(m):
2607 fctx = ctx[path]
2625 fctx = ctx[path]
2608 try:
2626 try:
2609 if not ui.debugflag:
2627 if not ui.debugflag:
2610 ui.pushbuffer(error=True)
2628 ui.pushbuffer(error=True)
2611 tool, toolpath = filemerge._picktool(
2629 tool, toolpath = filemerge._picktool(
2612 repo,
2630 repo,
2613 ui,
2631 ui,
2614 path,
2632 path,
2615 fctx.isbinary(),
2633 fctx.isbinary(),
2616 b'l' in fctx.flags(),
2634 b'l' in fctx.flags(),
2617 changedelete,
2635 changedelete,
2618 )
2636 )
2619 finally:
2637 finally:
2620 if not ui.debugflag:
2638 if not ui.debugflag:
2621 ui.popbuffer()
2639 ui.popbuffer()
2622 ui.write(b'%s = %s\n' % (path, tool))
2640 ui.write(b'%s = %s\n' % (path, tool))
2623
2641
2624
2642
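# Toy model of the precedence documented in the docstring above.  It only
# illustrates the ordering (and collapses steps 6 and 7 into one fallback);
# the real logic lives in filemerge._picktool() and is more involved.
def pick_tool_source(tool_opt, hgmerge_env, pattern_tool, ui_merge, section_tool):
    if tool_opt:
        return b'--tool'
    if hgmerge_env:
        return b'HGMERGE'
    if pattern_tool:
        return b'merge-patterns'
    if ui_merge:
        return b'ui.merge'
    if section_tool:
        return b'merge-tools'
    return b'fallback (:merge / :prompt)'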
2625 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2643 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2626 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2644 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2627 """access the pushkey key/value protocol
2645 """access the pushkey key/value protocol
2628
2646
2629 With two args, list the keys in the given namespace.
2647 With two args, list the keys in the given namespace.
2630
2648
2631 With five args, set a key to new if it currently is set to old.
2649 With five args, set a key to new if it currently is set to old.
2632 Reports success or failure.
2650 Reports success or failure.
2633 """
2651 """
2634
2652
2635 target = hg.peer(ui, {}, repopath)
2653 target = hg.peer(ui, {}, repopath)
2636 if keyinfo:
2654 if keyinfo:
2637 key, old, new = keyinfo
2655 key, old, new = keyinfo
2638 with target.commandexecutor() as e:
2656 with target.commandexecutor() as e:
2639 r = e.callcommand(
2657 r = e.callcommand(
2640 b'pushkey',
2658 b'pushkey',
2641 {
2659 {
2642 b'namespace': namespace,
2660 b'namespace': namespace,
2643 b'key': key,
2661 b'key': key,
2644 b'old': old,
2662 b'old': old,
2645 b'new': new,
2663 b'new': new,
2646 },
2664 },
2647 ).result()
2665 ).result()
2648
2666
2649 ui.status(pycompat.bytestr(r) + b'\n')
2667 ui.status(pycompat.bytestr(r) + b'\n')
2650 return not r
2668 return not r
2651 else:
2669 else:
2652 for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
2670 for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
2653 ui.write(
2671 ui.write(
2654 b"%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v))
2672 b"%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v))
2655 )
2673 )
2656
2674
2657
2675
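# Sketch of the five-argument (compare-and-swap) form described above, using
# the same commandexecutor pattern as the code.  `peer`, `oldhex` and
# `newhex` are assumed to exist; the bookmarks namespace is just an example.
def cas_bookmark(peer, name, oldhex, newhex):
    with peer.commandexecutor() as e:
        return e.callcommand(
            b'pushkey',
            {
                b'namespace': b'bookmarks',
                b'key': name,
                b'old': oldhex,
                b'new': newhex,
            },
        ).result()


# The result is truthy only if the key still had the value `old` when the
# update was applied on the remote side.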
2658 @command(b'debugpvec', [], _(b'A B'))
2676 @command(b'debugpvec', [], _(b'A B'))
2659 def debugpvec(ui, repo, a, b=None):
2677 def debugpvec(ui, repo, a, b=None):
2660 ca = scmutil.revsingle(repo, a)
2678 ca = scmutil.revsingle(repo, a)
2661 cb = scmutil.revsingle(repo, b)
2679 cb = scmutil.revsingle(repo, b)
2662 pa = pvec.ctxpvec(ca)
2680 pa = pvec.ctxpvec(ca)
2663 pb = pvec.ctxpvec(cb)
2681 pb = pvec.ctxpvec(cb)
2664 if pa == pb:
2682 if pa == pb:
2665 rel = b"="
2683 rel = b"="
2666 elif pa > pb:
2684 elif pa > pb:
2667 rel = b">"
2685 rel = b">"
2668 elif pa < pb:
2686 elif pa < pb:
2669 rel = b"<"
2687 rel = b"<"
2670 elif pa | pb:
2688 elif pa | pb:
2671 rel = b"|"
2689 rel = b"|"
2672 ui.write(_(b"a: %s\n") % pa)
2690 ui.write(_(b"a: %s\n") % pa)
2673 ui.write(_(b"b: %s\n") % pb)
2691 ui.write(_(b"b: %s\n") % pb)
2674 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2692 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2675 ui.write(
2693 ui.write(
2676 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
2694 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
2677 % (
2695 % (
2678 abs(pa._depth - pb._depth),
2696 abs(pa._depth - pb._depth),
2679 pvec._hamming(pa._vec, pb._vec),
2697 pvec._hamming(pa._vec, pb._vec),
2680 pa.distance(pb),
2698 pa.distance(pb),
2681 rel,
2699 rel,
2682 )
2700 )
2683 )
2701 )
2684
2702
2685
2703
2686 @command(
2704 @command(
2687 b'debugrebuilddirstate|debugrebuildstate',
2705 b'debugrebuilddirstate|debugrebuildstate',
2688 [
2706 [
2689 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
2707 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
2690 (
2708 (
2691 b'',
2709 b'',
2692 b'minimal',
2710 b'minimal',
2693 None,
2711 None,
2694 _(
2712 _(
2695 b'only rebuild files that are inconsistent with '
2713 b'only rebuild files that are inconsistent with '
2696 b'the working copy parent'
2714 b'the working copy parent'
2697 ),
2715 ),
2698 ),
2716 ),
2699 ],
2717 ],
2700 _(b'[-r REV]'),
2718 _(b'[-r REV]'),
2701 )
2719 )
2702 def debugrebuilddirstate(ui, repo, rev, **opts):
2720 def debugrebuilddirstate(ui, repo, rev, **opts):
2703 """rebuild the dirstate as it would look like for the given revision
2721 """rebuild the dirstate as it would look like for the given revision
2704
2722
2705 If no revision is specified, the first parent of the working directory is used.
2723 If no revision is specified, the first parent of the working directory is used.
2706
2724
2707 The dirstate will be set to the files of the given revision.
2725 The dirstate will be set to the files of the given revision.
2708 The actual working directory content or existing dirstate
2726 The actual working directory content or existing dirstate
2709 information such as adds or removes is not considered.
2727 information such as adds or removes is not considered.
2710
2728
2711 ``minimal`` will only rebuild the dirstate status for files that claim to be
2729 ``minimal`` will only rebuild the dirstate status for files that claim to be
2712 tracked but are not in the parent manifest, or that exist in the parent
2730 tracked but are not in the parent manifest, or that exist in the parent
2713 manifest but are not in the dirstate. It will not change adds, removes, or
2731 manifest but are not in the dirstate. It will not change adds, removes, or
2714 modified files that are in the working copy parent.
2732 modified files that are in the working copy parent.
2715
2733
2716 One use of this command is to make the next :hg:`status` invocation
2734 One use of this command is to make the next :hg:`status` invocation
2717 check the actual file content.
2735 check the actual file content.
2718 """
2736 """
2719 ctx = scmutil.revsingle(repo, rev)
2737 ctx = scmutil.revsingle(repo, rev)
2720 with repo.wlock():
2738 with repo.wlock():
2721 dirstate = repo.dirstate
2739 dirstate = repo.dirstate
2722 changedfiles = None
2740 changedfiles = None
2723 # See command doc for what minimal does.
2741 # See command doc for what minimal does.
2724 if opts.get('minimal'):
2742 if opts.get('minimal'):
2725 manifestfiles = set(ctx.manifest().keys())
2743 manifestfiles = set(ctx.manifest().keys())
2726 dirstatefiles = set(dirstate)
2744 dirstatefiles = set(dirstate)
2727 manifestonly = manifestfiles - dirstatefiles
2745 manifestonly = manifestfiles - dirstatefiles
2728 dsonly = dirstatefiles - manifestfiles
2746 dsonly = dirstatefiles - manifestfiles
2729 dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
2747 dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
2730 changedfiles = manifestonly | dsnotadded
2748 changedfiles = manifestonly | dsnotadded
2731
2749
2732 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2750 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2733
2751
2734
2752
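# Worked example of the 'minimal' selection above (values are made up):
#
#   manifestfiles = {b'a.txt', b'b.txt'}
#   dirstatefiles = {b'b.txt', b'c.txt', b'd.txt'}
#   dirstate state of c.txt is b'a' (added), of d.txt is b'r' (removed)
#
#   manifestonly = manifestfiles - dirstatefiles        -> {b'a.txt'}
#   dsonly       = dirstatefiles - manifestfiles        -> {b'c.txt', b'd.txt'}
#   dsnotadded   = {f for f in dsonly if state != b'a'} -> {b'd.txt'}
#   changedfiles = manifestonly | dsnotadded            -> {b'a.txt', b'd.txt'}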
2735 @command(b'debugrebuildfncache', [], b'')
2753 @command(b'debugrebuildfncache', [], b'')
2736 def debugrebuildfncache(ui, repo):
2754 def debugrebuildfncache(ui, repo):
2737 """rebuild the fncache file"""
2755 """rebuild the fncache file"""
2738 repair.rebuildfncache(ui, repo)
2756 repair.rebuildfncache(ui, repo)
2739
2757
2740
2758
2741 @command(
2759 @command(
2742 b'debugrename',
2760 b'debugrename',
2743 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2761 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2744 _(b'[-r REV] [FILE]...'),
2762 _(b'[-r REV] [FILE]...'),
2745 )
2763 )
2746 def debugrename(ui, repo, *pats, **opts):
2764 def debugrename(ui, repo, *pats, **opts):
2747 """dump rename information"""
2765 """dump rename information"""
2748
2766
2749 opts = pycompat.byteskwargs(opts)
2767 opts = pycompat.byteskwargs(opts)
2750 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2768 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2751 m = scmutil.match(ctx, pats, opts)
2769 m = scmutil.match(ctx, pats, opts)
2752 for abs in ctx.walk(m):
2770 for abs in ctx.walk(m):
2753 fctx = ctx[abs]
2771 fctx = ctx[abs]
2754 o = fctx.filelog().renamed(fctx.filenode())
2772 o = fctx.filelog().renamed(fctx.filenode())
2755 rel = repo.pathto(abs)
2773 rel = repo.pathto(abs)
2756 if o:
2774 if o:
2757 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2775 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2758 else:
2776 else:
2759 ui.write(_(b"%s not renamed\n") % rel)
2777 ui.write(_(b"%s not renamed\n") % rel)
2760
2778
2761
2779
2762 @command(b'debugrequires|debugrequirements', [], b'')
2780 @command(b'debugrequires|debugrequirements', [], b'')
2763 def debugrequirements(ui, repo):
2781 def debugrequirements(ui, repo):
2764 """ print the current repo requirements """
2782 """ print the current repo requirements """
2765 for r in sorted(repo.requirements):
2783 for r in sorted(repo.requirements):
2766 ui.write(b"%s\n" % r)
2784 ui.write(b"%s\n" % r)
2767
2785
2768
2786
2769 @command(
2787 @command(
2770 b'debugrevlog',
2788 b'debugrevlog',
2771 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2789 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2772 _(b'-c|-m|FILE'),
2790 _(b'-c|-m|FILE'),
2773 optionalrepo=True,
2791 optionalrepo=True,
2774 )
2792 )
2775 def debugrevlog(ui, repo, file_=None, **opts):
2793 def debugrevlog(ui, repo, file_=None, **opts):
2776 """show data and statistics about a revlog"""
2794 """show data and statistics about a revlog"""
2777 opts = pycompat.byteskwargs(opts)
2795 opts = pycompat.byteskwargs(opts)
2778 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2796 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2779
2797
2780 if opts.get(b"dump"):
2798 if opts.get(b"dump"):
2781 numrevs = len(r)
2799 numrevs = len(r)
2782 ui.write(
2800 ui.write(
2783 (
2801 (
2784 b"# rev p1rev p2rev start end deltastart base p1 p2"
2802 b"# rev p1rev p2rev start end deltastart base p1 p2"
2785 b" rawsize totalsize compression heads chainlen\n"
2803 b" rawsize totalsize compression heads chainlen\n"
2786 )
2804 )
2787 )
2805 )
2788 ts = 0
2806 ts = 0
2789 heads = set()
2807 heads = set()
2790
2808
2791 for rev in pycompat.xrange(numrevs):
2809 for rev in pycompat.xrange(numrevs):
2792 dbase = r.deltaparent(rev)
2810 dbase = r.deltaparent(rev)
2793 if dbase == -1:
2811 if dbase == -1:
2794 dbase = rev
2812 dbase = rev
2795 cbase = r.chainbase(rev)
2813 cbase = r.chainbase(rev)
2796 clen = r.chainlen(rev)
2814 clen = r.chainlen(rev)
2797 p1, p2 = r.parentrevs(rev)
2815 p1, p2 = r.parentrevs(rev)
2798 rs = r.rawsize(rev)
2816 rs = r.rawsize(rev)
2799 ts = ts + rs
2817 ts = ts + rs
2800 heads -= set(r.parentrevs(rev))
2818 heads -= set(r.parentrevs(rev))
2801 heads.add(rev)
2819 heads.add(rev)
2802 try:
2820 try:
2803 compression = ts / r.end(rev)
2821 compression = ts / r.end(rev)
2804 except ZeroDivisionError:
2822 except ZeroDivisionError:
2805 compression = 0
2823 compression = 0
2806 ui.write(
2824 ui.write(
2807 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2825 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2808 b"%11d %5d %8d\n"
2826 b"%11d %5d %8d\n"
2809 % (
2827 % (
2810 rev,
2828 rev,
2811 p1,
2829 p1,
2812 p2,
2830 p2,
2813 r.start(rev),
2831 r.start(rev),
2814 r.end(rev),
2832 r.end(rev),
2815 r.start(dbase),
2833 r.start(dbase),
2816 r.start(cbase),
2834 r.start(cbase),
2817 r.start(p1),
2835 r.start(p1),
2818 r.start(p2),
2836 r.start(p2),
2819 rs,
2837 rs,
2820 ts,
2838 ts,
2821 compression,
2839 compression,
2822 len(heads),
2840 len(heads),
2823 clen,
2841 clen,
2824 )
2842 )
2825 )
2843 )
2826 return 0
2844 return 0
2827
2845
2828 v = r.version
2846 v = r.version
2829 format = v & 0xFFFF
2847 format = v & 0xFFFF
2830 flags = []
2848 flags = []
2831 gdelta = False
2849 gdelta = False
2832 if v & revlog.FLAG_INLINE_DATA:
2850 if v & revlog.FLAG_INLINE_DATA:
2833 flags.append(b'inline')
2851 flags.append(b'inline')
2834 if v & revlog.FLAG_GENERALDELTA:
2852 if v & revlog.FLAG_GENERALDELTA:
2835 gdelta = True
2853 gdelta = True
2836 flags.append(b'generaldelta')
2854 flags.append(b'generaldelta')
2837 if not flags:
2855 if not flags:
2838 flags = [b'(none)']
2856 flags = [b'(none)']
2839
2857
2840 ### tracks merge vs single parent
2858 ### tracks merge vs single parent
2841 nummerges = 0
2859 nummerges = 0
2842
2860
2843 ### tracks how each "delta" is built
2861 ### tracks how each "delta" is built
2844 # nodelta
2862 # nodelta
2845 numempty = 0
2863 numempty = 0
2846 numemptytext = 0
2864 numemptytext = 0
2847 numemptydelta = 0
2865 numemptydelta = 0
2848 # full file content
2866 # full file content
2849 numfull = 0
2867 numfull = 0
2850 # intermediate snapshot against a prior snapshot
2868 # intermediate snapshot against a prior snapshot
2851 numsemi = 0
2869 numsemi = 0
2852 # snapshot count per depth
2870 # snapshot count per depth
2853 numsnapdepth = collections.defaultdict(lambda: 0)
2871 numsnapdepth = collections.defaultdict(lambda: 0)
2854 # delta against previous revision
2872 # delta against previous revision
2855 numprev = 0
2873 numprev = 0
2856 # delta against first or second parent (not prev)
2874 # delta against first or second parent (not prev)
2857 nump1 = 0
2875 nump1 = 0
2858 nump2 = 0
2876 nump2 = 0
2859 # delta against neither prev nor parents
2877 # delta against neither prev nor parents
2860 numother = 0
2878 numother = 0
2861 # delta against prev that are also first or second parent
2879 # delta against prev that are also first or second parent
2862 # (details of `numprev`)
2880 # (details of `numprev`)
2863 nump1prev = 0
2881 nump1prev = 0
2864 nump2prev = 0
2882 nump2prev = 0
2865
2883
2866 # data about the delta chain of each rev
2884 # data about the delta chain of each rev
2867 chainlengths = []
2885 chainlengths = []
2868 chainbases = []
2886 chainbases = []
2869 chainspans = []
2887 chainspans = []
2870
2888
2871 # data about each revision
2889 # data about each revision
2872 datasize = [None, 0, 0]
2890 datasize = [None, 0, 0]
2873 fullsize = [None, 0, 0]
2891 fullsize = [None, 0, 0]
2874 semisize = [None, 0, 0]
2892 semisize = [None, 0, 0]
2875 # snapshot count per depth
2893 # snapshot count per depth
2876 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2894 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2877 deltasize = [None, 0, 0]
2895 deltasize = [None, 0, 0]
2878 chunktypecounts = {}
2896 chunktypecounts = {}
2879 chunktypesizes = {}
2897 chunktypesizes = {}
2880
2898
2881 def addsize(size, l):
2899 def addsize(size, l):
2882 if l[0] is None or size < l[0]:
2900 if l[0] is None or size < l[0]:
2883 l[0] = size
2901 l[0] = size
2884 if size > l[1]:
2902 if size > l[1]:
2885 l[1] = size
2903 l[1] = size
2886 l[2] += size
2904 l[2] += size
2887
2905
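# Worked example of the [min, max, total] accumulator defined just above,
# feeding sizes 5, 12 and 3 into l = [None, 0, 0]:
#   after addsize(5, l):  l == [5, 5, 5]
#   after addsize(12, l): l == [5, 12, 17]
#   after addsize(3, l):  l == [3, 12, 20]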
2888 numrevs = len(r)
2906 numrevs = len(r)
2889 for rev in pycompat.xrange(numrevs):
2907 for rev in pycompat.xrange(numrevs):
2890 p1, p2 = r.parentrevs(rev)
2908 p1, p2 = r.parentrevs(rev)
2891 delta = r.deltaparent(rev)
2909 delta = r.deltaparent(rev)
2892 if format > 0:
2910 if format > 0:
2893 addsize(r.rawsize(rev), datasize)
2911 addsize(r.rawsize(rev), datasize)
2894 if p2 != nullrev:
2912 if p2 != nullrev:
2895 nummerges += 1
2913 nummerges += 1
2896 size = r.length(rev)
2914 size = r.length(rev)
2897 if delta == nullrev:
2915 if delta == nullrev:
2898 chainlengths.append(0)
2916 chainlengths.append(0)
2899 chainbases.append(r.start(rev))
2917 chainbases.append(r.start(rev))
2900 chainspans.append(size)
2918 chainspans.append(size)
2901 if size == 0:
2919 if size == 0:
2902 numempty += 1
2920 numempty += 1
2903 numemptytext += 1
2921 numemptytext += 1
2904 else:
2922 else:
2905 numfull += 1
2923 numfull += 1
2906 numsnapdepth[0] += 1
2924 numsnapdepth[0] += 1
2907 addsize(size, fullsize)
2925 addsize(size, fullsize)
2908 addsize(size, snapsizedepth[0])
2926 addsize(size, snapsizedepth[0])
2909 else:
2927 else:
2910 chainlengths.append(chainlengths[delta] + 1)
2928 chainlengths.append(chainlengths[delta] + 1)
2911 baseaddr = chainbases[delta]
2929 baseaddr = chainbases[delta]
2912 revaddr = r.start(rev)
2930 revaddr = r.start(rev)
2913 chainbases.append(baseaddr)
2931 chainbases.append(baseaddr)
2914 chainspans.append((revaddr - baseaddr) + size)
2932 chainspans.append((revaddr - baseaddr) + size)
2915 if size == 0:
2933 if size == 0:
2916 numempty += 1
2934 numempty += 1
2917 numemptydelta += 1
2935 numemptydelta += 1
2918 elif r.issnapshot(rev):
2936 elif r.issnapshot(rev):
2919 addsize(size, semisize)
2937 addsize(size, semisize)
2920 numsemi += 1
2938 numsemi += 1
2921 depth = r.snapshotdepth(rev)
2939 depth = r.snapshotdepth(rev)
2922 numsnapdepth[depth] += 1
2940 numsnapdepth[depth] += 1
2923 addsize(size, snapsizedepth[depth])
2941 addsize(size, snapsizedepth[depth])
2924 else:
2942 else:
2925 addsize(size, deltasize)
2943 addsize(size, deltasize)
2926 if delta == rev - 1:
2944 if delta == rev - 1:
2927 numprev += 1
2945 numprev += 1
2928 if delta == p1:
2946 if delta == p1:
2929 nump1prev += 1
2947 nump1prev += 1
2930 elif delta == p2:
2948 elif delta == p2:
2931 nump2prev += 1
2949 nump2prev += 1
2932 elif delta == p1:
2950 elif delta == p1:
2933 nump1 += 1
2951 nump1 += 1
2934 elif delta == p2:
2952 elif delta == p2:
2935 nump2 += 1
2953 nump2 += 1
2936 elif delta != nullrev:
2954 elif delta != nullrev:
2937 numother += 1
2955 numother += 1
2938
2956
2939 # Obtain data on the raw chunks in the revlog.
2957 # Obtain data on the raw chunks in the revlog.
2940 if util.safehasattr(r, b'_getsegmentforrevs'):
2958 if util.safehasattr(r, b'_getsegmentforrevs'):
2941 segment = r._getsegmentforrevs(rev, rev)[1]
2959 segment = r._getsegmentforrevs(rev, rev)[1]
2942 else:
2960 else:
2943 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2961 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2944 if segment:
2962 if segment:
2945 chunktype = bytes(segment[0:1])
2963 chunktype = bytes(segment[0:1])
2946 else:
2964 else:
2947 chunktype = b'empty'
2965 chunktype = b'empty'
2948
2966
2949 if chunktype not in chunktypecounts:
2967 if chunktype not in chunktypecounts:
2950 chunktypecounts[chunktype] = 0
2968 chunktypecounts[chunktype] = 0
2951 chunktypesizes[chunktype] = 0
2969 chunktypesizes[chunktype] = 0
2952
2970
2953 chunktypecounts[chunktype] += 1
2971 chunktypecounts[chunktype] += 1
2954 chunktypesizes[chunktype] += size
2972 chunktypesizes[chunktype] += size
2955
2973
2956 # Adjust size min value for empty cases
2974 # Adjust size min value for empty cases
2957 for size in (datasize, fullsize, semisize, deltasize):
2975 for size in (datasize, fullsize, semisize, deltasize):
2958 if size[0] is None:
2976 if size[0] is None:
2959 size[0] = 0
2977 size[0] = 0
2960
2978
2961 numdeltas = numrevs - numfull - numempty - numsemi
2979 numdeltas = numrevs - numfull - numempty - numsemi
2962 numoprev = numprev - nump1prev - nump2prev
2980 numoprev = numprev - nump1prev - nump2prev
2963 totalrawsize = datasize[2]
2981 totalrawsize = datasize[2]
2964 datasize[2] /= numrevs
2982 datasize[2] /= numrevs
2965 fulltotal = fullsize[2]
2983 fulltotal = fullsize[2]
2966 if numfull == 0:
2984 if numfull == 0:
2967 fullsize[2] = 0
2985 fullsize[2] = 0
2968 else:
2986 else:
2969 fullsize[2] /= numfull
2987 fullsize[2] /= numfull
2970 semitotal = semisize[2]
2988 semitotal = semisize[2]
2971 snaptotal = {}
2989 snaptotal = {}
2972 if numsemi > 0:
2990 if numsemi > 0:
2973 semisize[2] /= numsemi
2991 semisize[2] /= numsemi
2974 for depth in snapsizedepth:
2992 for depth in snapsizedepth:
2975 snaptotal[depth] = snapsizedepth[depth][2]
2993 snaptotal[depth] = snapsizedepth[depth][2]
2976 snapsizedepth[depth][2] /= numsnapdepth[depth]
2994 snapsizedepth[depth][2] /= numsnapdepth[depth]
2977
2995
2978 deltatotal = deltasize[2]
2996 deltatotal = deltasize[2]
2979 if numdeltas > 0:
2997 if numdeltas > 0:
2980 deltasize[2] /= numdeltas
2998 deltasize[2] /= numdeltas
2981 totalsize = fulltotal + semitotal + deltatotal
2999 totalsize = fulltotal + semitotal + deltatotal
2982 avgchainlen = sum(chainlengths) / numrevs
3000 avgchainlen = sum(chainlengths) / numrevs
2983 maxchainlen = max(chainlengths)
3001 maxchainlen = max(chainlengths)
2984 maxchainspan = max(chainspans)
3002 maxchainspan = max(chainspans)
2985 compratio = 1
3003 compratio = 1
2986 if totalsize:
3004 if totalsize:
2987 compratio = totalrawsize / totalsize
3005 compratio = totalrawsize / totalsize
2988
3006
2989 basedfmtstr = b'%%%dd\n'
3007 basedfmtstr = b'%%%dd\n'
2990 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3008 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
2991
3009
2992 def dfmtstr(max):
3010 def dfmtstr(max):
2993 return basedfmtstr % len(str(max))
3011 return basedfmtstr % len(str(max))
2994
3012
2995 def pcfmtstr(max, padding=0):
3013 def pcfmtstr(max, padding=0):
2996 return basepcfmtstr % (len(str(max)), b' ' * padding)
3014 return basepcfmtstr % (len(str(max)), b' ' * padding)
2997
3015
2998 def pcfmt(value, total):
3016 def pcfmt(value, total):
2999 if total:
3017 if total:
3000 return (value, 100 * float(value) / total)
3018 return (value, 100 * float(value) / total)
3001 else:
3019 else:
3002 return value, 100.0
3020 return value, 100.0
3003
3021
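# Editor's worked example: with a hypothetical total of 1000 revisions,
# dfmtstr(1000) is b'%4d\n' and pcfmtstr(1000) is b'%4d (%5.2f%%)\n', so
#   pcfmtstr(1000) % pcfmt(152, 1000)
# renders as b' 152 (15.20%)\n' -- each count is padded to the width of the
# largest value and followed by its share of the total.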
3004 ui.writenoi18n(b'format : %d\n' % format)
3022 ui.writenoi18n(b'format : %d\n' % format)
3005 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3023 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3006
3024
3007 ui.write(b'\n')
3025 ui.write(b'\n')
3008 fmt = pcfmtstr(totalsize)
3026 fmt = pcfmtstr(totalsize)
3009 fmt2 = dfmtstr(totalsize)
3027 fmt2 = dfmtstr(totalsize)
3010 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3028 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3011 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3029 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3012 ui.writenoi18n(
3030 ui.writenoi18n(
3013 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3031 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3014 )
3032 )
3015 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3033 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3016 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3034 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3017 ui.writenoi18n(
3035 ui.writenoi18n(
3018 b' text : '
3036 b' text : '
3019 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3037 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3020 )
3038 )
3021 ui.writenoi18n(
3039 ui.writenoi18n(
3022 b' delta : '
3040 b' delta : '
3023 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3041 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3024 )
3042 )
3025 ui.writenoi18n(
3043 ui.writenoi18n(
3026 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3044 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3027 )
3045 )
3028 for depth in sorted(numsnapdepth):
3046 for depth in sorted(numsnapdepth):
3029 ui.write(
3047 ui.write(
3030 (b' lvl-%-3d : ' % depth)
3048 (b' lvl-%-3d : ' % depth)
3031 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3049 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3032 )
3050 )
3033 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3051 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3034 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3052 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3035 ui.writenoi18n(
3053 ui.writenoi18n(
3036 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3054 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3037 )
3055 )
3038 for depth in sorted(numsnapdepth):
3056 for depth in sorted(numsnapdepth):
3039 ui.write(
3057 ui.write(
3040 (b' lvl-%-3d : ' % depth)
3058 (b' lvl-%-3d : ' % depth)
3041 + fmt % pcfmt(snaptotal[depth], totalsize)
3059 + fmt % pcfmt(snaptotal[depth], totalsize)
3042 )
3060 )
3043 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3061 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3044
3062
3045 def fmtchunktype(chunktype):
3063 def fmtchunktype(chunktype):
3046 if chunktype == b'empty':
3064 if chunktype == b'empty':
3047 return b' %s : ' % chunktype
3065 return b' %s : ' % chunktype
3048 elif chunktype in pycompat.bytestr(string.ascii_letters):
3066 elif chunktype in pycompat.bytestr(string.ascii_letters):
3049 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3067 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3050 else:
3068 else:
3051 return b' 0x%s : ' % hex(chunktype)
3069 return b' 0x%s : ' % hex(chunktype)
3052
3070
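# Editor's note (typical examples): a zlib-compressed chunk usually starts
# with b'x' and is shown as "0x78 (x)", a chunk stored uncompressed behind the
# b'u' marker as "0x75 (u)", while zero-length chunks are grouped under the
# b'empty' pseudo-type handled above.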
3053 ui.write(b'\n')
3071 ui.write(b'\n')
3054 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3072 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3055 for chunktype in sorted(chunktypecounts):
3073 for chunktype in sorted(chunktypecounts):
3056 ui.write(fmtchunktype(chunktype))
3074 ui.write(fmtchunktype(chunktype))
3057 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3075 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3058 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3076 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3059 for chunktype in sorted(chunktypecounts):
3077 for chunktype in sorted(chunktypecounts):
3060 ui.write(fmtchunktype(chunktype))
3078 ui.write(fmtchunktype(chunktype))
3061 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3079 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3062
3080
3063 ui.write(b'\n')
3081 ui.write(b'\n')
3064 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3082 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3065 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3083 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3066 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3084 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3067 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3085 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3068 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3086 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3069
3087
3070 if format > 0:
3088 if format > 0:
3071 ui.write(b'\n')
3089 ui.write(b'\n')
3072 ui.writenoi18n(
3090 ui.writenoi18n(
3073 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3091 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3074 % tuple(datasize)
3092 % tuple(datasize)
3075 )
3093 )
3076 ui.writenoi18n(
3094 ui.writenoi18n(
3077 b'full revision size (min/max/avg) : %d / %d / %d\n'
3095 b'full revision size (min/max/avg) : %d / %d / %d\n'
3078 % tuple(fullsize)
3096 % tuple(fullsize)
3079 )
3097 )
3080 ui.writenoi18n(
3098 ui.writenoi18n(
3081 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3099 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3082 % tuple(semisize)
3100 % tuple(semisize)
3083 )
3101 )
3084 for depth in sorted(snapsizedepth):
3102 for depth in sorted(snapsizedepth):
3085 if depth == 0:
3103 if depth == 0:
3086 continue
3104 continue
3087 ui.writenoi18n(
3105 ui.writenoi18n(
3088 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3106 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3089 % ((depth,) + tuple(snapsizedepth[depth]))
3107 % ((depth,) + tuple(snapsizedepth[depth]))
3090 )
3108 )
3091 ui.writenoi18n(
3109 ui.writenoi18n(
3092 b'delta size (min/max/avg) : %d / %d / %d\n'
3110 b'delta size (min/max/avg) : %d / %d / %d\n'
3093 % tuple(deltasize)
3111 % tuple(deltasize)
3094 )
3112 )
3095
3113
3096 if numdeltas > 0:
3114 if numdeltas > 0:
3097 ui.write(b'\n')
3115 ui.write(b'\n')
3098 fmt = pcfmtstr(numdeltas)
3116 fmt = pcfmtstr(numdeltas)
3099 fmt2 = pcfmtstr(numdeltas, 4)
3117 fmt2 = pcfmtstr(numdeltas, 4)
3100 ui.writenoi18n(
3118 ui.writenoi18n(
3101 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3119 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3102 )
3120 )
3103 if numprev > 0:
3121 if numprev > 0:
3104 ui.writenoi18n(
3122 ui.writenoi18n(
3105 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3123 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3106 )
3124 )
3107 ui.writenoi18n(
3125 ui.writenoi18n(
3108 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3126 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3109 )
3127 )
3110 ui.writenoi18n(
3128 ui.writenoi18n(
3111 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3129 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3112 )
3130 )
3113 if gdelta:
3131 if gdelta:
3114 ui.writenoi18n(
3132 ui.writenoi18n(
3115 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3133 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3116 )
3134 )
3117 ui.writenoi18n(
3135 ui.writenoi18n(
3118 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3136 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3119 )
3137 )
3120 ui.writenoi18n(
3138 ui.writenoi18n(
3121 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3139 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3122 )
3140 )
3123
3141
3124
3142
3125 @command(
3143 @command(
3126 b'debugrevlogindex',
3144 b'debugrevlogindex',
3127 cmdutil.debugrevlogopts
3145 cmdutil.debugrevlogopts
3128 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3146 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3129 _(b'[-f FORMAT] -c|-m|FILE'),
3147 _(b'[-f FORMAT] -c|-m|FILE'),
3130 optionalrepo=True,
3148 optionalrepo=True,
3131 )
3149 )
3132 def debugrevlogindex(ui, repo, file_=None, **opts):
3150 def debugrevlogindex(ui, repo, file_=None, **opts):
3133 """dump the contents of a revlog index"""
3151 """dump the contents of a revlog index"""
3134 opts = pycompat.byteskwargs(opts)
3152 opts = pycompat.byteskwargs(opts)
3135 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3153 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3136 format = opts.get(b'format', 0)
3154 format = opts.get(b'format', 0)
3137 if format not in (0, 1):
3155 if format not in (0, 1):
3138 raise error.Abort(_(b"unknown format %d") % format)
3156 raise error.Abort(_(b"unknown format %d") % format)
3139
3157
3140 if ui.debugflag:
3158 if ui.debugflag:
3141 shortfn = hex
3159 shortfn = hex
3142 else:
3160 else:
3143 shortfn = short
3161 shortfn = short
3144
3162
3145 # There might not be anything in r, so have a sane default
3163 # There might not be anything in r, so have a sane default
3146 idlen = 12
3164 idlen = 12
3147 for i in r:
3165 for i in r:
3148 idlen = len(shortfn(r.node(i)))
3166 idlen = len(shortfn(r.node(i)))
3149 break
3167 break
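# Editor's note: shortfn is hex() under --debug (full 40-hex-digit nodeids)
# and short() otherwise (12 hex digits), so the length of the first rendered
# node fixes the width of the nodeid columns for the whole listing.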
3150
3168
3151 if format == 0:
3169 if format == 0:
3152 if ui.verbose:
3170 if ui.verbose:
3153 ui.writenoi18n(
3171 ui.writenoi18n(
3154 b" rev offset length linkrev %s %s p2\n"
3172 b" rev offset length linkrev %s %s p2\n"
3155 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3173 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3156 )
3174 )
3157 else:
3175 else:
3158 ui.writenoi18n(
3176 ui.writenoi18n(
3159 b" rev linkrev %s %s p2\n"
3177 b" rev linkrev %s %s p2\n"
3160 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3178 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3161 )
3179 )
3162 elif format == 1:
3180 elif format == 1:
3163 if ui.verbose:
3181 if ui.verbose:
3164 ui.writenoi18n(
3182 ui.writenoi18n(
3165 (
3183 (
3166 b" rev flag offset length size link p1"
3184 b" rev flag offset length size link p1"
3167 b" p2 %s\n"
3185 b" p2 %s\n"
3168 )
3186 )
3169 % b"nodeid".rjust(idlen)
3187 % b"nodeid".rjust(idlen)
3170 )
3188 )
3171 else:
3189 else:
3172 ui.writenoi18n(
3190 ui.writenoi18n(
3173 b" rev flag size link p1 p2 %s\n"
3191 b" rev flag size link p1 p2 %s\n"
3174 % b"nodeid".rjust(idlen)
3192 % b"nodeid".rjust(idlen)
3175 )
3193 )
3176
3194
3177 for i in r:
3195 for i in r:
3178 node = r.node(i)
3196 node = r.node(i)
3179 if format == 0:
3197 if format == 0:
3180 try:
3198 try:
3181 pp = r.parents(node)
3199 pp = r.parents(node)
3182 except Exception:
3200 except Exception:
3183 pp = [nullid, nullid]
3201 pp = [nullid, nullid]
3184 if ui.verbose:
3202 if ui.verbose:
3185 ui.write(
3203 ui.write(
3186 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3204 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3187 % (
3205 % (
3188 i,
3206 i,
3189 r.start(i),
3207 r.start(i),
3190 r.length(i),
3208 r.length(i),
3191 r.linkrev(i),
3209 r.linkrev(i),
3192 shortfn(node),
3210 shortfn(node),
3193 shortfn(pp[0]),
3211 shortfn(pp[0]),
3194 shortfn(pp[1]),
3212 shortfn(pp[1]),
3195 )
3213 )
3196 )
3214 )
3197 else:
3215 else:
3198 ui.write(
3216 ui.write(
3199 b"% 6d % 7d %s %s %s\n"
3217 b"% 6d % 7d %s %s %s\n"
3200 % (
3218 % (
3201 i,
3219 i,
3202 r.linkrev(i),
3220 r.linkrev(i),
3203 shortfn(node),
3221 shortfn(node),
3204 shortfn(pp[0]),
3222 shortfn(pp[0]),
3205 shortfn(pp[1]),
3223 shortfn(pp[1]),
3206 )
3224 )
3207 )
3225 )
3208 elif format == 1:
3226 elif format == 1:
3209 pr = r.parentrevs(i)
3227 pr = r.parentrevs(i)
3210 if ui.verbose:
3228 if ui.verbose:
3211 ui.write(
3229 ui.write(
3212 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3230 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3213 % (
3231 % (
3214 i,
3232 i,
3215 r.flags(i),
3233 r.flags(i),
3216 r.start(i),
3234 r.start(i),
3217 r.length(i),
3235 r.length(i),
3218 r.rawsize(i),
3236 r.rawsize(i),
3219 r.linkrev(i),
3237 r.linkrev(i),
3220 pr[0],
3238 pr[0],
3221 pr[1],
3239 pr[1],
3222 shortfn(node),
3240 shortfn(node),
3223 )
3241 )
3224 )
3242 )
3225 else:
3243 else:
3226 ui.write(
3244 ui.write(
3227 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3245 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3228 % (
3246 % (
3229 i,
3247 i,
3230 r.flags(i),
3248 r.flags(i),
3231 r.rawsize(i),
3249 r.rawsize(i),
3232 r.linkrev(i),
3250 r.linkrev(i),
3233 pr[0],
3251 pr[0],
3234 pr[1],
3252 pr[1],
3235 shortfn(node),
3253 shortfn(node),
3236 )
3254 )
3237 )
3255 )
3238
3256
3239
3257
3240 @command(
3258 @command(
3241 b'debugrevspec',
3259 b'debugrevspec',
3242 [
3260 [
3243 (
3261 (
3244 b'',
3262 b'',
3245 b'optimize',
3263 b'optimize',
3246 None,
3264 None,
3247 _(b'print parsed tree after optimizing (DEPRECATED)'),
3265 _(b'print parsed tree after optimizing (DEPRECATED)'),
3248 ),
3266 ),
3249 (
3267 (
3250 b'',
3268 b'',
3251 b'show-revs',
3269 b'show-revs',
3252 True,
3270 True,
3253 _(b'print list of result revisions (default)'),
3271 _(b'print list of result revisions (default)'),
3254 ),
3272 ),
3255 (
3273 (
3256 b's',
3274 b's',
3257 b'show-set',
3275 b'show-set',
3258 None,
3276 None,
3259 _(b'print internal representation of result set'),
3277 _(b'print internal representation of result set'),
3260 ),
3278 ),
3261 (
3279 (
3262 b'p',
3280 b'p',
3263 b'show-stage',
3281 b'show-stage',
3264 [],
3282 [],
3265 _(b'print parsed tree at the given stage'),
3283 _(b'print parsed tree at the given stage'),
3266 _(b'NAME'),
3284 _(b'NAME'),
3267 ),
3285 ),
3268 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3286 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3269 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3287 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3270 ],
3288 ],
3271 b'REVSPEC',
3289 b'REVSPEC',
3272 )
3290 )
3273 def debugrevspec(ui, repo, expr, **opts):
3291 def debugrevspec(ui, repo, expr, **opts):
3274 """parse and apply a revision specification
3292 """parse and apply a revision specification
3275
3293
3276 Use -p/--show-stage option to print the parsed tree at the given stages.
3294 Use -p/--show-stage option to print the parsed tree at the given stages.
3277 Use -p all to print the tree at every stage.
3295 Use -p all to print the tree at every stage.
3278
3296
3279 Use the --no-show-revs option with -s or -p to print only the set
3297 Use the --no-show-revs option with -s or -p to print only the set
3280 representation or the parsed tree respectively.
3298 representation or the parsed tree respectively.
3281
3299
3282 Use --verify-optimized to compare the optimized result with the unoptimized
3300 Use --verify-optimized to compare the optimized result with the unoptimized
3283 one. Returns 1 if the optimized result differs.
3301 one. Returns 1 if the optimized result differs.
3284 """
3302 """
3285 opts = pycompat.byteskwargs(opts)
3303 opts = pycompat.byteskwargs(opts)
3286 aliases = ui.configitems(b'revsetalias')
3304 aliases = ui.configitems(b'revsetalias')
3287 stages = [
3305 stages = [
3288 (b'parsed', lambda tree: tree),
3306 (b'parsed', lambda tree: tree),
3289 (
3307 (
3290 b'expanded',
3308 b'expanded',
3291 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3309 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3292 ),
3310 ),
3293 (b'concatenated', revsetlang.foldconcat),
3311 (b'concatenated', revsetlang.foldconcat),
3294 (b'analyzed', revsetlang.analyze),
3312 (b'analyzed', revsetlang.analyze),
3295 (b'optimized', revsetlang.optimize),
3313 (b'optimized', revsetlang.optimize),
3296 ]
3314 ]
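# Editor's note (illustration): the tree produced by revsetlang.parse() below
# is threaded through these stages cumulatively:
#   parsed -> expanded -> concatenated -> analyzed -> optimized
# Every intermediate tree is kept in `treebystage`, which is what allows
# --verify-optimized to evaluate the 'analyzed' and 'optimized' trees
# independently and diff their results.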
3297 if opts[b'no_optimized']:
3315 if opts[b'no_optimized']:
3298 stages = stages[:-1]
3316 stages = stages[:-1]
3299 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3317 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3300 raise error.Abort(
3318 raise error.Abort(
3301 _(b'cannot use --verify-optimized with --no-optimized')
3319 _(b'cannot use --verify-optimized with --no-optimized')
3302 )
3320 )
3303 stagenames = {n for n, f in stages}
3321 stagenames = {n for n, f in stages}
3304
3322
3305 showalways = set()
3323 showalways = set()
3306 showchanged = set()
3324 showchanged = set()
3307 if ui.verbose and not opts[b'show_stage']:
3325 if ui.verbose and not opts[b'show_stage']:
3308 # show parsed tree by --verbose (deprecated)
3326 # show parsed tree by --verbose (deprecated)
3309 showalways.add(b'parsed')
3327 showalways.add(b'parsed')
3310 showchanged.update([b'expanded', b'concatenated'])
3328 showchanged.update([b'expanded', b'concatenated'])
3311 if opts[b'optimize']:
3329 if opts[b'optimize']:
3312 showalways.add(b'optimized')
3330 showalways.add(b'optimized')
3313 if opts[b'show_stage'] and opts[b'optimize']:
3331 if opts[b'show_stage'] and opts[b'optimize']:
3314 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3332 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3315 if opts[b'show_stage'] == [b'all']:
3333 if opts[b'show_stage'] == [b'all']:
3316 showalways.update(stagenames)
3334 showalways.update(stagenames)
3317 else:
3335 else:
3318 for n in opts[b'show_stage']:
3336 for n in opts[b'show_stage']:
3319 if n not in stagenames:
3337 if n not in stagenames:
3320 raise error.Abort(_(b'invalid stage name: %s') % n)
3338 raise error.Abort(_(b'invalid stage name: %s') % n)
3321 showalways.update(opts[b'show_stage'])
3339 showalways.update(opts[b'show_stage'])
3322
3340
3323 treebystage = {}
3341 treebystage = {}
3324 printedtree = None
3342 printedtree = None
3325 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3343 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3326 for n, f in stages:
3344 for n, f in stages:
3327 treebystage[n] = tree = f(tree)
3345 treebystage[n] = tree = f(tree)
3328 if n in showalways or (n in showchanged and tree != printedtree):
3346 if n in showalways or (n in showchanged and tree != printedtree):
3329 if opts[b'show_stage'] or n != b'parsed':
3347 if opts[b'show_stage'] or n != b'parsed':
3330 ui.write(b"* %s:\n" % n)
3348 ui.write(b"* %s:\n" % n)
3331 ui.write(revsetlang.prettyformat(tree), b"\n")
3349 ui.write(revsetlang.prettyformat(tree), b"\n")
3332 printedtree = tree
3350 printedtree = tree
3333
3351
3334 if opts[b'verify_optimized']:
3352 if opts[b'verify_optimized']:
3335 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3353 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3336 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3354 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3337 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3355 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3338 ui.writenoi18n(
3356 ui.writenoi18n(
3339 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3357 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3340 )
3358 )
3341 ui.writenoi18n(
3359 ui.writenoi18n(
3342 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3360 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3343 )
3361 )
3344 arevs = list(arevs)
3362 arevs = list(arevs)
3345 brevs = list(brevs)
3363 brevs = list(brevs)
3346 if arevs == brevs:
3364 if arevs == brevs:
3347 return 0
3365 return 0
3348 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3366 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3349 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3367 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3350 sm = difflib.SequenceMatcher(None, arevs, brevs)
3368 sm = difflib.SequenceMatcher(None, arevs, brevs)
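# Editor's worked example: SequenceMatcher.get_opcodes() yields
# (tag, alo, ahi, blo, bhi) tuples.  Comparing arevs=[1, 2, 4] with
# brevs=[1, 3, 4] gives ('equal', 0, 1, 0, 1), ('replace', 1, 2, 1, 2),
# ('equal', 2, 3, 2, 3), which the loop below renders as:
#    1
#   -2
#   +3
#    4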
3351 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3369 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3352 if tag in ('delete', 'replace'):
3370 if tag in ('delete', 'replace'):
3353 for c in arevs[alo:ahi]:
3371 for c in arevs[alo:ahi]:
3354 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3372 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3355 if tag in ('insert', 'replace'):
3373 if tag in ('insert', 'replace'):
3356 for c in brevs[blo:bhi]:
3374 for c in brevs[blo:bhi]:
3357 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3375 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3358 if tag == 'equal':
3376 if tag == 'equal':
3359 for c in arevs[alo:ahi]:
3377 for c in arevs[alo:ahi]:
3360 ui.write(b' %d\n' % c)
3378 ui.write(b' %d\n' % c)
3361 return 1
3379 return 1
3362
3380
3363 func = revset.makematcher(tree)
3381 func = revset.makematcher(tree)
3364 revs = func(repo)
3382 revs = func(repo)
3365 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3383 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3366 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3384 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3367 if not opts[b'show_revs']:
3385 if not opts[b'show_revs']:
3368 return
3386 return
3369 for c in revs:
3387 for c in revs:
3370 ui.write(b"%d\n" % c)
3388 ui.write(b"%d\n" % c)
3371
3389
3372
3390
3373 @command(
3391 @command(
3374 b'debugserve',
3392 b'debugserve',
3375 [
3393 [
3376 (
3394 (
3377 b'',
3395 b'',
3378 b'sshstdio',
3396 b'sshstdio',
3379 False,
3397 False,
3380 _(b'run an SSH server bound to process handles'),
3398 _(b'run an SSH server bound to process handles'),
3381 ),
3399 ),
3382 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3400 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3383 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3401 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3384 ],
3402 ],
3385 b'',
3403 b'',
3386 )
3404 )
3387 def debugserve(ui, repo, **opts):
3405 def debugserve(ui, repo, **opts):
3388 """run a server with advanced settings
3406 """run a server with advanced settings
3389
3407
3390 This command is similar to :hg:`serve`. It exists partially as a
3408 This command is similar to :hg:`serve`. It exists partially as a
3391 workaround for the fact that ``hg serve --stdio`` must have specific
3409 workaround for the fact that ``hg serve --stdio`` must have specific
3392 arguments for security reasons.
3410 arguments for security reasons.
3393 """
3411 """
3394 opts = pycompat.byteskwargs(opts)
3412 opts = pycompat.byteskwargs(opts)
3395
3413
3396 if not opts[b'sshstdio']:
3414 if not opts[b'sshstdio']:
3397 raise error.Abort(_(b'only --sshstdio is currently supported'))
3415 raise error.Abort(_(b'only --sshstdio is currently supported'))
3398
3416
3399 logfh = None
3417 logfh = None
3400
3418
3401 if opts[b'logiofd'] and opts[b'logiofile']:
3419 if opts[b'logiofd'] and opts[b'logiofile']:
3402 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3420 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3403
3421
3404 if opts[b'logiofd']:
3422 if opts[b'logiofd']:
3405 # Ideally we would be line buffered. But line buffering in binary
3423 # Ideally we would be line buffered. But line buffering in binary
3406 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3424 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3407 # buffering could have performance impacts. But since this isn't
3425 # buffering could have performance impacts. But since this isn't
3408 # performance critical code, it should be fine.
3426 # performance critical code, it should be fine.
3409 try:
3427 try:
3410 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3428 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3411 except OSError as e:
3429 except OSError as e:
3412 if e.errno != errno.ESPIPE:
3430 if e.errno != errno.ESPIPE:
3413 raise
3431 raise
3414 # can't seek a pipe, so `ab` mode fails on py3
3432 # can't seek a pipe, so `ab` mode fails on py3
3415 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3433 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3416 elif opts[b'logiofile']:
3434 elif opts[b'logiofile']:
3417 logfh = open(opts[b'logiofile'], b'ab', 0)
3435 logfh = open(opts[b'logiofile'], b'ab', 0)
3418
3436
3419 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3437 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3420 s.serve_forever()
3438 s.serve_forever()
3421
3439
3422
3440
3423 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3441 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3424 def debugsetparents(ui, repo, rev1, rev2=None):
3442 def debugsetparents(ui, repo, rev1, rev2=None):
3425 """manually set the parents of the current working directory
3443 """manually set the parents of the current working directory
3426
3444
3427 This is useful for writing repository conversion tools, but should
3445 This is useful for writing repository conversion tools, but should
3428 be used with care. For example, neither the working directory nor the
3446 be used with care. For example, neither the working directory nor the
3429 dirstate is updated, so file status may be incorrect after running this
3447 dirstate is updated, so file status may be incorrect after running this
3430 command.
3448 command.
3431
3449
3432 Returns 0 on success.
3450 Returns 0 on success.
3433 """
3451 """
3434
3452
3435 node1 = scmutil.revsingle(repo, rev1).node()
3453 node1 = scmutil.revsingle(repo, rev1).node()
3436 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3454 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3437
3455
3438 with repo.wlock():
3456 with repo.wlock():
3439 repo.setparents(node1, node2)
3457 repo.setparents(node1, node2)
3440
3458
3441
3459
3442 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3460 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3443 def debugsidedata(ui, repo, file_, rev=None, **opts):
3461 def debugsidedata(ui, repo, file_, rev=None, **opts):
3444 """dump the side data for a cl/manifest/file revision
3462 """dump the side data for a cl/manifest/file revision
3445
3463
3446 Use --verbose to dump the sidedata content."""
3464 Use --verbose to dump the sidedata content."""
3447 opts = pycompat.byteskwargs(opts)
3465 opts = pycompat.byteskwargs(opts)
3448 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3466 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3449 if rev is not None:
3467 if rev is not None:
3450 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3468 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3451 file_, rev = None, file_
3469 file_, rev = None, file_
3452 elif rev is None:
3470 elif rev is None:
3453 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3471 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3454 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3472 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3455 r = getattr(r, '_revlog', r)
3473 r = getattr(r, '_revlog', r)
3456 try:
3474 try:
3457 sidedata = r.sidedata(r.lookup(rev))
3475 sidedata = r.sidedata(r.lookup(rev))
3458 except KeyError:
3476 except KeyError:
3459 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3477 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3460 if sidedata:
3478 if sidedata:
3461 sidedata = list(sidedata.items())
3479 sidedata = list(sidedata.items())
3462 sidedata.sort()
3480 sidedata.sort()
3463 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3481 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3464 for key, value in sidedata:
3482 for key, value in sidedata:
3465 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3483 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3466 if ui.verbose:
3484 if ui.verbose:
3467 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3485 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3468
3486
3469
3487
3470 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3488 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3471 def debugssl(ui, repo, source=None, **opts):
3489 def debugssl(ui, repo, source=None, **opts):
3472 """test a secure connection to a server
3490 """test a secure connection to a server
3473
3491
3474 This builds the certificate chain for the server on Windows, installing the
3492 This builds the certificate chain for the server on Windows, installing the
3475 missing intermediates and trusted root via Windows Update if necessary. It
3493 missing intermediates and trusted root via Windows Update if necessary. It
3476 does nothing on other platforms.
3494 does nothing on other platforms.
3477
3495
3478 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3496 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3479 that server is used. See :hg:`help urls` for more information.
3497 that server is used. See :hg:`help urls` for more information.
3480
3498
3481 If the update succeeds, retry the original operation. Otherwise, the cause
3499 If the update succeeds, retry the original operation. Otherwise, the cause
3482 of the SSL error is likely another issue.
3500 of the SSL error is likely another issue.
3483 """
3501 """
3484 if not pycompat.iswindows:
3502 if not pycompat.iswindows:
3485 raise error.Abort(
3503 raise error.Abort(
3486 _(b'certificate chain building is only possible on Windows')
3504 _(b'certificate chain building is only possible on Windows')
3487 )
3505 )
3488
3506
3489 if not source:
3507 if not source:
3490 if not repo:
3508 if not repo:
3491 raise error.Abort(
3509 raise error.Abort(
3492 _(
3510 _(
3493 b"there is no Mercurial repository here, and no "
3511 b"there is no Mercurial repository here, and no "
3494 b"server specified"
3512 b"server specified"
3495 )
3513 )
3496 )
3514 )
3497 source = b"default"
3515 source = b"default"
3498
3516
3499 source, branches = hg.parseurl(ui.expandpath(source))
3517 source, branches = hg.parseurl(ui.expandpath(source))
3500 url = util.url(source)
3518 url = util.url(source)
3501
3519
3502 defaultport = {b'https': 443, b'ssh': 22}
3520 defaultport = {b'https': 443, b'ssh': 22}
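# Editor's note (hypothetical host name): b'https://hg.example.org' resolves
# to the address ('hg.example.org', 443); an explicit port in the URL wins
# over the scheme default, and any scheme other than https or ssh is rejected
# below.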
3503 if url.scheme in defaultport:
3521 if url.scheme in defaultport:
3504 try:
3522 try:
3505 addr = (url.host, int(url.port or defaultport[url.scheme]))
3523 addr = (url.host, int(url.port or defaultport[url.scheme]))
3506 except ValueError:
3524 except ValueError:
3507 raise error.Abort(_(b"malformed port number in URL"))
3525 raise error.Abort(_(b"malformed port number in URL"))
3508 else:
3526 else:
3509 raise error.Abort(_(b"only https and ssh connections are supported"))
3527 raise error.Abort(_(b"only https and ssh connections are supported"))
3510
3528
3511 from . import win32
3529 from . import win32
3512
3530
3513 s = ssl.wrap_socket(
3531 s = ssl.wrap_socket(
3514 socket.socket(),
3532 socket.socket(),
3515 ssl_version=ssl.PROTOCOL_TLS,
3533 ssl_version=ssl.PROTOCOL_TLS,
3516 cert_reqs=ssl.CERT_NONE,
3534 cert_reqs=ssl.CERT_NONE,
3517 ca_certs=None,
3535 ca_certs=None,
3518 )
3536 )
3519
3537
3520 try:
3538 try:
3521 s.connect(addr)
3539 s.connect(addr)
3522 cert = s.getpeercert(True)
3540 cert = s.getpeercert(True)
3523
3541
3524 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3542 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3525
3543
3526 complete = win32.checkcertificatechain(cert, build=False)
3544 complete = win32.checkcertificatechain(cert, build=False)
3527
3545
3528 if not complete:
3546 if not complete:
3529 ui.status(_(b'certificate chain is incomplete, updating... '))
3547 ui.status(_(b'certificate chain is incomplete, updating... '))
3530
3548
3531 if not win32.checkcertificatechain(cert):
3549 if not win32.checkcertificatechain(cert):
3532 ui.status(_(b'failed.\n'))
3550 ui.status(_(b'failed.\n'))
3533 else:
3551 else:
3534 ui.status(_(b'done.\n'))
3552 ui.status(_(b'done.\n'))
3535 else:
3553 else:
3536 ui.status(_(b'full certificate chain is available\n'))
3554 ui.status(_(b'full certificate chain is available\n'))
3537 finally:
3555 finally:
3538 s.close()
3556 s.close()
3539
3557
3540
3558
3541 @command(
3559 @command(
3542 b"debugbackupbundle",
3560 b"debugbackupbundle",
3543 [
3561 [
3544 (
3562 (
3545 b"",
3563 b"",
3546 b"recover",
3564 b"recover",
3547 b"",
3565 b"",
3548 b"brings the specified changeset back into the repository",
3566 b"brings the specified changeset back into the repository",
3549 )
3567 )
3550 ]
3568 ]
3551 + cmdutil.logopts,
3569 + cmdutil.logopts,
3552 _(b"hg debugbackupbundle [--recover HASH]"),
3570 _(b"hg debugbackupbundle [--recover HASH]"),
3553 )
3571 )
3554 def debugbackupbundle(ui, repo, *pats, **opts):
3572 def debugbackupbundle(ui, repo, *pats, **opts):
3555 """lists the changesets available in backup bundles
3573 """lists the changesets available in backup bundles
3556
3574
3557 Without any arguments, this command prints a list of the changesets in each
3575 Without any arguments, this command prints a list of the changesets in each
3558 backup bundle.
3576 backup bundle.
3559
3577
3560 --recover takes a changeset hash and unbundles the first bundle that
3578 --recover takes a changeset hash and unbundles the first bundle that
3561 contains that hash, which puts that changeset back in your repository.
3579 contains that hash, which puts that changeset back in your repository.
3562
3580
3563 --verbose will print the entire commit message and the bundle path for that
3581 --verbose will print the entire commit message and the bundle path for that
3564 backup.
3582 backup.
3565 """
3583 """
3566 backups = list(
3584 backups = list(
3567 filter(
3585 filter(
3568 os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
3586 os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
3569 )
3587 )
3570 )
3588 )
3571 backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)
3589 backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)
3572
3590
3573 opts = pycompat.byteskwargs(opts)
3591 opts = pycompat.byteskwargs(opts)
3574 opts[b"bundle"] = b""
3592 opts[b"bundle"] = b""
3575 opts[b"force"] = None
3593 opts[b"force"] = None
3576 limit = logcmdutil.getlimit(opts)
3594 limit = logcmdutil.getlimit(opts)
3577
3595
3578 def display(other, chlist, displayer):
3596 def display(other, chlist, displayer):
3579 if opts.get(b"newest_first"):
3597 if opts.get(b"newest_first"):
3580 chlist.reverse()
3598 chlist.reverse()
3581 count = 0
3599 count = 0
3582 for n in chlist:
3600 for n in chlist:
3583 if limit is not None and count >= limit:
3601 if limit is not None and count >= limit:
3584 break
3602 break
3585 parents = [True for p in other.changelog.parents(n) if p != nullid]
3603 parents = [True for p in other.changelog.parents(n) if p != nullid]
3586 if opts.get(b"no_merges") and len(parents) == 2:
3604 if opts.get(b"no_merges") and len(parents) == 2:
3587 continue
3605 continue
3588 count += 1
3606 count += 1
3589 displayer.show(other[n])
3607 displayer.show(other[n])
3590
3608
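# Editor's note: display() mirrors the `hg incoming` listing -- it honors
# --newest-first (reverse the order), --limit (stop after `limit` entries) and
# --no-merges (skip changesets with two parents), then hands each node to the
# standard changeset displayer.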
3591 recovernode = opts.get(b"recover")
3609 recovernode = opts.get(b"recover")
3592 if recovernode:
3610 if recovernode:
3593 if scmutil.isrevsymbol(repo, recovernode):
3611 if scmutil.isrevsymbol(repo, recovernode):
3594 ui.warn(_(b"%s already exists in the repo\n") % recovernode)
3612 ui.warn(_(b"%s already exists in the repo\n") % recovernode)
3595 return
3613 return
3596 elif backups:
3614 elif backups:
3597 msg = _(
3615 msg = _(
3598 b"Recover changesets using: hg debugbackupbundle --recover "
3616 b"Recover changesets using: hg debugbackupbundle --recover "
3599 b"<changeset hash>\n\nAvailable backup changesets:"
3617 b"<changeset hash>\n\nAvailable backup changesets:"
3600 )
3618 )
3601 ui.status(msg, label=b"status.removed")
3619 ui.status(msg, label=b"status.removed")
3602 else:
3620 else:
3603 ui.status(_(b"no backup changesets found\n"))
3621 ui.status(_(b"no backup changesets found\n"))
3604 return
3622 return
3605
3623
3606 for backup in backups:
3624 for backup in backups:
3607 # Much of this is copied from the hg incoming logic
3625 # Much of this is copied from the hg incoming logic
3608 source = ui.expandpath(os.path.relpath(backup, encoding.getcwd()))
3626 source = ui.expandpath(os.path.relpath(backup, encoding.getcwd()))
3609 source, branches = hg.parseurl(source, opts.get(b"branch"))
3627 source, branches = hg.parseurl(source, opts.get(b"branch"))
3610 try:
3628 try:
3611 other = hg.peer(repo, opts, source)
3629 other = hg.peer(repo, opts, source)
3612 except error.LookupError as ex:
3630 except error.LookupError as ex:
3613 msg = _(b"\nwarning: unable to open bundle %s") % source
3631 msg = _(b"\nwarning: unable to open bundle %s") % source
3614 hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
3632 hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
3615 ui.warn(msg, hint=hint)
3633 ui.warn(msg, hint=hint)
3616 continue
3634 continue
3617 revs, checkout = hg.addbranchrevs(
3635 revs, checkout = hg.addbranchrevs(
3618 repo, other, branches, opts.get(b"rev")
3636 repo, other, branches, opts.get(b"rev")
3619 )
3637 )
3620
3638
3621 if revs:
3639 if revs:
3622 revs = [other.lookup(rev) for rev in revs]
3640 revs = [other.lookup(rev) for rev in revs]
3623
3641
3624 quiet = ui.quiet
3642 quiet = ui.quiet
3625 try:
3643 try:
3626 ui.quiet = True
3644 ui.quiet = True
3627 other, chlist, cleanupfn = bundlerepo.getremotechanges(
3645 other, chlist, cleanupfn = bundlerepo.getremotechanges(
3628 ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
3646 ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
3629 )
3647 )
3630 except error.LookupError:
3648 except error.LookupError:
3631 continue
3649 continue
3632 finally:
3650 finally:
3633 ui.quiet = quiet
3651 ui.quiet = quiet
3634
3652
3635 try:
3653 try:
3636 if not chlist:
3654 if not chlist:
3637 continue
3655 continue
3638 if recovernode:
3656 if recovernode:
3639 with repo.lock(), repo.transaction(b"unbundle") as tr:
3657 with repo.lock(), repo.transaction(b"unbundle") as tr:
3640 if scmutil.isrevsymbol(other, recovernode):
3658 if scmutil.isrevsymbol(other, recovernode):
3641 ui.status(_(b"Unbundling %s\n") % (recovernode))
3659 ui.status(_(b"Unbundling %s\n") % (recovernode))
3642 f = hg.openpath(ui, source)
3660 f = hg.openpath(ui, source)
3643 gen = exchange.readbundle(ui, f, source)
3661 gen = exchange.readbundle(ui, f, source)
3644 if isinstance(gen, bundle2.unbundle20):
3662 if isinstance(gen, bundle2.unbundle20):
3645 bundle2.applybundle(
3663 bundle2.applybundle(
3646 repo,
3664 repo,
3647 gen,
3665 gen,
3648 tr,
3666 tr,
3649 source=b"unbundle",
3667 source=b"unbundle",
3650 url=b"bundle:" + source,
3668 url=b"bundle:" + source,
3651 )
3669 )
3652 else:
3670 else:
3653 gen.apply(repo, b"unbundle", b"bundle:" + source)
3671 gen.apply(repo, b"unbundle", b"bundle:" + source)
3654 break
3672 break
3655 else:
3673 else:
3656 backupdate = encoding.strtolocal(
3674 backupdate = encoding.strtolocal(
3657 time.strftime(
3675 time.strftime(
3658 "%a %H:%M, %Y-%m-%d",
3676 "%a %H:%M, %Y-%m-%d",
3659 time.localtime(os.path.getmtime(source)),
3677 time.localtime(os.path.getmtime(source)),
3660 )
3678 )
3661 )
3679 )
3662 ui.status(b"\n%s\n" % (backupdate.ljust(50)))
3680 ui.status(b"\n%s\n" % (backupdate.ljust(50)))
3663 if ui.verbose:
3681 if ui.verbose:
3664 ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
3682 ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
3665 else:
3683 else:
3666 opts[
3684 opts[
3667 b"template"
3685 b"template"
3668 ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
3686 ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
3669 displayer = logcmdutil.changesetdisplayer(
3687 displayer = logcmdutil.changesetdisplayer(
3670 ui, other, opts, False
3688 ui, other, opts, False
3671 )
3689 )
3672 display(other, chlist, displayer)
3690 display(other, chlist, displayer)
3673 displayer.close()
3691 displayer.close()
3674 finally:
3692 finally:
3675 cleanupfn()
3693 cleanupfn()
3676
3694
3677
3695
3678 @command(
3696 @command(
3679 b'debugsub',
3697 b'debugsub',
3680 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3698 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3681 _(b'[-r REV] [REV]'),
3699 _(b'[-r REV] [REV]'),
3682 )
3700 )
3683 def debugsub(ui, repo, rev=None):
3701 def debugsub(ui, repo, rev=None):
3684 ctx = scmutil.revsingle(repo, rev, None)
3702 ctx = scmutil.revsingle(repo, rev, None)
3685 for k, v in sorted(ctx.substate.items()):
3703 for k, v in sorted(ctx.substate.items()):
3686 ui.writenoi18n(b'path %s\n' % k)
3704 ui.writenoi18n(b'path %s\n' % k)
3687 ui.writenoi18n(b' source %s\n' % v[0])
3705 ui.writenoi18n(b' source %s\n' % v[0])
3688 ui.writenoi18n(b' revision %s\n' % v[1])
3706 ui.writenoi18n(b' revision %s\n' % v[1])
3689
3707
3690
3708
3691 @command(
3709 @command(
3692 b'debugsuccessorssets',
3710 b'debugsuccessorssets',
3693 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3711 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3694 _(b'[REV]'),
3712 _(b'[REV]'),
3695 )
3713 )
3696 def debugsuccessorssets(ui, repo, *revs, **opts):
3714 def debugsuccessorssets(ui, repo, *revs, **opts):
3697 """show set of successors for revision
3715 """show set of successors for revision
3698
3716
3699 A successors set of changeset A is a consistent group of revisions that
3717 A successors set of changeset A is a consistent group of revisions that
3700 succeed A. It contains non-obsolete changesets only unless closests
3718 succeed A. It contains non-obsolete changesets only unless closests
3701 successors set is set.
3719 successors set is set.
3702
3720
3703 In most cases a changeset A has a single successors set containing a single
3721 In most cases a changeset A has a single successors set containing a single
3704 successor (changeset A replaced by A').
3722 successor (changeset A replaced by A').
3705
3723
3706 A changeset that is made obsolete with no successors is called "pruned".
3724 A changeset that is made obsolete with no successors is called "pruned".
3707 Such changesets have no successors sets at all.
3725 Such changesets have no successors sets at all.
3708
3726
3709 A changeset that has been "split" will have a successors set containing
3727 A changeset that has been "split" will have a successors set containing
3710 more than one successor.
3728 more than one successor.
3711
3729
3712 A changeset that has been rewritten in multiple different ways is called
3730 A changeset that has been rewritten in multiple different ways is called
3713 "divergent". Such changesets have multiple successor sets (each of which
3731 "divergent". Such changesets have multiple successor sets (each of which
3714 may also be split, i.e. have multiple successors).
3732 may also be split, i.e. have multiple successors).
3715
3733
3716 Results are displayed as follows::
3734 Results are displayed as follows::
3717
3735
3718 <rev1>
3736 <rev1>
3719 <successors-1A>
3737 <successors-1A>
3720 <rev2>
3738 <rev2>
3721 <successors-2A>
3739 <successors-2A>
3722 <successors-2B1> <successors-2B2> <successors-2B3>
3740 <successors-2B1> <successors-2B2> <successors-2B3>
3723
3741
3724 Here rev2 has two possible (i.e. divergent) successors sets. The first
3742 Here rev2 has two possible (i.e. divergent) successors sets. The first
3725 holds one element, whereas the second holds three (i.e. the changeset has
3743 holds one element, whereas the second holds three (i.e. the changeset has
3726 been split).
3744 been split).
3727 """
3745 """
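# Editor's worked example (hypothetical changesets): if changeset A was split
# into B and C by one user and independently rewritten as D by another, A is
# divergent and would be listed with two successors sets:
#   <A>
#       <B> <C>
#       <D>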
3728 # passed to successorssets caching computation from one call to another
3746 # passed to successorssets caching computation from one call to another
3729 cache = {}
3747 cache = {}
3730 ctx2str = bytes
3748 ctx2str = bytes
3731 node2str = short
3749 node2str = short
3732 for rev in scmutil.revrange(repo, revs):
3750 for rev in scmutil.revrange(repo, revs):
3733 ctx = repo[rev]
3751 ctx = repo[rev]
3734 ui.write(b'%s\n' % ctx2str(ctx))
3752 ui.write(b'%s\n' % ctx2str(ctx))
3735 for succsset in obsutil.successorssets(
3753 for succsset in obsutil.successorssets(
3736 repo, ctx.node(), closest=opts['closest'], cache=cache
3754 repo, ctx.node(), closest=opts['closest'], cache=cache
3737 ):
3755 ):
3738 if succsset:
3756 if succsset:
3739 ui.write(b' ')
3757 ui.write(b' ')
3740 ui.write(node2str(succsset[0]))
3758 ui.write(node2str(succsset[0]))
3741 for node in succsset[1:]:
3759 for node in succsset[1:]:
3742 ui.write(b' ')
3760 ui.write(b' ')
3743 ui.write(node2str(node))
3761 ui.write(node2str(node))
3744 ui.write(b'\n')
3762 ui.write(b'\n')
3745
3763
3746
3764
3747 @command(b'debugtagscache', [])
3765 @command(b'debugtagscache', [])
3748 def debugtagscache(ui, repo):
3766 def debugtagscache(ui, repo):
3749 """display the contents of .hg/cache/hgtagsfnodes1"""
3767 """display the contents of .hg/cache/hgtagsfnodes1"""
3750 cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
3768 cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
3751 for r in repo:
3769 for r in repo:
3752 node = repo[r].node()
3770 node = repo[r].node()
3753 tagsnode = cache.getfnode(node, computemissing=False)
3771 tagsnode = cache.getfnode(node, computemissing=False)
3754 tagsnodedisplay = hex(tagsnode) if tagsnode else b'missing/invalid'
3772 tagsnodedisplay = hex(tagsnode) if tagsnode else b'missing/invalid'
3755 ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
3773 ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
3756
3774
3757
3775
3758 @command(
3776 @command(
3759 b'debugtemplate',
3777 b'debugtemplate',
3760 [
3778 [
3761 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
3779 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
3762 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
3780 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
3763 ],
3781 ],
3764 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3782 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3765 optionalrepo=True,
3783 optionalrepo=True,
3766 )
3784 )
3767 def debugtemplate(ui, repo, tmpl, **opts):
3785 def debugtemplate(ui, repo, tmpl, **opts):
3768 """parse and apply a template
3786 """parse and apply a template
3769
3787
3770 If -r/--rev is given, the template is processed as a log template and
3788 If -r/--rev is given, the template is processed as a log template and
3771 applied to the given changesets. Otherwise, it is processed as a generic
3789 applied to the given changesets. Otherwise, it is processed as a generic
3772 template.
3790 template.
3773
3791
3774 Use --verbose to print the parsed tree.
3792 Use --verbose to print the parsed tree.
3775 """
3793 """
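# Editor's example (hypothetical invocation, names chosen for illustration):
#   hg debugtemplate -r . -D greeting=hello '{greeting}: {desc|firstline}\n'
# processes the template as a log template for the working-directory parent,
# with `greeting` injected as an extra template keyword via -D.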
3776 revs = None
3794 revs = None
3777 if opts['rev']:
3795 if opts['rev']:
3778 if repo is None:
3796 if repo is None:
3779 raise error.RepoError(
3797 raise error.RepoError(
3780 _(b'there is no Mercurial repository here (.hg not found)')
3798 _(b'there is no Mercurial repository here (.hg not found)')
3781 )
3799 )
3782 revs = scmutil.revrange(repo, opts['rev'])
3800 revs = scmutil.revrange(repo, opts['rev'])
3783
3801
3784 props = {}
3802 props = {}
3785 for d in opts['define']:
3803 for d in opts['define']:
3786 try:
3804 try:
3787 k, v = (e.strip() for e in d.split(b'=', 1))
3805 k, v = (e.strip() for e in d.split(b'=', 1))
3788 if not k or k == b'ui':
3806 if not k or k == b'ui':
3789 raise ValueError
3807 raise ValueError
3790 props[k] = v
3808 props[k] = v
3791 except ValueError:
3809 except ValueError:
3792 raise error.Abort(_(b'malformed keyword definition: %s') % d)
3810 raise error.Abort(_(b'malformed keyword definition: %s') % d)
3793
3811
3794 if ui.verbose:
3812 if ui.verbose:
3795 aliases = ui.configitems(b'templatealias')
3813 aliases = ui.configitems(b'templatealias')
3796 tree = templater.parse(tmpl)
3814 tree = templater.parse(tmpl)
3797 ui.note(templater.prettyformat(tree), b'\n')
3815 ui.note(templater.prettyformat(tree), b'\n')
3798 newtree = templater.expandaliases(tree, aliases)
3816 newtree = templater.expandaliases(tree, aliases)
3799 if newtree != tree:
3817 if newtree != tree:
3800 ui.notenoi18n(
3818 ui.notenoi18n(
3801 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
3819 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
3802 )
3820 )
3803
3821
3804 if revs is None:
3822 if revs is None:
3805 tres = formatter.templateresources(ui, repo)
3823 tres = formatter.templateresources(ui, repo)
3806 t = formatter.maketemplater(ui, tmpl, resources=tres)
3824 t = formatter.maketemplater(ui, tmpl, resources=tres)
3807 if ui.verbose:
3825 if ui.verbose:
3808 kwds, funcs = t.symbolsuseddefault()
3826 kwds, funcs = t.symbolsuseddefault()
3809 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3827 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3810 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3828 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3811 ui.write(t.renderdefault(props))
3829 ui.write(t.renderdefault(props))
3830 else:
3831 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
3832 if ui.verbose:
3833 kwds, funcs = displayer.t.symbolsuseddefault()
3834 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3835 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3836 for r in revs:
3837 displayer.show(repo[r], **pycompat.strkwargs(props))
3838 displayer.close()
3839
3840
3841 @command(
3842 b'debuguigetpass',
3843 [
3844 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
3845 ],
3846 _(b'[-p TEXT]'),
3847 norepo=True,
3848 )
3849 def debuguigetpass(ui, prompt=b''):
3850 """show prompt to type password"""
3851 r = ui.getpass(prompt)
3852 if r is not None:
3853 r = encoding.strtolocal(r)
3854 else:
3855 r = b"<default response>"
3856 ui.writenoi18n(b'response: %s\n' % r)
3857
3858
3859 @command(
3860 b'debuguiprompt',
3861 [
3862 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
3863 ],
3864 _(b'[-p TEXT]'),
3865 norepo=True,
3866 )
3867 def debuguiprompt(ui, prompt=b''):
3868 """show plain prompt"""
3869 r = ui.prompt(prompt)
3870 ui.writenoi18n(b'response: %s\n' % r)
3871
3872
3873 @command(b'debugupdatecaches', [])
3874 def debugupdatecaches(ui, repo, *pats, **opts):
3875 """warm all known caches in the repository"""
3876 with repo.wlock(), repo.lock():
3877 repo.updatecaches(full=True)
3878
3879
3880 @command(
3881 b'debugupgraderepo',
3882 [
3883 (
3884 b'o',
3885 b'optimize',
3886 [],
3887 _(b'extra optimization to perform'),
3888 _(b'NAME'),
3889 ),
3890 (b'', b'run', False, _(b'performs an upgrade')),
3891 (b'', b'backup', True, _(b'keep the old repository content around')),
3892 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
3893 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
3894 (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
3895 ],
3896 )
3897 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
3898 """upgrade a repository to use different features
3899
3900 If no arguments are specified, the repository is evaluated for upgrade
3901 and a list of problems and potential optimizations is printed.
3902
3903 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
3904 can be influenced via additional arguments. More details will be provided
3905 by the command output when run without ``--run``.
3906
3907 During the upgrade, the repository will be locked and no writes will be
3908 allowed.
3909
3910 At the end of the upgrade, the repository may not be readable while new
3911 repository data is swapped in. This window will be as long as it takes to
3912 rename some directories inside the ``.hg`` directory. On most machines, this
3913 should complete almost instantaneously and the chances of a consumer being
3914 unable to access the repository should be low.
3915
3916 By default, all revlogs will be upgraded. You can restrict this using flags
3917 such as `--manifest`:
3918
3919 * `--manifest`: only optimize the manifest
3920 * `--no-manifest`: optimize all revlogs but the manifest
3921 * `--changelog`: optimize the changelog only
3922 * `--no-changelog --no-manifest`: optimize filelogs only
3923 * `--filelogs`: optimize the filelogs only
3924 * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimization
3925 """
3926 return upgrade.upgraderepo(
3927 ui, repo, run=run, optimize=optimize, backup=backup, **opts
3928 )
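# Illustrative usage sketch (not part of the upstream changeset): a few
# invocations built only from the options documented above.  Boolean flags
# gain ``--no-`` negations automatically; available optimization NAMEs are
# listed in the command's own report and vary by Mercurial version.
#
#   hg debugupgraderepo                                      # report only
#   hg debugupgraderepo --run                                # upgrade everything
#   hg debugupgraderepo --run --no-changelog --no-manifest   # filelogs only
#   hg debugupgraderepo --run --no-backup                    # do not keep old content
#   hg debugupgraderepo --run --optimize NAME                # request an extra optimization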
3929
3930
3931 @command(
3932 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
3933 )
3934 def debugwalk(ui, repo, *pats, **opts):
3935 """show how files match on given patterns"""
3936 opts = pycompat.byteskwargs(opts)
3937 m = scmutil.match(repo[None], pats, opts)
3938 if ui.verbose:
3939 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
3940 items = list(repo[None].walk(m))
3941 if not items:
3942 return
3943 f = lambda fn: fn
3944 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
3945 f = lambda fn: util.normpath(fn)
3946 fmt = b'f %%-%ds %%-%ds %%s' % (
3947 max([len(abs) for abs in items]),
3948 max([len(repo.pathto(abs)) for abs in items]),
3949 )
3950 for abs in items:
3951 line = fmt % (
3952 abs,
3953 f(repo.pathto(abs)),
3954 m.exact(abs) and b'exact' or b'',
3955 )
3956 ui.write(b"%s\n" % line.rstrip())
3957
3958
3959 @command(b'debugwhyunstable', [], _(b'REV'))
3960 def debugwhyunstable(ui, repo, rev):
3961 """explain instabilities of a changeset"""
3962 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
3963 dnodes = b''
3964 if entry.get(b'divergentnodes'):
3965 dnodes = (
3966 b' '.join(
3967 b'%s (%s)' % (ctx.hex(), ctx.phasestr())
3968 for ctx in entry[b'divergentnodes']
3969 )
3970 + b' '
3971 )
3972 ui.write(
3973 b'%s: %s%s %s\n'
3974 % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
3975 )
3976
3977
3978 @command(
3979 b'debugwireargs',
3980 [
3981 (b'', b'three', b'', b'three'),
3982 (b'', b'four', b'', b'four'),
3983 (b'', b'five', b'', b'five'),
3984 ]
3985 + cmdutil.remoteopts,
3986 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
3987 norepo=True,
3988 )
3989 def debugwireargs(ui, repopath, *vals, **opts):
3990 opts = pycompat.byteskwargs(opts)
3991 repo = hg.peer(ui, opts, repopath)
3992 for opt in cmdutil.remoteopts:
3993 del opts[opt[1]]
3994 args = {}
3995 for k, v in pycompat.iteritems(opts):
3996 if v:
3997 args[k] = v
3998 args = pycompat.strkwargs(args)
3999 # run twice to check that we don't mess up the stream for the next command
4000 res1 = repo.debugwireargs(*vals, **args)
4001 res2 = repo.debugwireargs(*vals, **args)
4002 ui.write(b"%s\n" % res1)
4003 if res1 != res2:
4004 ui.warn(b"%s\n" % res2)
4005
4006
4007 def _parsewirelangblocks(fh):
4008 activeaction = None
4009 blocklines = []
4010 lastindent = 0
4011
4012 for line in fh:
4013 line = line.rstrip()
4014 if not line:
4015 continue
4016
4017 if line.startswith(b'#'):
4018 continue
4019
4020 if not line.startswith(b' '):
4021 # New block. Flush previous one.
4022 if activeaction:
4023 yield activeaction, blocklines
4024
4025 activeaction = line
4026 blocklines = []
4027 lastindent = 0
4028 continue
4029
4030 # Else we start with an indent.
4031
4032 if not activeaction:
4033 raise error.Abort(_(b'indented line outside of block'))
4034
4035 indent = len(line) - len(line.lstrip())
4036
4037 # If this line is indented more than the last line, concatenate it.
4038 if indent > lastindent and blocklines:
4039 blocklines[-1] += line.lstrip()
4040 else:
4041 blocklines.append(line)
4042 lastindent = indent
4043
4044 # Flush last block.
4045 if activeaction:
4046 yield activeaction, blocklines
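# Illustrative sketch (not part of the upstream changeset): how the parser
# above splits a wire-language script into (action, lines) blocks.  The helper
# name below is hypothetical and exists only for this example.
def _examplewirelangparse():
    import io

    script = io.BytesIO(
        b'# comment and blank lines are skipped\n'
        b'command listkeys\n'
        b'    namespace bookmarks\n'
        b'close\n'
    )
    # Expected result:
    #   [(b'command listkeys', [b'    namespace bookmarks']),
    #    (b'close', [])]
    return list(_parsewirelangblocks(script))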
4047
4048
4049 @command(
4050 b'debugwireproto',
4051 [
4052 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4053 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4054 (
4055 b'',
4056 b'noreadstderr',
4057 False,
4058 _(b'do not read from stderr of the remote'),
4059 ),
4060 (
4061 b'',
4062 b'nologhandshake',
4063 False,
4064 _(b'do not log I/O related to the peer handshake'),
4065 ),
4066 ]
4067 + cmdutil.remoteopts,
4068 _(b'[PATH]'),
4069 optionalrepo=True,
4070 )
4071 def debugwireproto(ui, repo, path=None, **opts):
4072 """send wire protocol commands to a server
4073
4074 This command can be used to issue wire protocol commands to remote
4075 peers and to debug the raw data being exchanged.
4076
4077 ``--localssh`` will start an SSH server against the current repository
4078 and connect to that. By default, the connection will perform a handshake
4079 and establish an appropriate peer instance.
4080
4081 ``--peer`` can be used to bypass the handshake protocol and construct a
4082 peer instance using the specified class type. Valid values are ``raw``,
4083 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
4084 raw data payloads and don't support higher-level command actions.
4085
4086 ``--noreadstderr`` can be used to disable automatic reading from stderr
4087 of the peer (for SSH connections only). Disabling automatic reading of
4088 stderr is useful for making output more deterministic.
4089
4090 Commands are issued via a mini language which is specified via stdin.
4091 The language consists of individual actions to perform. An action is
4092 defined by a block. A block is defined as a line with no leading
4093 space followed by 0 or more lines with leading space. Blocks are
4094 effectively a high-level command with additional metadata.
4095
4096 Lines beginning with ``#`` are ignored.
4097
4098 The following sections denote available actions.
4099
4100 raw
4101 ---
4102
4103 Send raw data to the server.
4104
4105 The block payload contains the raw data to send as one atomic send
4106 operation. The data may not actually be delivered in a single system
4107 call: it depends on the abilities of the transport being used.
4108
4109 Each line in the block is de-indented and concatenated. Then, that
4110 value is evaluated as a Python b'' literal. This allows the use of
4111 backslash escaping, etc.
4112
4113 raw+
4114 ----
4115
4116 Behaves like ``raw`` except flushes output afterwards.
4117
4118 command <X>
4119 -----------
4120
4121 Send a request to run a named command, whose name follows the ``command``
4122 string.
4123
4124 Arguments to the command are defined as lines in this block. The format of
4125 each line is ``<key> <value>``. e.g.::
4126
4127 command listkeys
4128 namespace bookmarks
4129
4130 If the value begins with ``eval:``, it will be interpreted as a Python
4131 literal expression. Otherwise values are interpreted as Python b'' literals.
4132 This allows sending complex types and encoding special byte sequences via
4133 backslash escaping.
4134
4135 The following arguments have special meaning:
4136
4137 ``PUSHFILE``
4138 When defined, the *push* mechanism of the peer will be used instead
4139 of the static request-response mechanism and the content of the
4140 file specified in the value of this argument will be sent as the
4141 command payload.
4142
4143 This can be used to submit a local bundle file to the remote.
4144
4145 batchbegin
4146 ----------
4147
4148 Instruct the peer to begin a batched send.
4149
4150 All ``command`` blocks are queued for execution until the next
4151 ``batchsubmit`` block.
4152
4153 batchsubmit
4154 -----------
4155
4156 Submit previously queued ``command`` blocks as a batch request.
4157
4158 This action MUST be paired with a ``batchbegin`` action.
4159
4160 httprequest <method> <path>
4161 ---------------------------
4162
4163 (HTTP peer only)
4164
4165 Send an HTTP request to the peer.
4166
4167 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4168
4169 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4170 headers to add to the request. e.g. ``Accept: foo``.
4171
4172 The following arguments are special:
4173
4174 ``BODYFILE``
4175 The content of the file defined as the value to this argument will be
4176 transferred verbatim as the HTTP request body.
4177
4178 ``frame <type> <flags> <payload>``
4179 Send a unified protocol frame as part of the request body.
4180
4181 All frames will be collected and sent as the body to the HTTP
4182 request.
4183
4184 close
4185 -----
4186
4187 Close the connection to the server.
4188
4189 flush
4190 -----
4191
4192 Flush data written to the server.
4193
4194 readavailable
4195 -------------
4196
4197 Close the write end of the connection and read all available data from
4198 the server.
4199
4200 If the connection to the server encompasses multiple pipes, we poll both
4201 pipes and read available data.
4202
4203 readline
4204 --------
4205
4206 Read a line of output from the server. If there are multiple output
4207 pipes, reads only the main pipe.
4208
4209 ereadline
4210 ---------
4211
4212 Like ``readline``, but read from the stderr pipe, if available.
4213
4214 read <X>
4215 --------
4216
4217 ``read()`` N bytes from the server's main output pipe.
4218
4219 eread <X>
4220 ---------
4221
4222 ``read()`` N bytes from the server's stderr pipe, if available.
4223
4224 Specifying Unified Frame-Based Protocol Frames
4225 ----------------------------------------------
4226
4227 It is possible to emit *Unified Frame-Based Protocol* frames by using special
4228 syntax.
4229
4230 A frame is composed of a type, flags, and a payload. These can be parsed
4231 from a string of the form:
4232
4233 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4234
4235 ``request-id`` and ``stream-id`` are integers defining the request and
4236 stream identifiers.
4237
4238 ``type`` can be an integer value for the frame type or the string name
4239 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4240 ``command-name``.
4241
4242 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4243 components. Each component (and there can be just one) can be an integer
4244 or a flag name for stream flags or frame flags, respectively. Values are
4245 resolved to integers and then bitwise OR'd together.
4246
4247 ``payload`` represents the raw frame payload. If it begins with
4248 ``cbor:``, the following string is evaluated as Python code and the
4249 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4250 as a Python byte string literal.
4251 """
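    # Illustrative sketch (not part of the upstream changeset): a small script
    # for this command, fed on stdin, might look like the following.  The
    # command names shown are assumptions based on the docstring above; the
    # available wire commands depend on the peer::
    #
    #   command heads
    #
    #   batchbegin
    #   command listkeys
    #       namespace bookmarks
    #   command listkeys
    #       namespace phases
    #   batchsubmit
    #
    #   close
    #
    # Inside an ``httprequest`` block, frames use the human string form
    # described above; the type/flag names below are assumptions taken from
    # wireprotoframing.py and may vary between versions, e.g.::
    #
    #   frame 1 1 stream-begin command-request new cbor:{b'name': b'heads'}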
4252 opts = pycompat.byteskwargs(opts)
4253
4254 if opts[b'localssh'] and not repo:
4255 raise error.Abort(_(b'--localssh requires a repository'))
4256
4257 if opts[b'peer'] and opts[b'peer'] not in (
4258 b'raw',
4259 b'http2',
4260 b'ssh1',
4261 b'ssh2',
4262 ):
4263 raise error.Abort(
4264 _(b'invalid value for --peer'),
4265 hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
4266 )
4267
4268 if path and opts[b'localssh']:
4269 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4270
4271 if ui.interactive():
4272 ui.write(_(b'(waiting for commands on stdin)\n'))
4273
4274 blocks = list(_parsewirelangblocks(ui.fin))
4275
4276 proc = None
4277 stdin = None
4278 stdout = None
4279 stderr = None
4280 opener = None
4281
4282 if opts[b'localssh']:
4283 # We start the SSH server in its own process so there is process
4284 # separation. This prevents a whole class of potential bugs around
4285 # shared state from interfering with server operation.
4286 args = procutil.hgcmd() + [
4287 b'-R',
4288 repo.root,
4289 b'debugserve',
4290 b'--sshstdio',
4291 ]
4292 proc = subprocess.Popen(
4293 pycompat.rapply(procutil.tonativestr, args),
4294 stdin=subprocess.PIPE,
4295 stdout=subprocess.PIPE,
4296 stderr=subprocess.PIPE,
4297 bufsize=0,
4298 )
4299
4300 stdin = proc.stdin
4301 stdout = proc.stdout
4302 stderr = proc.stderr
4303
4304 # We turn the pipes into observers so we can log I/O.
4305 if ui.verbose or opts[b'peer'] == b'raw':
4306 stdin = util.makeloggingfileobject(
4307 ui, proc.stdin, b'i', logdata=True
4308 )
4309 stdout = util.makeloggingfileobject(
4310 ui, proc.stdout, b'o', logdata=True
4311 )
4312 stderr = util.makeloggingfileobject(
4313 ui, proc.stderr, b'e', logdata=True
4314 )
4315
4316 # --localssh also implies the peer connection settings.
4317
4318 url = b'ssh://localserver'
4319 autoreadstderr = not opts[b'noreadstderr']
4320
4321 if opts[b'peer'] == b'ssh1':
4322 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4323 peer = sshpeer.sshv1peer(
4324 ui,
4325 url,
4326 proc,
4327 stdin,
4328 stdout,
4329 stderr,
4330 None,
4331 autoreadstderr=autoreadstderr,
4332 )
4333 elif opts[b'peer'] == b'ssh2':
4334 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4335 peer = sshpeer.sshv2peer(
4336 ui,
4337 url,
4338 proc,
4339 stdin,
4340 stdout,
4341 stderr,
4342 None,
4343 autoreadstderr=autoreadstderr,
4344 )
4345 elif opts[b'peer'] == b'raw':
4346 ui.write(_(b'using raw connection to peer\n'))
4347 peer = None
4348 else:
4349 ui.write(_(b'creating ssh peer from handshake results\n'))
4350 peer = sshpeer.makepeer(
4351 ui,
4352 url,
4353 proc,
4354 stdin,
4355 stdout,
4356 stderr,
4357 autoreadstderr=autoreadstderr,
4358 )
4359
4360 elif path:
4361 # We bypass hg.peer() so we can proxy the sockets.
4362 # TODO consider not doing this because we skip
4363 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4364 u = util.url(path)
4365 if u.scheme != b'http':
4366 raise error.Abort(_(b'only http:// paths are currently supported'))
4367
4368 url, authinfo = u.authinfo()
4369 openerargs = {
4370 'useragent': b'Mercurial debugwireproto',
4371 }
4372
4373 # Turn pipes/sockets into observers so we can log I/O.
4374 if ui.verbose:
4375 openerargs.update(
4376 {
4377 'loggingfh': ui,
4378 'loggingname': b's',
4379 'loggingopts': {
4380 'logdata': True,
4381 'logdataapis': False,
4382 },
4383 }
4384 )
4385
4386 if ui.debugflag:
4387 openerargs['loggingopts']['logdataapis'] = True
4388
4389 # Don't send default headers when in raw mode. This allows us to
4390 # bypass most of the behavior of our URL handling code so we can
4391 # have near complete control over what's sent on the wire.
4392 if opts[b'peer'] == b'raw':
4393 openerargs['sendaccept'] = False
4394
4395 opener = urlmod.opener(ui, authinfo, **openerargs)
4396
4397 if opts[b'peer'] == b'http2':
4398 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4399 # We go through makepeer() because we need an API descriptor for
4400 # the peer instance to be useful.
4401 with ui.configoverride(
4402 {(b'experimental', b'httppeer.advertise-v2'): True}
4403 ):
4404 if opts[b'nologhandshake']:
4405 ui.pushbuffer()
4406
4407 peer = httppeer.makepeer(ui, path, opener=opener)
4408
4409 if opts[b'nologhandshake']:
4410 ui.popbuffer()
4411
4412 if not isinstance(peer, httppeer.httpv2peer):
4413 raise error.Abort(
4414 _(
4415 b'could not instantiate HTTP peer for '
4416 b'wire protocol version 2'
4417 ),
4418 hint=_(
4419 b'the server may not have the feature '
4420 b'enabled or is not allowing this '
4421 b'client version'
4422 ),
4423 )
4424
4425 elif opts[b'peer'] == b'raw':
4426 ui.write(_(b'using raw connection to peer\n'))
4427 peer = None
4428 elif opts[b'peer']:
4429 raise error.Abort(
4430 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4431 )
4432 else:
4433 peer = httppeer.makepeer(ui, path, opener=opener)
4434
4435 # We /could/ populate stdin/stdout with sock.makefile()...
4436 else:
4437 raise error.Abort(_(b'unsupported connection configuration'))
4438
4439 batchedcommands = None
4440
4441 # Now perform actions based on the parsed wire language instructions.
4442 for action, lines in blocks:
4443 if action in (b'raw', b'raw+'):
4444 if not stdin:
4445 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4446
4447 # Concatenate the data together.
4448 data = b''.join(l.lstrip() for l in lines)
4449 data = stringutil.unescapestr(data)
4450 stdin.write(data)
4451
4452 if action == b'raw+':
4453 stdin.flush()
4454 elif action == b'flush':
4455 if not stdin:
4456 raise error.Abort(_(b'cannot call flush on this peer'))
4457 stdin.flush()
4458 elif action.startswith(b'command'):
4459 if not peer:
4460 raise error.Abort(
4461 _(
4462 b'cannot send commands unless peer instance '
4463 b'is available'
4464 )
4465 )
4466
4467 command = action.split(b' ', 1)[1]
4468
4469 args = {}
4470 for line in lines:
4471 # We need to allow empty values.
4472 fields = line.lstrip().split(b' ', 1)
4473 if len(fields) == 1:
4474 key = fields[0]
4475 value = b''
4476 else:
4477 key, value = fields
4478
4479 if value.startswith(b'eval:'):
4480 value = stringutil.evalpythonliteral(value[5:])
4481 else:
4482 value = stringutil.unescapestr(value)
4483
4484 args[key] = value
4485
4486 if batchedcommands is not None:
4487 batchedcommands.append((command, args))
4488 continue
4489
4490 ui.status(_(b'sending %s command\n') % command)
4491
4492 if b'PUSHFILE' in args:
4493 with open(args[b'PUSHFILE'], 'rb') as fh:
4494 del args[b'PUSHFILE']
4495 res, output = peer._callpush(
4496 command, fh, **pycompat.strkwargs(args)
4497 )
4498 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4499 ui.status(
4500 _(b'remote output: %s\n') % stringutil.escapestr(output)
4501 )
4502 else:
4503 with peer.commandexecutor() as e:
4504 res = e.callcommand(command, args).result()
4505
4506 if isinstance(res, wireprotov2peer.commandresponse):
4507 val = res.objects()
4508 ui.status(
4509 _(b'response: %s\n')
4510 % stringutil.pprint(val, bprefix=True, indent=2)
4511 )
4512 else:
4513 ui.status(
4514 _(b'response: %s\n')
4515 % stringutil.pprint(res, bprefix=True, indent=2)
4516 )
4517
4518 elif action == b'batchbegin':
4519 if batchedcommands is not None:
4520 raise error.Abort(_(b'nested batchbegin not allowed'))
4521
4522 batchedcommands = []
4523 elif action == b'batchsubmit':
4524 # There is a batching API we could go through. But it would be
4525 # difficult to normalize requests into function calls. It is easier
4526 # to bypass this layer and normalize to commands + args.
4527 ui.status(
4528 _(b'sending batch with %d sub-commands\n')
4529 % len(batchedcommands)
4530 )
4531 assert peer is not None
4532 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4533 ui.status(
4534 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4535 )
4536
4537 batchedcommands = None
4538
4539 elif action.startswith(b'httprequest '):
4540 if not opener:
4541 raise error.Abort(
4542 _(b'cannot use httprequest without an HTTP peer')
4543 )
4544
4545 request = action.split(b' ', 2)
4546 if len(request) != 3:
4547 raise error.Abort(
4548 _(
4549 b'invalid httprequest: expected format is '
4550 b'"httprequest <method> <path>'
4551 )
4552 )
4553
4554 method, httppath = request[1:]
4555 headers = {}
4556 body = None
4557 frames = []
4558 for line in lines:
4559 line = line.lstrip()
4560 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4561 if m:
4562 # Headers need to use native strings.
4563 key = pycompat.strurl(m.group(1))
4564 value = pycompat.strurl(m.group(2))
4565 headers[key] = value
4566 continue
4567
4568 if line.startswith(b'BODYFILE '):
4569 with open(line.split(b' ', 1), b'rb') as fh:
4570 body = fh.read()
4571 elif line.startswith(b'frame '):
4572 frame = wireprotoframing.makeframefromhumanstring(
4573 line[len(b'frame ') :]
4574 )
4575
4576 frames.append(frame)
4577 else:
4578 raise error.Abort(
4579 _(b'unknown argument to httprequest: %s') % line
4580 )
4581
4582 url = path + httppath
4583
4584 if frames:
4585 body = b''.join(bytes(f) for f in frames)
4586
4587 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4588
4589 # urllib.Request insists on using has_data() as a proxy for
4590 # determining the request method. Override that to use our
4591 # explicitly requested method.
4592 req.get_method = lambda: pycompat.sysstr(method)
4593
4594 try:
4595 res = opener.open(req)
4596 body = res.read()
4597 except util.urlerr.urlerror as e:
4598 # read() method must be called, but only exists in Python 2
4599 getattr(e, 'read', lambda: None)()
4600 continue
4601
4602 ct = res.headers.get('Content-Type')
4603 if ct == 'application/mercurial-cbor':
4604 ui.write(
4605 _(b'cbor> %s\n')
4606 % stringutil.pprint(
4607 cborutil.decodeall(body), bprefix=True, indent=2
4608 )
4609 )
4610
4611 elif action == b'close':
4612 assert peer is not None
4613 peer.close()
4614 elif action == b'readavailable':
4615 if not stdout or not stderr:
4616 raise error.Abort(
4617 _(b'readavailable not available on this peer')
4618 )
4619
4620 stdin.close()
4621 stdout.read()
4622 stderr.read()
4623
4624 elif action == b'readline':
4625 if not stdout:
4626 raise error.Abort(_(b'readline not available on this peer'))
4627 stdout.readline()
4628 elif action == b'ereadline':
4629 if not stderr:
4630 raise error.Abort(_(b'ereadline not available on this peer'))
4631 stderr.readline()
4632 elif action.startswith(b'read '):
4633 count = int(action.split(b' ', 1)[1])
4634 if not stdout:
4635 raise error.Abort(_(b'read not available on this peer'))
4636 stdout.read(count)
4637 elif action.startswith(b'eread '):
4638 count = int(action.split(b' ', 1)[1])
4639 if not stderr:
4640 raise error.Abort(_(b'eread not available on this peer'))
4641 stderr.read(count)
4642 else:
4643 raise error.Abort(_(b'unknown action: %s') % action)
4644
4645 if batchedcommands is not None:
4646 raise error.Abort(_(b'unclosed "batchbegin" request'))
4647
4648 if peer:
4649 peer.close()
4650
4651 if proc:
4652 proc.kill()
@@ -1,1307 +1,1502
1
1
2 Function to test discovery between two repos in both directions, using both the local shortcut
2 Function to test discovery between two repos in both directions, using both the local shortcut
3 (which is currently not activated by default) and the full remotable protocol:
3 (which is currently not activated by default) and the full remotable protocol:
4
4
5 $ testdesc() { # revs_a, revs_b, dagdesc
5 $ testdesc() { # revs_a, revs_b, dagdesc
6 > if [ -d foo ]; then rm -rf foo; fi
6 > if [ -d foo ]; then rm -rf foo; fi
7 > hg init foo
7 > hg init foo
8 > cd foo
8 > cd foo
9 > hg debugbuilddag "$3"
9 > hg debugbuilddag "$3"
10 > hg clone . a $1 --quiet
10 > hg clone . a $1 --quiet
11 > hg clone . b $2 --quiet
11 > hg clone . b $2 --quiet
12 > echo
12 > echo
13 > echo "% -- a -> b tree"
13 > echo "% -- a -> b tree"
14 > hg -R a debugdiscovery b --verbose --old
14 > hg -R a debugdiscovery b --verbose --old
15 > echo
15 > echo
16 > echo "% -- a -> b set"
16 > echo "% -- a -> b set"
17 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true
17 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true
18 > echo
18 > echo
19 > echo "% -- a -> b set (tip only)"
19 > echo "% -- a -> b set (tip only)"
20 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true --rev tip
20 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true --rev tip
21 > echo
21 > echo
22 > echo "% -- b -> a tree"
22 > echo "% -- b -> a tree"
23 > hg -R b debugdiscovery a --verbose --old
23 > hg -R b debugdiscovery a --verbose --old
24 > echo
24 > echo
25 > echo "% -- b -> a set"
25 > echo "% -- b -> a set"
26 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true
26 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true
27 > echo
27 > echo
28 > echo "% -- b -> a set (tip only)"
28 > echo "% -- b -> a set (tip only)"
29 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true --rev tip
29 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true --rev tip
30 > cd ..
30 > cd ..
31 > }
31 > }
32
32
33
33
34 Small superset:
34 Small superset:
35
35
36 $ testdesc '-ra1 -ra2' '-rb1 -rb2 -rb3' '
36 $ testdesc '-ra1 -ra2' '-rb1 -rb2 -rb3' '
37 > +2:f +1:a1:b1
37 > +2:f +1:a1:b1
38 > <f +4 :a2
38 > <f +4 :a2
39 > +5 :b2
39 > +5 :b2
40 > <f +3 :b3'
40 > <f +3 :b3'
41
41
42 % -- a -> b tree
42 % -- a -> b tree
43 comparing with b
43 comparing with b
44 searching for changes
44 searching for changes
45 unpruned common: 01241442b3c2 66f7d451a68b b5714e113bc0
45 unpruned common: 01241442b3c2 66f7d451a68b b5714e113bc0
46 elapsed time: * seconds (glob)
46 elapsed time: * seconds (glob)
47 heads summary:
47 heads summary:
48 total common heads: 2
48 total common heads: 2
49 also local heads: 2
49 also local heads: 2
50 also remote heads: 1
50 also remote heads: 1
51 both: 1
51 both: 1
52 local heads: 2
52 local heads: 2
53 common: 2
53 common: 2
54 missing: 0
54 missing: 0
55 remote heads: 3
55 remote heads: 3
56 common: 1
56 common: 1
57 unknown: 2
57 unknown: 2
58 local changesets: 7
58 local changesets: 7
59 common: 7
59 common: 7
60 heads: 2
60 heads: 2
61 roots: 1
61 roots: 1
62 missing: 0
62 missing: 0
63 heads: 0
63 heads: 0
64 roots: 0
64 roots: 0
65 first undecided set: 3
66 heads: 1
67 roots: 1
68 common: 3
69 missing: 0
65 common heads: 01241442b3c2 b5714e113bc0
70 common heads: 01241442b3c2 b5714e113bc0
66
71
67 % -- a -> b set
72 % -- a -> b set
68 comparing with b
73 comparing with b
69 query 1; heads
74 query 1; heads
70 searching for changes
75 searching for changes
71 all local changesets known remotely
76 all local changesets known remotely
72 elapsed time: * seconds (glob)
77 elapsed time: * seconds (glob)
73 heads summary:
78 heads summary:
74 total common heads: 2
79 total common heads: 2
75 also local heads: 2
80 also local heads: 2
76 also remote heads: 1
81 also remote heads: 1
77 both: 1
82 both: 1
78 local heads: 2
83 local heads: 2
79 common: 2
84 common: 2
80 missing: 0
85 missing: 0
81 remote heads: 3
86 remote heads: 3
82 common: 1
87 common: 1
83 unknown: 2
88 unknown: 2
84 local changesets: 7
89 local changesets: 7
85 common: 7
90 common: 7
86 heads: 2
91 heads: 2
87 roots: 1
92 roots: 1
88 missing: 0
93 missing: 0
89 heads: 0
94 heads: 0
90 roots: 0
95 roots: 0
96 first undecided set: 3
97 heads: 1
98 roots: 1
99 common: 3
100 missing: 0
91 common heads: 01241442b3c2 b5714e113bc0
101 common heads: 01241442b3c2 b5714e113bc0
92
102
93 % -- a -> b set (tip only)
103 % -- a -> b set (tip only)
94 comparing with b
104 comparing with b
95 query 1; heads
105 query 1; heads
96 searching for changes
106 searching for changes
97 all local changesets known remotely
107 all local changesets known remotely
98 elapsed time: * seconds (glob)
108 elapsed time: * seconds (glob)
99 heads summary:
109 heads summary:
100 total common heads: 1
110 total common heads: 1
101 also local heads: 1
111 also local heads: 1
102 also remote heads: 0
112 also remote heads: 0
103 both: 0
113 both: 0
104 local heads: 2
114 local heads: 2
105 common: 1
115 common: 1
106 missing: 1
116 missing: 1
107 remote heads: 3
117 remote heads: 3
108 common: 0
118 common: 0
109 unknown: 3
119 unknown: 3
110 local changesets: 7
120 local changesets: 7
111 common: 6
121 common: 6
112 heads: 1
122 heads: 1
113 roots: 1
123 roots: 1
114 missing: 1
124 missing: 1
115 heads: 1
125 heads: 1
116 roots: 1
126 roots: 1
127 first undecided set: 6
128 heads: 2
129 roots: 1
130 common: 5
131 missing: 1
117 common heads: b5714e113bc0
132 common heads: b5714e113bc0
118
133
119 % -- b -> a tree
134 % -- b -> a tree
120 comparing with a
135 comparing with a
121 searching for changes
136 searching for changes
122 unpruned common: 01241442b3c2 b5714e113bc0
137 unpruned common: 01241442b3c2 b5714e113bc0
123 elapsed time: * seconds (glob)
138 elapsed time: * seconds (glob)
124 heads summary:
139 heads summary:
125 total common heads: 2
140 total common heads: 2
126 also local heads: 1
141 also local heads: 1
127 also remote heads: 2
142 also remote heads: 2
128 both: 1
143 both: 1
129 local heads: 3
144 local heads: 3
130 common: 1
145 common: 1
131 missing: 2
146 missing: 2
132 remote heads: 2
147 remote heads: 2
133 common: 2
148 common: 2
134 unknown: 0
149 unknown: 0
135 local changesets: 15
150 local changesets: 15
136 common: 7
151 common: 7
137 heads: 2
152 heads: 2
138 roots: 1
153 roots: 1
139 missing: 8
154 missing: 8
140 heads: 2
155 heads: 2
141 roots: 2
156 roots: 2
157 first undecided set: 8
158 heads: 2
159 roots: 2
160 common: 0
161 missing: 8
142 common heads: 01241442b3c2 b5714e113bc0
162 common heads: 01241442b3c2 b5714e113bc0
143
163
144 % -- b -> a set
164 % -- b -> a set
145 comparing with a
165 comparing with a
146 query 1; heads
166 query 1; heads
147 searching for changes
167 searching for changes
148 all remote heads known locally
168 all remote heads known locally
149 elapsed time: * seconds (glob)
169 elapsed time: * seconds (glob)
150 heads summary:
170 heads summary:
151 total common heads: 2
171 total common heads: 2
152 also local heads: 1
172 also local heads: 1
153 also remote heads: 2
173 also remote heads: 2
154 both: 1
174 both: 1
155 local heads: 3
175 local heads: 3
156 common: 1
176 common: 1
157 missing: 2
177 missing: 2
158 remote heads: 2
178 remote heads: 2
159 common: 2
179 common: 2
160 unknown: 0
180 unknown: 0
161 local changesets: 15
181 local changesets: 15
162 common: 7
182 common: 7
163 heads: 2
183 heads: 2
164 roots: 1
184 roots: 1
165 missing: 8
185 missing: 8
166 heads: 2
186 heads: 2
167 roots: 2
187 roots: 2
188 first undecided set: 8
189 heads: 2
190 roots: 2
191 common: 0
192 missing: 8
168 common heads: 01241442b3c2 b5714e113bc0
193 common heads: 01241442b3c2 b5714e113bc0
169
194
170 % -- b -> a set (tip only)
195 % -- b -> a set (tip only)
171 comparing with a
196 comparing with a
172 query 1; heads
197 query 1; heads
173 searching for changes
198 searching for changes
174 all remote heads known locally
199 all remote heads known locally
175 elapsed time: * seconds (glob)
200 elapsed time: * seconds (glob)
176 heads summary:
201 heads summary:
177 total common heads: 2
202 total common heads: 2
178 also local heads: 1
203 also local heads: 1
179 also remote heads: 2
204 also remote heads: 2
180 both: 1
205 both: 1
181 local heads: 3
206 local heads: 3
182 common: 1
207 common: 1
183 missing: 2
208 missing: 2
184 remote heads: 2
209 remote heads: 2
185 common: 2
210 common: 2
186 unknown: 0
211 unknown: 0
187 local changesets: 15
212 local changesets: 15
188 common: 7
213 common: 7
189 heads: 2
214 heads: 2
190 roots: 1
215 roots: 1
191 missing: 8
216 missing: 8
192 heads: 2
217 heads: 2
193 roots: 2
218 roots: 2
219 first undecided set: 8
220 heads: 2
221 roots: 2
222 common: 0
223 missing: 8
194 common heads: 01241442b3c2 b5714e113bc0
224 common heads: 01241442b3c2 b5714e113bc0
195
225
196
226
227 Many new:
228
229 $ testdesc '-ra1 -ra2' '-rb' '
230 > +2:f +3:a1 +3:b
231 > <f +30 :a2'
232
233 % -- a -> b tree
234 comparing with b
235 searching for changes
236 unpruned common: bebd167eb94d
237 elapsed time: * seconds (glob)
238 heads summary:
239 total common heads: 1
240 also local heads: 1
241 also remote heads: 0
242 both: 0
243 local heads: 2
244 common: 1
245 missing: 1
246 remote heads: 1
247 common: 0
248 unknown: 1
249 local changesets: 35
250 common: 5
251 heads: 1
252 roots: 1
253 missing: 30
254 heads: 1
255 roots: 1
256 first undecided set: 34
257 heads: 2
258 roots: 1
259 common: 4
260 missing: 30
261 common heads: bebd167eb94d
262
263 % -- a -> b set
264 comparing with b
265 query 1; heads
266 searching for changes
267 taking initial sample
268 searching: 2 queries
269 query 2; still undecided: 29, sample size is: 29
270 2 total queries in *.????s (glob)
271 elapsed time: * seconds (glob)
272 heads summary:
273 total common heads: 1
274 also local heads: 1
275 also remote heads: 0
276 both: 0
277 local heads: 2
278 common: 1
279 missing: 1
280 remote heads: 1
281 common: 0
282 unknown: 1
283 local changesets: 35
284 common: 5
285 heads: 1
286 roots: 1
287 missing: 30
288 heads: 1
289 roots: 1
290 first undecided set: 34
291 heads: 2
292 roots: 1
293 common: 4
294 missing: 30
295 common heads: bebd167eb94d
296
297 % -- a -> b set (tip only)
298 comparing with b
299 query 1; heads
300 searching for changes
301 taking quick initial sample
302 searching: 2 queries
303 query 2; still undecided: 31, sample size is: 31
304 2 total queries in *.????s (glob)
305 elapsed time: * seconds (glob)
306 heads summary:
307 total common heads: 1
308 also local heads: 0
309 also remote heads: 0
310 both: 0
311 local heads: 2
312 common: 0
313 missing: 2
314 remote heads: 1
315 common: 0
316 unknown: 1
317 local changesets: 35
318 common: 2
319 heads: 1
320 roots: 1
321 missing: 33
322 heads: 2
323 roots: 2
324 first undecided set: 35
325 heads: 2
326 roots: 1
327 common: 2
328 missing: 33
329 common heads: 66f7d451a68b
330
331 % -- b -> a tree
332 comparing with a
333 searching for changes
334 unpruned common: 66f7d451a68b bebd167eb94d
335 elapsed time: * seconds (glob)
336 heads summary:
337 total common heads: 1
338 also local heads: 0
339 also remote heads: 1
340 both: 0
341 local heads: 1
342 common: 0
343 missing: 1
344 remote heads: 2
345 common: 1
346 unknown: 1
347 local changesets: 8
348 common: 5
349 heads: 1
350 roots: 1
351 missing: 3
352 heads: 1
353 roots: 1
354 first undecided set: 3
355 heads: 1
356 roots: 1
357 common: 0
358 missing: 3
359 common heads: bebd167eb94d
360
361 % -- b -> a set
362 comparing with a
363 query 1; heads
364 searching for changes
365 taking initial sample
366 searching: 2 queries
367 query 2; still undecided: 2, sample size is: 2
368 2 total queries in *.????s (glob)
369 elapsed time: * seconds (glob)
370 heads summary:
371 total common heads: 1
372 also local heads: 0
373 also remote heads: 1
374 both: 0
375 local heads: 1
376 common: 0
377 missing: 1
378 remote heads: 2
379 common: 1
380 unknown: 1
381 local changesets: 8
382 common: 5
383 heads: 1
384 roots: 1
385 missing: 3
386 heads: 1
387 roots: 1
388 first undecided set: 3
389 heads: 1
390 roots: 1
391 common: 0
392 missing: 3
393 common heads: bebd167eb94d
394
395 % -- b -> a set (tip only)
396 comparing with a
397 query 1; heads
398 searching for changes
399 taking initial sample
400 searching: 2 queries
401 query 2; still undecided: 2, sample size is: 2
402 2 total queries in *.????s (glob)
403 elapsed time: * seconds (glob)
404 heads summary:
405 total common heads: 1
406 also local heads: 0
407 also remote heads: 1
408 both: 0
409 local heads: 1
410 common: 0
411 missing: 1
412 remote heads: 2
413 common: 1
414 unknown: 1
415 local changesets: 8
416 common: 5
417 heads: 1
418 roots: 1
419 missing: 3
420 heads: 1
421 roots: 1
422 first undecided set: 3
423 heads: 1
424 roots: 1
425 common: 0
426 missing: 3
427 common heads: bebd167eb94d
428
429 Both sides many new with stub:
430
431 $ testdesc '-ra1 -ra2' '-rb' '
432 > +2:f +2:a1 +30 :b
433 > <f +30 :a2'
434
435 % -- a -> b tree
436 comparing with b
437 searching for changes
438 unpruned common: 2dc09a01254d
439 elapsed time: * seconds (glob)
440 heads summary:
441 total common heads: 1
442 also local heads: 1
443 also remote heads: 0
444 both: 0
445 local heads: 2
446 common: 1
447 missing: 1
448 remote heads: 1
449 common: 0
450 unknown: 1
451 local changesets: 34
452 common: 4
453 heads: 1
454 roots: 1
455 missing: 30
456 heads: 1
457 roots: 1
458 first undecided set: 33
459 heads: 2
460 roots: 1
461 common: 3
462 missing: 30
463 common heads: 2dc09a01254d
464
465 % -- a -> b set
466 comparing with b
467 query 1; heads
468 searching for changes
469 taking initial sample
470 searching: 2 queries
471 query 2; still undecided: 29, sample size is: 29
472 2 total queries in *.????s (glob)
473 elapsed time: * seconds (glob)
474 heads summary:
475 total common heads: 1
476 also local heads: 1
477 also remote heads: 0
478 both: 0
479 local heads: 2
480 common: 1
481 missing: 1
482 remote heads: 1
483 common: 0
484 unknown: 1
485 local changesets: 34
486 common: 4
487 heads: 1
488 roots: 1
489 missing: 30
490 heads: 1
491 roots: 1
492 first undecided set: 33
493 heads: 2
494 roots: 1
495 common: 3
496 missing: 30
497 common heads: 2dc09a01254d
498
499 % -- a -> b set (tip only)
500 comparing with b
501 query 1; heads
502 searching for changes
503 taking quick initial sample
504 searching: 2 queries
505 query 2; still undecided: 31, sample size is: 31
506 2 total queries in *.????s (glob)
507 elapsed time: * seconds (glob)
508 heads summary:
509 total common heads: 1
510 also local heads: 0
511 also remote heads: 0
512 both: 0
513 local heads: 2
514 common: 0
515 missing: 2
516 remote heads: 1
517 common: 0
518 unknown: 1
519 local changesets: 34
520 common: 2
521 heads: 1
522 roots: 1
523 missing: 32
524 heads: 2
525 roots: 2
526 first undecided set: 34
527 heads: 2
528 roots: 1
529 common: 2
530 missing: 32
531 common heads: 66f7d451a68b
532
533 % -- b -> a tree
534 comparing with a
535 searching for changes
536 unpruned common: 2dc09a01254d 66f7d451a68b
537 elapsed time: * seconds (glob)
538 heads summary:
539 total common heads: 1
540 also local heads: 0
541 also remote heads: 1
542 both: 0
543 local heads: 1
544 common: 0
545 missing: 1
546 remote heads: 2
547 common: 1
548 unknown: 1
549 local changesets: 34
550 common: 4
551 heads: 1
552 roots: 1
553 missing: 30
554 heads: 1
555 roots: 1
556 first undecided set: 30
557 heads: 1
558 roots: 1
559 common: 0
560 missing: 30
561 common heads: 2dc09a01254d
562
563 % -- b -> a set
564 comparing with a
565 query 1; heads
566 searching for changes
567 taking initial sample
568 searching: 2 queries
569 query 2; still undecided: 29, sample size is: 29
570 2 total queries in *.????s (glob)
571 elapsed time: * seconds (glob)
572 heads summary:
573 total common heads: 1
574 also local heads: 0
575 also remote heads: 1
576 both: 0
577 local heads: 1
578 common: 0
579 missing: 1
580 remote heads: 2
581 common: 1
582 unknown: 1
583 local changesets: 34
584 common: 4
585 heads: 1
586 roots: 1
587 missing: 30
588 heads: 1
589 roots: 1
590 first undecided set: 30
591 heads: 1
592 roots: 1
593 common: 0
594 missing: 30
595 common heads: 2dc09a01254d
596
597 % -- b -> a set (tip only)
598 comparing with a
599 query 1; heads
600 searching for changes
601 taking initial sample
602 searching: 2 queries
603 query 2; still undecided: 29, sample size is: 29
604 2 total queries in *.????s (glob)
605 elapsed time: * seconds (glob)
606 heads summary:
607 total common heads: 1
608 also local heads: 0
609 also remote heads: 1
610 both: 0
611 local heads: 1
612 common: 0
613 missing: 1
614 remote heads: 2
615 common: 1
616 unknown: 1
617 local changesets: 34
618 common: 4
619 heads: 1
620 roots: 1
621 missing: 30
622 heads: 1
623 roots: 1
624 first undecided set: 30
625 heads: 1
626 roots: 1
627 common: 0
628 missing: 30
629 common heads: 2dc09a01254d
630
631
632 Both many new:
633
634 $ testdesc '-ra' '-rb' '
635 > +2:f +30 :b
636 > <f +30 :a'
637
638 % -- a -> b tree
639 comparing with b
640 searching for changes
641 unpruned common: 66f7d451a68b
642 elapsed time: * seconds (glob)
643 heads summary:
644 total common heads: 1
645 also local heads: 0
646 also remote heads: 0
647 both: 0
648 local heads: 1
649 common: 0
650 missing: 1
651 remote heads: 1
652 common: 0
653 unknown: 1
654 local changesets: 32
655 common: 2
656 heads: 1
657 roots: 1
658 missing: 30
659 heads: 1
660 roots: 1
661 first undecided set: 32
662 heads: 1
663 roots: 1
664 common: 2
665 missing: 30
666 common heads: 66f7d451a68b
667
668 % -- a -> b set
669 comparing with b
670 query 1; heads
671 searching for changes
672 taking quick initial sample
673 searching: 2 queries
674 query 2; still undecided: 31, sample size is: 31
675 2 total queries in *.????s (glob)
676 elapsed time: * seconds (glob)
677 heads summary:
678 total common heads: 1
679 also local heads: 0
680 also remote heads: 0
681 both: 0
682 local heads: 1
683 common: 0
684 missing: 1
685 remote heads: 1
686 common: 0
687 unknown: 1
688 local changesets: 32
689 common: 2
690 heads: 1
691 roots: 1
692 missing: 30
693 heads: 1
694 roots: 1
695 first undecided set: 32
696 heads: 1
697 roots: 1
698 common: 2
699 missing: 30
700 common heads: 66f7d451a68b
701
702 % -- a -> b set (tip only)
703 comparing with b
704 query 1; heads
705 searching for changes
706 taking quick initial sample
707 searching: 2 queries
708 query 2; still undecided: 31, sample size is: 31
709 2 total queries in *.????s (glob)
710 elapsed time: * seconds (glob)
711 heads summary:
712 total common heads: 1
713 also local heads: 0
714 also remote heads: 0
715 both: 0
716 local heads: 1
717 common: 0
718 missing: 1
719 remote heads: 1
720 common: 0
721 unknown: 1
722 local changesets: 32
723 common: 2
724 heads: 1
725 roots: 1
726 missing: 30
727 heads: 1
728 roots: 1
729 first undecided set: 32
730 heads: 1
731 roots: 1
732 common: 2
733 missing: 30
734 common heads: 66f7d451a68b
735
736 % -- b -> a tree
737 comparing with a
738 searching for changes
739 unpruned common: 66f7d451a68b
740 elapsed time: * seconds (glob)
741 heads summary:
742 total common heads: 1
743 also local heads: 0
744 also remote heads: 0
745 both: 0
746 local heads: 1
747 common: 0
748 missing: 1
749 remote heads: 1
750 common: 0
751 unknown: 1
752 local changesets: 32
753 common: 2
754 heads: 1
755 roots: 1
756 missing: 30
757 heads: 1
758 roots: 1
759 first undecided set: 32
760 heads: 1
761 roots: 1
762 common: 2
763 missing: 30
764 common heads: 66f7d451a68b
765
766 % -- b -> a set
767 comparing with a
768 query 1; heads
769 searching for changes
770 taking quick initial sample
771 searching: 2 queries
772 query 2; still undecided: 31, sample size is: 31
773 2 total queries in *.????s (glob)
774 elapsed time: * seconds (glob)
775 heads summary:
776 total common heads: 1
777 also local heads: 0
778 also remote heads: 0
779 both: 0
780 local heads: 1
781 common: 0
782 missing: 1
783 remote heads: 1
784 common: 0
785 unknown: 1
786 local changesets: 32
787 common: 2
788 heads: 1
789 roots: 1
790 missing: 30
791 heads: 1
792 roots: 1
793 first undecided set: 32
794 heads: 1
795 roots: 1
796 common: 2
797 missing: 30
798 common heads: 66f7d451a68b
799
800 % -- b -> a set (tip only)
801 comparing with a
802 query 1; heads
803 searching for changes
804 taking quick initial sample
805 searching: 2 queries
806 query 2; still undecided: 31, sample size is: 31
807 2 total queries in *.????s (glob)
808 elapsed time: * seconds (glob)
809 heads summary:
810 total common heads: 1
811 also local heads: 0
812 also remote heads: 0
813 both: 0
814 local heads: 1
815 common: 0
816 missing: 1
817 remote heads: 1
818 common: 0
819 unknown: 1
820 local changesets: 32
821 common: 2
822 heads: 1
823 roots: 1
824 missing: 30
825 heads: 1
826 roots: 1
827 first undecided set: 32
828 heads: 1
829 roots: 1
830 common: 2
831 missing: 30
832 common heads: 66f7d451a68b
833
834
835 Both many new skewed:
836
837 $ testdesc '-ra' '-rb' '
838 > +2:f +30 :b
839 > <f +50 :a'
840
841 % -- a -> b tree
842 comparing with b
843 searching for changes
844 unpruned common: 66f7d451a68b
845 elapsed time: * seconds (glob)
846 heads summary:
847 total common heads: 1
848 also local heads: 0
849 also remote heads: 0
850 both: 0
851 local heads: 1
852 common: 0
853 missing: 1
854 remote heads: 1
855 common: 0
856 unknown: 1
857 local changesets: 52
858 common: 2
859 heads: 1
860 roots: 1
861 missing: 50
862 heads: 1
863 roots: 1
864 first undecided set: 52
865 heads: 1
866 roots: 1
867 common: 2
868 missing: 50
869 common heads: 66f7d451a68b
870
871 % -- a -> b set
872 comparing with b
873 query 1; heads
874 searching for changes
875 taking quick initial sample
876 searching: 2 queries
877 query 2; still undecided: 51, sample size is: 51
878 2 total queries in *.????s (glob)
879 elapsed time: * seconds (glob)
880 heads summary:
881 total common heads: 1
882 also local heads: 0
883 also remote heads: 0
884 both: 0
885 local heads: 1
886 common: 0
887 missing: 1
888 remote heads: 1
889 common: 0
890 unknown: 1
891 local changesets: 52
892 common: 2
893 heads: 1
894 roots: 1
895 missing: 50
896 heads: 1
897 roots: 1
898 first undecided set: 52
899 heads: 1
900 roots: 1
901 common: 2
902 missing: 50
903 common heads: 66f7d451a68b
904
905 % -- a -> b set (tip only)
906 comparing with b
907 query 1; heads
908 searching for changes
909 taking quick initial sample
910 searching: 2 queries
911 query 2; still undecided: 51, sample size is: 51
912 2 total queries in *.????s (glob)
913 elapsed time: * seconds (glob)
914 heads summary:
915 total common heads: 1
916 also local heads: 0
917 also remote heads: 0
918 both: 0
919 local heads: 1
920 common: 0
921 missing: 1
922 remote heads: 1
923 common: 0
924 unknown: 1
925 local changesets: 52
926 common: 2
927 heads: 1
928 roots: 1
929 missing: 50
930 heads: 1
931 roots: 1
932 first undecided set: 52
933 heads: 1
934 roots: 1
935 common: 2
936 missing: 50
937 common heads: 66f7d451a68b
938
939 % -- b -> a tree
940 comparing with a
941 searching for changes
942 unpruned common: 66f7d451a68b
943 elapsed time: * seconds (glob)
944 heads summary:
945 total common heads: 1
946 also local heads: 0
947 also remote heads: 0
948 both: 0
949 local heads: 1
950 common: 0
951 missing: 1
952 remote heads: 1
953 common: 0
954 unknown: 1
955 local changesets: 32
956 common: 2
957 heads: 1
958 roots: 1
959 missing: 30
960 heads: 1
961 roots: 1
962 first undecided set: 32
963 heads: 1
964 roots: 1
965 common: 2
966 missing: 30
967 common heads: 66f7d451a68b
968
969 % -- b -> a set
970 comparing with a
971 query 1; heads
972 searching for changes
973 taking quick initial sample
974 searching: 2 queries
975 query 2; still undecided: 31, sample size is: 31
976 2 total queries in *.????s (glob)
977 elapsed time: * seconds (glob)
978 heads summary:
979 total common heads: 1
980 also local heads: 0
981 also remote heads: 0
982 both: 0
983 local heads: 1
984 common: 0
985 missing: 1
986 remote heads: 1
987 common: 0
988 unknown: 1
989 local changesets: 32
990 common: 2
991 heads: 1
992 roots: 1
993 missing: 30
994 heads: 1
995 roots: 1
996 first undecided set: 32
997 heads: 1
998 roots: 1
999 common: 2
1000 missing: 30
1001 common heads: 66f7d451a68b
1002
1003 % -- b -> a set (tip only)
1004 comparing with a
1005 query 1; heads
1006 searching for changes
1007 taking quick initial sample
1008 searching: 2 queries
1009 query 2; still undecided: 31, sample size is: 31
1010 2 total queries in *.????s (glob)
1011 elapsed time: * seconds (glob)
1012 heads summary:
1013 total common heads: 1
1014 also local heads: 0
1015 also remote heads: 0
1016 both: 0
1017 local heads: 1
1018 common: 0
1019 missing: 1
1020 remote heads: 1
1021 common: 0
1022 unknown: 1
1023 local changesets: 32
1024 common: 2
1025 heads: 1
1026 roots: 1
1027 missing: 30
1028 heads: 1
1029 roots: 1
1030 first undecided set: 32
1031 heads: 1
1032 roots: 1
1033 common: 2
1034 missing: 30
1035 common heads: 66f7d451a68b
1036
1037
1038 Both many new on top of long history:
1039
1040 $ testdesc '-ra' '-rb' '
1041 > +1000:f +30 :b
1042 > <f +50 :a'
1043
1044 % -- a -> b tree
1045 comparing with b
1046 searching for changes
1047 unpruned common: 7ead0cba2838
1048 elapsed time: * seconds (glob)
1049 heads summary:
1050 total common heads: 1
1051 also local heads: 0
1052 also remote heads: 0
1053 both: 0
1054 local heads: 1
1055 common: 0
1056 missing: 1
1057 remote heads: 1
1058 common: 0
1059 unknown: 1
1060 local changesets: 1050
1061 common: 1000
1062 heads: 1
1063 roots: 1
1064 missing: 50
1065 heads: 1
1066 roots: 1
1067 first undecided set: 1050
1068 heads: 1
1069 roots: 1
1070 common: 1000
1071 missing: 50
1072 common heads: 7ead0cba2838
1073
1074 % -- a -> b set
1075 comparing with b
1076 query 1; heads
1077 searching for changes
1078 taking quick initial sample
1079 searching: 2 queries
1080 query 2; still undecided: 1049, sample size is: 11
1081 sampling from both directions
1082 searching: 3 queries
1083 query 3; still undecided: 31, sample size is: 31
1084 3 total queries in *.????s (glob)
1085 elapsed time: * seconds (glob)
1086 heads summary:
1087 total common heads: 1
1088 also local heads: 0
1089 also remote heads: 0
1090 both: 0
1091 local heads: 1
1092 common: 0
1093 missing: 1
1094 remote heads: 1
1095 common: 0
1096 unknown: 1
1097 local changesets: 1050
1098 common: 1000
1099 heads: 1
1100 roots: 1
1101 missing: 50
1102 heads: 1
1103 roots: 1
1104 first undecided set: 1050
1105 heads: 1
1106 roots: 1
1107 common: 1000
1108 missing: 50
1109 common heads: 7ead0cba2838
1110
1111 % -- a -> b set (tip only)
1112 comparing with b
1113 query 1; heads
1114 searching for changes
1115 taking quick initial sample
1116 searching: 2 queries
1117 query 2; still undecided: 1049, sample size is: 11
1118 sampling from both directions
1119 searching: 3 queries
1120 query 3; still undecided: 31, sample size is: 31
1121 3 total queries in *.????s (glob)
1122 elapsed time: * seconds (glob)
1123 heads summary:
1124 total common heads: 1
1125 also local heads: 0
1126 also remote heads: 0
1127 both: 0
1128 local heads: 1
1129 common: 0
1130 missing: 1
1131 remote heads: 1
1132 common: 0
1133 unknown: 1
1134 local changesets: 1050
1135 common: 1000
1136 heads: 1
1137 roots: 1
1138 missing: 50
1139 heads: 1
1140 roots: 1
1141 first undecided set: 1050
1142 heads: 1
1143 roots: 1
1144 common: 1000
1145 missing: 50
1146 common heads: 7ead0cba2838
1147
1148 % -- b -> a tree
1149 comparing with a
1150 searching for changes
1151 unpruned common: 7ead0cba2838
1152 elapsed time: * seconds (glob)
1153 heads summary:
1154 total common heads: 1
1155 also local heads: 0
1156 also remote heads: 0
1157 both: 0
1158 local heads: 1
1159 common: 0
1160 missing: 1
1161 remote heads: 1
1162 common: 0
1163 unknown: 1
1164 local changesets: 1030
1165 common: 1000
1166 heads: 1
1167 roots: 1
1168 missing: 30
1169 heads: 1
1170 roots: 1
1171 first undecided set: 1030
1172 heads: 1
1173 roots: 1
1174 common: 1000
1175 missing: 30
1176 common heads: 7ead0cba2838
1177
1178 % -- b -> a set
1179 comparing with a
1180 query 1; heads
1181 searching for changes
1182 taking quick initial sample
1183 searching: 2 queries
1184 query 2; still undecided: 1029, sample size is: 11
1185 sampling from both directions
1186 searching: 3 queries
1187 query 3; still undecided: 15, sample size is: 15
1188 3 total queries in *.????s (glob)
1189 elapsed time: * seconds (glob)
1190 heads summary:
1191 total common heads: 1
1192 also local heads: 0
1193 also remote heads: 0
1194 both: 0
1195 local heads: 1
1196 common: 0
1197 missing: 1
1198 remote heads: 1
1199 common: 0
1200 unknown: 1
1201 local changesets: 1030
1202 common: 1000
1203 heads: 1
1204 roots: 1
1205 missing: 30
1206 heads: 1
1207 roots: 1
1208 first undecided set: 1030
1209 heads: 1
1210 roots: 1
1211 common: 1000
1212 missing: 30
1213 common heads: 7ead0cba2838
1214
1215 % -- b -> a set (tip only)
1216 comparing with a
1217 query 1; heads
1218 searching for changes
1219 taking quick initial sample
1220 searching: 2 queries
1221 query 2; still undecided: 1029, sample size is: 11
1222 sampling from both directions
1223 searching: 3 queries
1224 query 3; still undecided: 15, sample size is: 15
1225 3 total queries in *.????s (glob)
1226 elapsed time: * seconds (glob)
1227 heads summary:
1228 total common heads: 1
1229 also local heads: 0
1230 also remote heads: 0
1231 both: 0
1232 local heads: 1
1233 common: 0
1234 missing: 1
1235 remote heads: 1
1236 common: 0
1237 unknown: 1
1238 local changesets: 1030
1239 common: 1000
1240 heads: 1
1241 roots: 1
1242 missing: 30
1243 heads: 1
1244 roots: 1
1245 first undecided set: 1030
1246 heads: 1
1247 roots: 1
1248 common: 1000
1249 missing: 30
1250 common heads: 7ead0cba2838
1251
1252
1253 One with >200 heads. We now switch to send them all in the initial roundtrip, but still do sampling for the later request.
1254
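The "query N; still undecided: X, sample size is: Y" lines seen throughout these runs come from the sampling loop of discovery: each round asks the other side about a sample of the still-undecided revisions, marks the ancestors of known nodes as common and the descendants of unknown nodes as missing, and repeats until nothing is undecided. The snippet below is a deliberately simplified model of that loop (invented names, fixed tiny sample size); the real setdiscovery code is smarter about choosing and sizing samples and, as the comment above notes, special-cases repositories with very many heads.

  # Simplified model of the sampling loop; a sketch of the idea only, not
  # Mercurial's setdiscovery implementation.
  import random

  def discover(local_revs, ancestors, descendants, server_has,
               sample_size=3, seed=0):
      rng = random.Random(seed)
      undecided = set(local_revs)
      common, missing = set(), set()
      queries = 0
      while undecided:
          sample = rng.sample(sorted(undecided),
                              min(sample_size, len(undecided)))
          queries += 1
          for rev in sample:
              if rev in server_has:                  # remote answered "known"
                  common |= ancestors[rev] | {rev}
              else:                                  # remote answered "unknown"
                  missing |= descendants[rev] | {rev}
          undecided -= common | missing
          print('query %d; still undecided: %d' % (queries, len(undecided)))
      return common, missing

  # linear history 0..9 where the remote side only has revisions 0..4
  n = 10
  ancestors = {r: set(range(r)) for r in range(n)}
  descendants = {r: set(range(r + 1, n)) for r in range(n)}
  common, missing = discover(range(n), ancestors, descendants,
                             server_has=set(range(5)))
  print(sorted(common), sorted(missing))
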
1255 $ hg init manyheads
1256 $ cd manyheads
1257 $ echo "+300:r @a" >dagdesc
1258 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1259 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1260 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1261 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1262 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1263 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1264 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1265 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1266 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1267 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1268 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1269 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1270 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
1271 $ echo "@b *r+3" >>dagdesc # one more head
1272 $ hg debugbuilddag <dagdesc
1273 reading DAG from stdin
1274
1275 $ hg heads -t --template . | wc -c
1276 \s*261 (re)
1277
1278 $ hg clone -b a . a
1279 adding changesets
1280 adding manifests
1281 adding file changes
1282 added 1340 changesets with 0 changes to 0 files (+259 heads)
1283 new changesets 1ea73414a91b:1c51e2c80832
1284 updating to branch a
1285 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1286 $ hg clone -b b . b
1287 adding changesets
1288 adding manifests
1289 adding file changes
1290 added 304 changesets with 0 changes to 0 files
1291 new changesets 1ea73414a91b:513314ca8b3a
1292 updating to branch b
1293 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1294
1295 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --config devel.discovery.randomize=false
1296 comparing with b
1297 query 1; heads
1298 searching for changes
1299 taking quick initial sample
1300 searching: 2 queries
1301 query 2; still undecided: 1080, sample size is: 100
1302 sampling from both directions
1303 searching: 3 queries
1304 query 3; still undecided: 980, sample size is: 200
1305 sampling from both directions
1306 searching: 4 queries
1307 query 4; still undecided: 497, sample size is: 210
1308 sampling from both directions
1309 searching: 5 queries
1310 query 5; still undecided: 285, sample size is: 220
1311 sampling from both directions
1312 searching: 6 queries
1313 query 6; still undecided: 63, sample size is: 63
1314 6 total queries in *.????s (glob)
1315 elapsed time: * seconds (glob)
1316 heads summary:
1317 total common heads: 1
1318 also local heads: 0
1319 also remote heads: 0
1320 both: 0
1321 local heads: 260
1322 common: 0
1323 missing: 260
1324 remote heads: 1
1325 common: 0
1326 unknown: 1
1327 local changesets: 1340
1328 common: 300
1329 heads: 1
1330 roots: 1
1331 missing: 1040
1332 heads: 260
1333 roots: 260
1334 first undecided set: 1340
1335 heads: 260
1336 roots: 1
1337 common: 300
1338 missing: 1040
1339 common heads: 3ee37d65064a
1340 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --rev tip
1341 comparing with b
1342 query 1; heads
1343 searching for changes
1344 taking quick initial sample
1345 searching: 2 queries
1346 query 2; still undecided: 303, sample size is: 9
1347 sampling from both directions
1348 searching: 3 queries
1349 query 3; still undecided: 3, sample size is: 3
1350 3 total queries in *.????s (glob)
1351 elapsed time: * seconds (glob)
1352 heads summary:
1353 total common heads: 1
1354 also local heads: 0
1355 also remote heads: 0
1356 both: 0
1357 local heads: 260
1358 common: 0
1359 missing: 260
1360 remote heads: 1
1361 common: 0
1362 unknown: 1
1363 local changesets: 1340
1364 common: 300
1365 heads: 1
1366 roots: 1
1367 missing: 1040
1368 heads: 260
1369 roots: 260
1370 first undecided set: 1340
1371 heads: 260
1372 roots: 1
1373 common: 300
1374 missing: 1040
1375 common heads: 3ee37d65064a
1376
1377 Test actual protocol when pulling one new head in addition to common heads
1378
1379 $ hg clone -U b c
1380 $ hg -R c id -ir tip
1381 513314ca8b3a
1382 $ hg -R c up -qr default
1383 $ touch c/f
1384 $ hg -R c ci -Aqm "extra head"
1385 $ hg -R c id -i
1386 e64a39e7da8b
1387
1388 $ hg serve -R c -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
1389 $ cat hg.pid >> $DAEMON_PIDS
1390
1391 $ hg -R b incoming http://localhost:$HGPORT/ -T '{node|short}\n'
1392 comparing with http://localhost:$HGPORT/
1393 searching for changes
1394 e64a39e7da8b
1395
1396 $ killdaemons.py
1397 $ cut -d' ' -f6- access.log | grep -v cmd=known # cmd=known uses random sampling
1398 "GET /?cmd=capabilities HTTP/1.1" 200 -
1399 "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D513314ca8b3ae4dac8eec56966265b00fcf866db x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1400 "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:$USUAL_BUNDLE_CAPS$&cg=1&common=513314ca8b3ae4dac8eec56966265b00fcf866db&heads=e64a39e7da8b0d54bc63e81169aff001c13b3477 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1401 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1402 $ cat errors.log
1403
1404 $ cd ..
1405
1406
1218
1408
1219 $ mkdir issue4438
1409 $ mkdir issue4438
1220 $ cd issue4438
1410 $ cd issue4438
1221 #if false
1411 #if false
1222 generate new bundles:
1412 generate new bundles:
1223 $ hg init r1
1413 $ hg init r1
1224 $ for i in `"$PYTHON" $TESTDIR/seq.py 101`; do hg -R r1 up -qr null && hg -R r1 branch -q b$i && hg -R r1 ci -qmb$i; done
1414 $ for i in `"$PYTHON" $TESTDIR/seq.py 101`; do hg -R r1 up -qr null && hg -R r1 branch -q b$i && hg -R r1 ci -qmb$i; done
1225 $ hg clone -q r1 r2
1415 $ hg clone -q r1 r2
1226 $ for i in `"$PYTHON" $TESTDIR/seq.py 10`; do hg -R r1 up -qr null && hg -R r1 branch -q c$i && hg -R r1 ci -qmc$i; done
1416 $ for i in `"$PYTHON" $TESTDIR/seq.py 10`; do hg -R r1 up -qr null && hg -R r1 branch -q c$i && hg -R r1 ci -qmc$i; done
1227 $ hg -R r2 branch -q r2change && hg -R r2 ci -qmr2change
1417 $ hg -R r2 branch -q r2change && hg -R r2 ci -qmr2change
1228 $ hg -R r1 bundle -qa $TESTDIR/bundles/issue4438-r1.hg
1418 $ hg -R r1 bundle -qa $TESTDIR/bundles/issue4438-r1.hg
1229 $ hg -R r2 bundle -qa $TESTDIR/bundles/issue4438-r2.hg
1419 $ hg -R r2 bundle -qa $TESTDIR/bundles/issue4438-r2.hg
1230 #else
1420 #else
1231 use existing bundles:
1421 use existing bundles:
1232 $ hg init r1
1422 $ hg init r1
1233 $ hg -R r1 -q unbundle $TESTDIR/bundles/issue4438-r1.hg
1423 $ hg -R r1 -q unbundle $TESTDIR/bundles/issue4438-r1.hg
1234 $ hg -R r1 -q up
1424 $ hg -R r1 -q up
1235 $ hg init r2
1425 $ hg init r2
1236 $ hg -R r2 -q unbundle $TESTDIR/bundles/issue4438-r2.hg
1426 $ hg -R r2 -q unbundle $TESTDIR/bundles/issue4438-r2.hg
1237 $ hg -R r2 -q up
1427 $ hg -R r2 -q up
1238 #endif
1428 #endif
1239
1429
1240 Set iteration order could cause wrong and unstable results - fixed in 73cfaa348650:
1430 Set iteration order could cause wrong and unstable results - fixed in 73cfaa348650:
1241
1431
1242 $ hg -R r1 outgoing r2 -T'{rev} '
1432 $ hg -R r1 outgoing r2 -T'{rev} '
1243 comparing with r2
1433 comparing with r2
1244 searching for changes
1434 searching for changes
1245 101 102 103 104 105 106 107 108 109 110 (no-eol)
1435 101 102 103 104 105 106 107 108 109 110 (no-eol)
1246
1436
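The fix referenced above (73cfaa348650) was about results that depended on the iteration order of a Python set. The standalone snippet below demonstrates only that underlying pitfall, not the discovery code itself: taking the first n elements of a set is not a stable sample, whereas sorting first is deterministic.

  # Demonstration of the pitfall only, not the discovery code: iteration
  # order over a set is an implementation detail and, for strings, changes
  # between runs because of hash randomization.
  import itertools

  nodes = {'b%d' % i for i in range(20)}

  def first_n(s, n):
      return list(itertools.islice(s, n))

  print(first_n(nodes, 5))     # may differ from one run to the next
  print(sorted(nodes)[:5])     # deterministic alternative
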
1437 The case where all the 'initialsamplesize' samples already were common would
1438 give 'all remote heads known locally' without checking the remaining heads -
1439 fixed in 86c35b7ae300:
1440
1441 $ cat >> r1/.hg/hgrc << EOF
1442 > [devel]
1443 > discovery.randomize = False
1444 > EOF
1445
1446 $ hg -R r1 outgoing r2 -T'{rev} ' --config extensions.blackbox= \
1447 > --config blackbox.track='command commandfinish discovery'
1448 comparing with r2
1449 searching for changes
1450 101 102 103 104 105 106 107 108 109 110 (no-eol)
1451 $ hg -R r1 --config extensions.blackbox= blackbox --config blackbox.track=
1452 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> serve --cmdserver chgunix * (glob) (chg !)
1453 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* (glob)
1454 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> found 101 common and 1 unknown server heads, 1 roundtrips in *.????s (glob)
1455 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* exited 0 after *.?? seconds (glob)
1456 $ cd ..
1457
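The bug exercised by the run above, fixed in 86c35b7ae300, boiled down to drawing a conclusion about every remote head from the initial sample alone. The toy functions below contrast that faulty shortcut with the check the message actually requires; the names are invented and this is not Mercurial's implementation.

  # Toy contrast of the buggy and the correct check; illustrative only.
  def all_remote_heads_known_buggy(initial_sample, locally_known):
      # wrong: only inspects the nodes that happened to be sampled
      return all(node in locally_known for node in initial_sample)

  def all_remote_heads_known_fixed(remote_heads, locally_known):
      # right: "all remote heads known locally" is a claim about every head
      return all(head in locally_known for head in remote_heads)

  remote_heads = ['h1', 'h2', 'h3']
  locally_known = {'h1', 'h2'}       # h3 only exists on the remote
  initial_sample = ['h1', 'h2']      # the sample happened to miss h3

  print(all_remote_heads_known_buggy(initial_sample, locally_known))   # True (wrong)
  print(all_remote_heads_known_fixed(remote_heads, locally_known))     # False
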
1458 Even if the set of revs to discover is restricted, unrelated revs may be
1459 returned as common heads.
1460
1461 $ mkdir ancestorsof
1462 $ cd ancestorsof
1463 $ hg init a
1464 $ hg clone a b -q
1465 $ cd b
1466 $ hg debugbuilddag '.:root *root *root'
1467 $ hg log -G -T '{node|short}'
1468 o fa942426a6fd
1469 |
1470 | o 66f7d451a68b
1471 |/
1472 o 1ea73414a91b
1473
1474 $ hg push -r 66f7d451a68b -q
1475 $ hg debugdiscovery --verbose --rev fa942426a6fd
1476 comparing with $TESTTMP/ancestorsof/a
1477 searching for changes
1478 elapsed time: * seconds (glob)
1479 heads summary:
1480 total common heads: 1
1481 also local heads: 1
1482 also remote heads: 1
1483 both: 1
1484 local heads: 2
1485 common: 1
1486 missing: 1
1487 remote heads: 1
1488 common: 1
1489 unknown: 0
1490 local changesets: 3
1491 common: 2
1492 heads: 1
1493 roots: 1
1494 missing: 1
1495 heads: 1
1496 roots: 1
1497 first undecided set: 1
1498 heads: 1
1499 roots: 1
1500 common: 0
1501 missing: 1
1502 common heads: 66f7d451a68b
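
The run above illustrates the point of this section: discovery was restricted to the ancestors of fa942426a6fd, yet the reported common head is 66f7d451a68b, which is not one of those ancestors. The toy below restates that relationship on the same three-node graph (shortened names and a hand-written helper; not Mercurial code).

  # Toy restatement with a hand-written helper; illustrative only.
  parents = {
      '1ea73414': [],
      '66f7d451': ['1ea73414'],   # the head that was pushed earlier
      'fa942426': ['1ea73414'],   # the head discovery was restricted to
  }

  def ancestors(node):
      seen, stack = set(), [node]
      while stack:
          for p in parents[stack.pop()]:
              if p not in seen:
                  seen.add(p)
                  stack.append(p)
      return seen

  common_heads = {'66f7d451'}
  print(common_heads & ancestors('fa942426'))   # set(): the head is unrelated
  print('66f7d451' in ancestors('fa942426'))    # False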